v1alpha1

package
v0.6.310 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Jan 18, 2024 License: Apache-2.0 Imports: 27 Imported by: 0

Documentation

Overview

  • Copyright (c) 2020. *

  • Metaprov.com

  • Copyright (c) 2020. *

  • Metaprov.com

+groupName=data.modela.ai +kubebuilder:object:generate=true

+kubebuilder:object:generate=true +groupName=data.modela.ai

Index

Constants

View Source
const (
	DataPipelineReady = "Ready"
	DataPipelineSaved = "Saved"
)

// DataPipeline Condition

View Source
const (
	DataPipelineRunCompleted = "Completed"
	DataPipelineRunSaved     = "Saved"
)

// DataPipelineRun Condition

View Source
const (
	MultiDatasetSameNumberOfRows      catalog.AssertionType = "multi-dataset-same-number-of-rows"
	MultiDatasetOuterJoinEmpty        catalog.AssertionType = "multi-dataset-outer-join-empty"
	MultiDatasetOuterJoinNotEmpty     catalog.AssertionType = "multi-dataset-outer-join-not-empty"
	MultiDatasetInnerJoinEmpty        catalog.AssertionType = "multi-dataset-inner-join-empty"
	MultiDatasetInnerJoinNotEmpty     catalog.AssertionType = "multi-dataset-inner-join-not-empty"
	MultiDatasetLeftJoinEmpty         catalog.AssertionType = "multi-dataset-left-join-empty"
	MultiDatasetLeftJoinNotEmpty      catalog.AssertionType = "multi-dataset-left-join-not-empty"
	MultiDatasetRightJoinEmpty        catalog.AssertionType = "multi-dataset-right-join-empty"
	MultiDatasetRightJoinNotEmpty     catalog.AssertionType = "multi-dataset-right-join-not-empty"
	DatasetColumnsCountEqual          catalog.AssertionType = "dataset-columns-count-equal"
	DatasetColumnsNameInSet           catalog.AssertionType = "dataset-columns-in-set"
	DatasetColumnsInOrderedList       catalog.AssertionType = "dataset-columns-in-ordered-list"
	DatasetRowCountBetween            catalog.AssertionType = "dataset-row-count-between"
	DatasetNotEmpty                   catalog.AssertionType = "dataset-dataset-not-empty"
	DatasetEmpty                      catalog.AssertionType = "dataset-empty"
	MultiColumnCorr                   catalog.AssertionType = "multi-column-corr"
	MultiColumnUnique                 catalog.AssertionType = "multi-column-unique"
	MultiColumnLessThan               catalog.AssertionType = "multi-column-less-than"
	MultiColumnLessThanOrEqualTo      catalog.AssertionType = "multi-column-less-than-or-equal-to"
	MultiColumnUniqueValueRatio       catalog.AssertionType = "multi-column-unique-value-ratio"
	ColumnExist                       catalog.AssertionType = "column-exist"
	ColumnHaveValues                  catalog.AssertionType = "column-have-values"
	ColumnHasNoValue                  catalog.AssertionType = "column-has-no-values"
	ColumnHaveNulls                   catalog.AssertionType = "column-value-have-nulls"
	ColumnHasNoNull                   catalog.AssertionType = "column-value-has-no-nulls"
	ColumnOfType                      catalog.AssertionType = "column-of-type"
	ColumnValuesInSet                 catalog.AssertionType = "column-values-in-set"
	ColumnValuesIncreasing            catalog.AssertionType = "column-values-increasing"
	ColumnsValuesDecreasing           catalog.AssertionType = "column-values-decreasing"
	ColumnValueLengthBetween          catalog.AssertionType = "column-value-length-between"
	ColumnValueNameMatchPattern       catalog.AssertionType = "column-value-match-pattern"
	ColumnValueIsDate                 catalog.AssertionType = "column-value-is-date"
	ColumnValueIsJson                 catalog.AssertionType = "column-value-is-json"
	ColumnValueInDomain               catalog.AssertionType = "column-value-in-domain"
	ColumnUniqueValueCountBetween     catalog.AssertionType = "column-unique-value-count-between"
	ColumnOutlierValueUniqueBetween   catalog.AssertionType = "column-outlier-value-count-between"
	ColumnValidValueUniqueBetween     catalog.AssertionType = "column-valid-values-count-between"
	ColumnMismatchValueBetween        catalog.AssertionType = "column-mismatch-values-between"
	ColumnValueMinBetween             catalog.AssertionType = "column-value-min-between"
	ColumnValueLowerQuartileBetween   catalog.AssertionType = "column-value-lower-quartile-between"
	ColumnValueMedianBetween          catalog.AssertionType = "column-value-median-between"
	ColumnValueAvgBetween             catalog.AssertionType = "column-value-average-between"
	ColumnValueUpperQuartileBetween   catalog.AssertionType = "column-value-upper-quartile-between"
	ColumnValueMaxBetween             catalog.AssertionType = "column-value-max-between"
	ColumnValueStddevBetween          catalog.AssertionType = "column-value-stddev-between"
	ColumnValueChiSquarePValueBetween catalog.AssertionType = "column-value-chi-square-p-value-between"
	ColumnValuePairCramersBetween     catalog.AssertionType = "column-value-pair-cramers-between"
	ColumnValueHasEntropy             catalog.AssertionType = "column-value-has-entropy"
	ColumnValueMinLength              catalog.AssertionType = "column-value-min-length"
	ColumnValueMaxLength              catalog.AssertionType = "column-value-max-length"
	ColumnValueNonNegative            catalog.AssertionType = "column-value-non-negative"
	ColumnValuePositive               catalog.AssertionType = "column-value-positive"
	FileSizeBetween                   catalog.AssertionType = "file-size-between"
	FileExist                         catalog.AssertionType = "file-exist"
	FileRegexMatchCountBetween        catalog.AssertionType = "file-regex-match-count-between"
	FileValidJson                     catalog.AssertionType = "file-valid-json"
	FileValidCsv                      catalog.AssertionType = "file-valid-csv"
	// Predefined types
	ColumnValuePhoneNumber catalog.AssertionType = "column-value-phone-number"
	ColumnValueEmail       catalog.AssertionType = "column-value-email"
	ColumnValueCreditCard  catalog.AssertionType = "column-value-credit-card"
	ColumnValueBase64      catalog.AssertionType = "column-value-base64"
	ColumnValueIsbn10      catalog.AssertionType = "column-value-isbn10"
	ColumnValueIsbn13      catalog.AssertionType = "column-value-isbn13"
	ColumnValueUUID3       catalog.AssertionType = "column-value-uuid3"
	ColumnValueUUID4       catalog.AssertionType = "column-value-uuid4"
	ColumnValueUUID5       catalog.AssertionType = "column-value-uuid5"
	ColumnValueAscii       catalog.AssertionType = "column-value-ascii"
	ColumnValueLatitude    catalog.AssertionType = "column-value-latitude"
	ColumnValueLongitude   catalog.AssertionType = "column-value-longitude"
	ColumnValueSSN         catalog.AssertionType = "column-value-ssn"
	ColumnValueHostName    catalog.AssertionType = "column-value-hostname"
)
View Source
const (
	// Regular state of the store.
	DatasourceReady = "Ready"
	DatasourceSaved = "Saved"
)

// Datasource Condition

View Source
const (
	ModelDriftTwoSampleKSTestLessThan     catalog.AssertionType = "model-two-sample-ks-test-less-than"
	ModelDriftChiSquaredLessThan          catalog.AssertionType = "model-chi-squared-test-less-than"
	ModelDriftProportionDiffTestLessThan  catalog.AssertionType = "model-proportion-difference-test-less-than"
	ModelDriftWassersteinDistanceLessThan catalog.AssertionType = "model-wasserstein-distance-less-than"
	ModelDriftJSDivergenceLessThan        catalog.AssertionType = "model-js-divergence-less"
	ModelDriftPSILessThan                 catalog.AssertionType = "model-psi-less-than"
	ModelDriftKLDivergenceLessThan        catalog.AssertionType = "model-kl-divergence-less-than"
)
View Source
const (
	FeatureHistogramReady      = "Ready"
	FeatureHistogramUnitTested = "UnitTested"
	FeatureHistogramSaved      = "Saved"
)

// FeatureHistogram Condition

View Source
const (
	FeatureGroupReady    = "Ready"
	FeatureGroupSynced   = "Synced"
	FeatureGroupIngested = "Ingested"
)
View Source
const (
	RecipeReady = "Ready"
	RecipeSaved = "Saved"
)

// Recipe Condition

View Source
const (
	EntityReady = "Ready"
)

// Entity Condition

View Source
const (
	RecipeRunCompleted = "Completed"
)

// RecipeRun Condition

Variables

View Source
var (
	ErrInvalidLengthGenerated        = fmt.Errorf("proto: negative length found during unmarshaling")
	ErrIntOverflowGenerated          = fmt.Errorf("proto: integer overflow")
	ErrUnexpectedEndOfGroupGenerated = fmt.Errorf("proto: unexpected end of group")
)
View Source
var (
	// SchemeBuilder registers our types
	SchemeBuilder = k8sruntime.NewSchemeBuilder(AddKnownTypes)
	// AddToScheme local alias for SchemeBuilder.AddToScheme
	AddToScheme = SchemeBuilder.AddToScheme
)
View Source
var SchemeGroupVersion = schema.GroupVersion{Group: data.GroupName, Version: "v1alpha1"}

SchemeGroupVersion is group version used to register these objects

Functions

func AddKnownTypes

func AddKnownTypes(scheme *k8sruntime.Scheme) error

Adds the list of known types to api.Scheme.

func Kind

func Kind(kind string) schema.GroupKind

Kind takes an unqualified kind and returns back a Group qualified GroupKind

func Resource

func Resource(resource string) schema.GroupResource

Resource takes an unqualified resource and returns a Group qualified GroupResource

Types

type ApprovalReviewStatus added in v0.6.252

type ApprovalReviewStatus struct {
	// Reviewer specifies the name of the Account which is responsible for this review
	Reviewer string `json:"reviewer,omitempty" protobuf:"bytes,1,opt,name=reviewer"`
	// The approval status, which can be approved, rejected, or unconfirmed
	Result ApprovalType `json:"result,omitempty" protobuf:"bytes,2,opt,name=result"`
	// The date at which the review took place. If empty, the review is still unconfirmed
	ApprovedAt *metav1.Time `json:"approvedAt,omitempty" protobuf:"bytes,3,opt,name=approvedAt"`
	// The notes for the review, created by the reviewer at the time of the decision
	Notes string `json:"notes,omitempty" protobuf:"bytes,4,opt,name=notes"`
}

func (*ApprovalReviewStatus) DeepCopy added in v0.6.252

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ApprovalReviewStatus.

func (*ApprovalReviewStatus) DeepCopyInto added in v0.6.252

func (in *ApprovalReviewStatus) DeepCopyInto(out *ApprovalReviewStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ApprovalReviewStatus) Descriptor added in v0.6.252

func (*ApprovalReviewStatus) Descriptor() ([]byte, []int)

func (*ApprovalReviewStatus) Marshal added in v0.6.252

func (m *ApprovalReviewStatus) Marshal() (dAtA []byte, err error)

func (*ApprovalReviewStatus) MarshalTo added in v0.6.252

func (m *ApprovalReviewStatus) MarshalTo(dAtA []byte) (int, error)

func (*ApprovalReviewStatus) MarshalToSizedBuffer added in v0.6.252

func (m *ApprovalReviewStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*ApprovalReviewStatus) ProtoMessage added in v0.6.252

func (*ApprovalReviewStatus) ProtoMessage()

func (*ApprovalReviewStatus) Reset added in v0.6.252

func (m *ApprovalReviewStatus) Reset()

func (*ApprovalReviewStatus) Size added in v0.6.252

func (m *ApprovalReviewStatus) Size() (n int)

func (*ApprovalReviewStatus) String added in v0.6.252

func (this *ApprovalReviewStatus) String() string

func (*ApprovalReviewStatus) Unmarshal added in v0.6.252

func (m *ApprovalReviewStatus) Unmarshal(dAtA []byte) error

func (*ApprovalReviewStatus) XXX_DiscardUnknown added in v0.6.252

func (m *ApprovalReviewStatus) XXX_DiscardUnknown()

func (*ApprovalReviewStatus) XXX_Marshal added in v0.6.252

func (m *ApprovalReviewStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ApprovalReviewStatus) XXX_Merge added in v0.6.252

func (m *ApprovalReviewStatus) XXX_Merge(src proto.Message)

func (*ApprovalReviewStatus) XXX_Size added in v0.6.252

func (m *ApprovalReviewStatus) XXX_Size() int

func (*ApprovalReviewStatus) XXX_Unmarshal added in v0.6.252

func (m *ApprovalReviewStatus) XXX_Unmarshal(b []byte) error

type ApprovalSpec added in v0.6.252

type ApprovalSpec struct {
	// Enabled indicates if model approval is enabled
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Enabled *bool `json:"enabled,omitempty" protobuf:"varint,1,opt,name=enabled"`
	// DecisionType specifies the requirements for the model to be approved in the case of there being multiple reviewers
	// +kubebuilder:validation:Optional
	DecisionType *DecisionType `json:"decisionType,omitempty" protobuf:"varint,2,opt,name=decisionType"`
	// Reviewers contains the name of Accounts that will be required to approve models for promotion
	// +kubebuilder:validation:Optional
	Reviewers []string `json:"members,omitempty" protobuf:"bytes,3,rep,name=members"`
}

ApprovalSpec describes the approval requirements for a Model

func (*ApprovalSpec) DeepCopy added in v0.6.252

func (in *ApprovalSpec) DeepCopy() *ApprovalSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ApprovalSpec.

func (*ApprovalSpec) DeepCopyInto added in v0.6.252

func (in *ApprovalSpec) DeepCopyInto(out *ApprovalSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ApprovalSpec) Descriptor added in v0.6.252

func (*ApprovalSpec) Descriptor() ([]byte, []int)

func (*ApprovalSpec) Marshal added in v0.6.252

func (m *ApprovalSpec) Marshal() (dAtA []byte, err error)

func (*ApprovalSpec) MarshalTo added in v0.6.252

func (m *ApprovalSpec) MarshalTo(dAtA []byte) (int, error)

func (*ApprovalSpec) MarshalToSizedBuffer added in v0.6.252

func (m *ApprovalSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*ApprovalSpec) ProtoMessage added in v0.6.252

func (*ApprovalSpec) ProtoMessage()

func (*ApprovalSpec) Reset added in v0.6.252

func (m *ApprovalSpec) Reset()

func (*ApprovalSpec) Size added in v0.6.252

func (m *ApprovalSpec) Size() (n int)

func (*ApprovalSpec) String added in v0.6.252

func (this *ApprovalSpec) String() string

func (*ApprovalSpec) Unmarshal added in v0.6.252

func (m *ApprovalSpec) Unmarshal(dAtA []byte) error

func (*ApprovalSpec) XXX_DiscardUnknown added in v0.6.252

func (m *ApprovalSpec) XXX_DiscardUnknown()

func (*ApprovalSpec) XXX_Marshal added in v0.6.252

func (m *ApprovalSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ApprovalSpec) XXX_Merge added in v0.6.252

func (m *ApprovalSpec) XXX_Merge(src proto.Message)

func (*ApprovalSpec) XXX_Size added in v0.6.252

func (m *ApprovalSpec) XXX_Size() int

func (*ApprovalSpec) XXX_Unmarshal added in v0.6.252

func (m *ApprovalSpec) XXX_Unmarshal(b []byte) error

type ApprovalStatus added in v0.6.252

type ApprovalStatus struct {
	// Status defines the overall approval status of the Model
	Status ApprovalType `json:"status,omitempty" protobuf:"bytes,1,opt,name=status"`
	// Reviews contains an ApprovalReviewStatus for each reviewer specified by the ApprovalSpec for the Model
	Reviews []ApprovalReviewStatus `json:"reviews,omitempty" protobuf:"bytes,2,opt,name=reviews"`
}

ApprovalStatus describes the current state of a Model's approval review

func (*ApprovalStatus) DeepCopy added in v0.6.252

func (in *ApprovalStatus) DeepCopy() *ApprovalStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ApprovalStatus.

func (*ApprovalStatus) DeepCopyInto added in v0.6.252

func (in *ApprovalStatus) DeepCopyInto(out *ApprovalStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ApprovalStatus) Descriptor added in v0.6.252

func (*ApprovalStatus) Descriptor() ([]byte, []int)

func (*ApprovalStatus) Marshal added in v0.6.252

func (m *ApprovalStatus) Marshal() (dAtA []byte, err error)

func (*ApprovalStatus) MarshalTo added in v0.6.252

func (m *ApprovalStatus) MarshalTo(dAtA []byte) (int, error)

func (*ApprovalStatus) MarshalToSizedBuffer added in v0.6.252

func (m *ApprovalStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*ApprovalStatus) ProtoMessage added in v0.6.252

func (*ApprovalStatus) ProtoMessage()

func (*ApprovalStatus) Reset added in v0.6.252

func (m *ApprovalStatus) Reset()

func (*ApprovalStatus) Size added in v0.6.252

func (m *ApprovalStatus) Size() (n int)

func (*ApprovalStatus) String added in v0.6.252

func (this *ApprovalStatus) String() string

func (*ApprovalStatus) Unmarshal added in v0.6.252

func (m *ApprovalStatus) Unmarshal(dAtA []byte) error

func (*ApprovalStatus) XXX_DiscardUnknown added in v0.6.252

func (m *ApprovalStatus) XXX_DiscardUnknown()

func (*ApprovalStatus) XXX_Marshal added in v0.6.252

func (m *ApprovalStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ApprovalStatus) XXX_Merge added in v0.6.252

func (m *ApprovalStatus) XXX_Merge(src proto.Message)

func (*ApprovalStatus) XXX_Size added in v0.6.252

func (m *ApprovalStatus) XXX_Size() int

func (*ApprovalStatus) XXX_Unmarshal added in v0.6.252

func (m *ApprovalStatus) XXX_Unmarshal(b []byte) error

type ApprovalType

type ApprovalType string
const (
	DecisionTypeUnanimous ApprovalType = "unanimous"
	DecisionTypeMajority  ApprovalType = "majority"
	DecisionTypeOneOrMore ApprovalType = "one-or-more"
)
const (
	ApprovalTypeApproved    ApprovalType = "approved"
	ApprovalTypeReject      ApprovalType = "rejected"
	ApprovalTypeUnconfirmed ApprovalType = "unconfirmed"
)

type BarChartSpec

type BarChartSpec struct {
	// Dataset is the name of the dataset
	// +kubebuilder:validation:Optional
	DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"`
	// name of the X column
	// +kubebuilder:validation:Optional
	X *string `json:"x,omitempty" protobuf:"bytes,2,opt,name=x"`
	// Y column
	// +kubebuilder:validation:Optional
	Y *string `json:"y,omitempty" protobuf:"bytes,3,opt,name=y"`
	// Show the chart legend
	// +kubebuilder:validation:Optional
	Legend *bool `json:"legend,omitempty" protobuf:"varint,4,opt,name=legend"`
	// Sort the chart values
	// +kubebuilder:validation:Optional
	Sort *bool `json:"sort,omitempty" protobuf:"varint,5,opt,name=sort"`
}

Bar Chart

func (*BarChartSpec) DeepCopy

func (in *BarChartSpec) DeepCopy() *BarChartSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BarChartSpec.

func (*BarChartSpec) DeepCopyInto

func (in *BarChartSpec) DeepCopyInto(out *BarChartSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*BarChartSpec) Descriptor

func (*BarChartSpec) Descriptor() ([]byte, []int)

func (*BarChartSpec) Marshal

func (m *BarChartSpec) Marshal() (dAtA []byte, err error)

func (*BarChartSpec) MarshalTo

func (m *BarChartSpec) MarshalTo(dAtA []byte) (int, error)

func (*BarChartSpec) MarshalToSizedBuffer

func (m *BarChartSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*BarChartSpec) ProtoMessage

func (*BarChartSpec) ProtoMessage()

func (*BarChartSpec) Reset

func (m *BarChartSpec) Reset()

func (*BarChartSpec) Size

func (m *BarChartSpec) Size() (n int)

func (*BarChartSpec) String

func (this *BarChartSpec) String() string

func (*BarChartSpec) Unmarshal

func (m *BarChartSpec) Unmarshal(dAtA []byte) error

func (*BarChartSpec) XXX_DiscardUnknown

func (m *BarChartSpec) XXX_DiscardUnknown()

func (*BarChartSpec) XXX_Marshal

func (m *BarChartSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*BarChartSpec) XXX_Merge

func (m *BarChartSpec) XXX_Merge(src proto.Message)

func (*BarChartSpec) XXX_Size

func (m *BarChartSpec) XXX_Size() int

func (*BarChartSpec) XXX_Unmarshal

func (m *BarChartSpec) XXX_Unmarshal(b []byte) error

type Column

type Column struct {
	// The name of the column
	// +kubebuilder:validation:MaxLength=63
	// +kubebuilder:validation:MinLength=1
	Name string `json:"name" protobuf:"bytes,1,opt,name=name"`
	// The display name of the column, which is used in reports and other visual elements. If omitted, it will use the raw name
	// +kubebuilder:validation:MaxLength=63
	// +kubebuilder:validation:Optional
	DisplayName *string `json:"displayName,omitempty" protobuf:"bytes,2,opt,name=displayName"`
	// The data type of the feature (e.g. number, string, boolean, etc.)
	DataType catalog.DataType `json:"datatype" protobuf:"bytes,3,opt,name=datatype"`
	// The data domain of the feature, which constrains the contents of the feature to a specific set of values
	// +kubebuilder:validation:Optional
	Format *catalog.DataDomain `json:"format,omitempty" protobuf:"bytes,4,opt,name=format"`
	// The user-specified description of the feature
	// +kubebuilder:validation:MaxLength=63
	// +kubebuilder:validation:Optional
	Description *string `json:"description,omitempty" protobuf:"bytes,5,opt,name=description"`
	// Indicates if the feature should be ignored when building models
	// +kubebuilder:validation:Optional
	Ignore *bool `json:"ignore,omitempty" protobuf:"varint,6,opt,name=ignore"`
	// Indicates if the feature is the target feature of the model, and the feature which predictions will be made on
	// +kubebuilder:validation:Optional
	Target *bool `json:"target,omitempty" protobuf:"varint,7,opt,name=target"`
	// Indicates if the column can contain null values
	// +kubebuilder:validation:Optional
	Nullable *bool `json:"nullable,omitempty" protobuf:"varint,8,opt,name=nullable"`
	// Denotes if the column specifies a primary key of a database table (i.e. a user's ID)
	// +kubebuilder:validation:Optional
	PK *bool `json:"pk,omitempty" protobuf:"varint,9,opt,name=pk"`
	// Denotes if the column specifies a foreign key of another database table
	// +kubebuilder:validation:Optional
	FK *bool `json:"fk,omitempty" protobuf:"varint,10,opt,name=fk"`
	// The integer value which the values of the column should be a multiple of
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:validation:Optional
	MultipleOf *int32 `json:"multipleOf,omitempty" protobuf:"varint,11,opt,name=multipleOf"`
	// The maximum value of all values in the column
	// +kubebuilder:validation:Optional
	Maximum *float64 `json:"maximum,omitempty" protobuf:"bytes,12,opt,name=maximum"`
	// The exclusive upper limit of all values in the column, which does not include the maximum value
	// +kubebuilder:validation:Optional
	ExclusiveMaximum *bool `json:"exclusiveMaximum,omitempty" protobuf:"varint,13,opt,name=exclusiveMaximum"`
	// The minimum value of all values in the column
	// +kubebuilder:validation:Optional
	Minimum *float64 `json:"minimum,omitempty" protobuf:"bytes,14,opt,name=minimum"`
	// The exclusive lower limit of all values in the column, which does not include the minimum value
	// +kubebuilder:validation:Optional
	ExclusiveMinimum *bool `json:"exclusiveMinimum,omitempty" protobuf:"varint,15,opt,name=exclusiveMinimum"`
	// The maximum length of values in the column, if the column data type is a string
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:validation:Optional
	MaxLength *int32 `json:"maxLength,omitempty" protobuf:"varint,16,opt,name=maxLength"`
	// The minimum length of values in the column, if the column data type is a string
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:validation:Optional
	MinLength *int32 `json:"minLength,omitempty" protobuf:"varint,17,opt,name=minLength"`
	// The regex pattern which values in the column must adhere to
	// +kubebuilder:validation:Optional
	Pattern *string `json:"pattern,omitempty" protobuf:"bytes,18,opt,name=pattern"`
	// Required
	// +kubebuilder:validation:Optional
	Required *bool `json:"required,omitempty" protobuf:"varint,19,opt,name=required"`
	// A user-specified example value
	// +kubebuilder:validation:Optional
	Example *string `json:"example,omitempty" protobuf:"bytes,20,opt,name=example"`
	// A link to user-specified external documentation
	// +kubebuilder:validation:Optional
	ExternalDocs *string `json:"externalDocs,omitempty" protobuf:"bytes,21,opt,name=externalDocs"`
	// The collection of unique values for categorical features
	// +kubebuilder:validation:Optional
	Enum []string `json:"enum,omitempty" protobuf:"bytes,22,rep,name=enum"`
	// The maximum number of items if the column is a list of values
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:validation:Optional
	MaxItems *int32 `json:"maxItems,omitempty" protobuf:"varint,24,opt,name=maxItems"`
	// The minimum number of items if the column is a list of values
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:validation:Optional
	MinItems *int32 `json:"minItems,omitempty" protobuf:"varint,25,opt,name=minItems"`
	// Enforce that all the items in the list are unique
	// +kubebuilder:validation:Optional
	UniqueItems *bool `json:"uniqueItems,omitempty" protobuf:"varint,26,opt,name=uniqueItems"`
	// Indicates if the column contains personally identifiable information
	// +kubebuilder:validation:Optional
	PII *bool `json:"pii,omitempty" protobuf:"varint,28,opt,name=pii"`
	// Indicates if the column contains personal health information
	// +kubebuilder:validation:Optional
	PHI *bool `json:"phi,omitempty" protobuf:"varint,29,opt,name=phi"`
	// Protected means that this feature is important for ethical AI / Fairness
	// +kubebuilder:validation:Optional
	Protected *bool `json:"protected,omitempty" protobuf:"varint,31,opt,name=protected"`
	// The default value for number types; used internally for synthetic data and validation
	DefaultValueNum *float64 `json:"DefaultValueNum,omitempty" protobuf:"bytes,32,opt,name=defaultValueNum"`
	// Indicates if values from this column will be sampled on a logarithmic scale
	// +kubebuilder:validation:Optional
	Log *bool `json:"log,omitempty" protobuf:"varint,33,opt,name=log"`
	// Mu is the mean of the normal distribution
	// +kubebuilder:validation:Optional
	Mu *float64 `json:"mu,omitempty" protobuf:"bytes,34,opt,name=mu"`
	// Sigma is the standard deviation of the distribution
	// +kubebuilder:validation:Optional
	Sigma *float64 `json:"sigma,omitempty" protobuf:"bytes,35,opt,name=sigma"`
	// The threshold skew value for skew detection for the feature represented by this column.
	// +kubebuilder:validation:Optional
	SkewThreshold *float64 `json:"skewThreshold,omitempty" protobuf:"bytes,36,opt,name=skewThreshold"`
	// The threshold drift value for model drift detection for the feature represented by this column
	// +kubebuilder:validation:Optional
	DriftThreshold *float64 `json:"driftThreshold,omitempty" protobuf:"bytes,37,opt,name=driftThreshold"`
	// Indicates if the column is a key column
	// +kubebuilder:validation:Optional
	Key *bool `json:"key,omitempty" protobuf:"varint,38,opt,name=key"`
	// Indicates if the column holds fold values
	// +kubebuilder:validation:Optional
	Fold *bool `json:"fold,omitempty" protobuf:"varint,39,opt,name=fold"`
	// If true, this is a weight column
	// +kubebuilder:validation:Optional
	Weight *bool `json:"weight,omitempty" protobuf:"varint,40,opt,name=weight"`
	// Indicates that the feature should always be used in training
	// +kubebuilder:validation:Optional
	Reserved *bool `json:"reserved,omitempty" protobuf:"varint,41,opt,name=reserved"`
	// The recommended imputation method for the column
	// +kubebuilder:validation:Optional
	Imputation *catalog.Imputation `json:"imputation,omitempty" protobuf:"bytes,42,opt,name=imputation"`
	// The recommended scaling method for the column
	// +kubebuilder:validation:Optional
	Scaling *catalog.Scaling `json:"scaling,omitempty" protobuf:"bytes,43,opt,name=scaling"`
	// Indicates if the feature was automatically generated
	// +kubebuilder:validation:Optional
	Generated *bool `json:"generated,omitempty" protobuf:"varint,44,opt,name=generated"`
	// The formula used to generate the column
	// +kubebuilder:validation:Optional
	Formula *string `json:"formula,omitempty" protobuf:"bytes,45,opt,name=formula"`
	// Indicates if the column is an ID column
	// +kubebuilder:validation:Optional
	ID *bool `json:"id,omitempty" protobuf:"varint,46,opt,name=id"`
	// The step value if the column values are a sequence of numbers
	// +kubebuilder:validation:Optional
	Step *float64 `json:"step,omitempty" protobuf:"bytes,47,opt,name=step"`
	// Contain the Index for the column in the schema
	// +kubebuilder:validation:Optional
	Loc int32 `json:"loc,omitempty" protobuf:"varint,48,opt,name=loc"`
	// The format of the datetime column. This is only set if the column contains a datetime type.
	// +kubebuilder:validation:Optional
	DateTimeFormat *string `json:"datetimeFormat,omitempty" protobuf:"bytes,49,opt,name=datetimeFormat"`
	// Indicates if the column contains a time series.
	// In the case of forecasting, if only one column is a time series, this is a univariate time series.
	// Otherwise, if two or more columns contain time series, then this is a multivariate time series.
	// +kubebuilder:validation:Optional
	TimeSeries *bool `json:"timeseries,omitempty" protobuf:"varint,50,opt,name=timeseries"`
	// In forecasting-based data sets, indicates if the column is a regressor.
	// This is relevant only for time series schema
	// +kubebuilder:validation:Optional
	Regressor *bool `json:"regressor,omitempty" protobuf:"varint,51,opt,name=regressor"`
	// In forecasting-based data sets, indicates if the column is a lagged regressor.
	// This is relevant only for time series schema
	// +kubebuilder:validation:Optional
	LaggedRegressor *bool `json:"laggedRegressor,omitempty" protobuf:"varint,52,opt,name=laggedRegressor"`
	// For time series, this field indicates that this column will be used as the datetime index
	// for the time series. Note that there can be multiple datetime type columns, but only one
	// time column.
	// +kubebuilder:validation:Optional
	TimeIndex *bool `json:"timeIndex,omitempty" protobuf:"varint,53,opt,name=timeIndex"`
	// If this column is an aggregate, defines the aggregation
	// +kubebuilder:validation:Optional
	Aggr *catalog.Aggregate `json:"aggr,omitempty" protobuf:"varint,54,opt,name=aggr"`
	// The aggregation window, if this column is an aggregate
	// +kubebuilder:validation:Optional
	Window *int32 `json:"window,omitempty" protobuf:"varint,55,opt,name=window"`
}

Column specifies the attribute of a single column in a dataset. The fields of the Column align with the JSON schema standard; you can view detailed documentation at https://json-schema.org/draft/2020-12/json-schema-validation.html

func (*Column) DeepCopy

func (in *Column) DeepCopy() *Column

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Column.

func (*Column) DeepCopyInto

func (in *Column) DeepCopyInto(out *Column)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*Column) Descriptor

func (*Column) Descriptor() ([]byte, []int)

func (*Column) Marshal

func (m *Column) Marshal() (dAtA []byte, err error)

func (*Column) MarshalTo

func (m *Column) MarshalTo(dAtA []byte) (int, error)

func (*Column) MarshalToSizedBuffer

func (m *Column) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*Column) ProtoMessage

func (*Column) ProtoMessage()

func (*Column) Reset

func (m *Column) Reset()

func (*Column) Size

func (m *Column) Size() (n int)

func (*Column) String

func (this *Column) String() string

func (*Column) Unmarshal

func (m *Column) Unmarshal(dAtA []byte) error

func (*Column) Validate

func (column *Column) Validate() (bool, []metav1.StatusCause)

func (*Column) ValidateColumn

func (column *Column) ValidateColumn() (bool, []metav1.StatusCause)

func (*Column) XXX_DiscardUnknown

func (m *Column) XXX_DiscardUnknown()

func (*Column) XXX_Marshal

func (m *Column) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*Column) XXX_Merge

func (m *Column) XXX_Merge(src proto.Message)

func (*Column) XXX_Size

func (m *Column) XXX_Size() int

func (*Column) XXX_Unmarshal

func (m *Column) XXX_Unmarshal(b []byte) error

type ColumnHistogram added in v0.4.925

// ColumnHistogram holds the histogram data and drift measurements computed
// for a single column of a dataset.
type ColumnHistogram struct {
	// The name of the column
	//+kubebuilder:validation:Optional
	Name string `json:"name,omitempty" protobuf:"bytes,1,opt,name=name"`
	// The histogram data computed for this column
	//+kubebuilder:validation:Optional
	Histogram catalog.HistogramData `json:"histogram,omitempty" protobuf:"bytes,2,opt,name=histogram"`
	// Measurements (e.g. drift metrics) computed for this column
	//+kubebuilder:validation:Optional
	Metrics []catalog.Measurement `json:"metrics,omitempty" protobuf:"bytes,3,rep,name=metrics"`
	// True if drift was detected for this column
	//+kubebuilder:validation:Optional
	Drift *bool `json:"drift,omitempty" protobuf:"varint,4,opt,name=drift"`
}

func (*ColumnHistogram) DeepCopy added in v0.4.926

func (in *ColumnHistogram) DeepCopy() *ColumnHistogram

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ColumnHistogram.

func (*ColumnHistogram) DeepCopyInto added in v0.4.926

func (in *ColumnHistogram) DeepCopyInto(out *ColumnHistogram)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ColumnHistogram) Descriptor added in v0.4.925

func (*ColumnHistogram) Descriptor() ([]byte, []int)

func (*ColumnHistogram) Marshal added in v0.4.925

func (m *ColumnHistogram) Marshal() (dAtA []byte, err error)

func (*ColumnHistogram) MarshalTo added in v0.4.925

func (m *ColumnHistogram) MarshalTo(dAtA []byte) (int, error)

func (*ColumnHistogram) MarshalToSizedBuffer added in v0.4.925

func (m *ColumnHistogram) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*ColumnHistogram) ProtoMessage added in v0.4.925

func (*ColumnHistogram) ProtoMessage()

func (*ColumnHistogram) Reset added in v0.4.925

func (m *ColumnHistogram) Reset()

func (*ColumnHistogram) Size added in v0.4.925

func (m *ColumnHistogram) Size() (n int)

func (*ColumnHistogram) String added in v0.4.925

func (this *ColumnHistogram) String() string

func (*ColumnHistogram) Unmarshal added in v0.4.925

func (m *ColumnHistogram) Unmarshal(dAtA []byte) error

func (*ColumnHistogram) XXX_DiscardUnknown added in v0.4.925

func (m *ColumnHistogram) XXX_DiscardUnknown()

func (*ColumnHistogram) XXX_Marshal added in v0.4.925

func (m *ColumnHistogram) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ColumnHistogram) XXX_Merge added in v0.4.925

func (m *ColumnHistogram) XXX_Merge(src proto.Message)

func (*ColumnHistogram) XXX_Size added in v0.4.925

func (m *ColumnHistogram) XXX_Size() int

func (*ColumnHistogram) XXX_Unmarshal added in v0.4.925

func (m *ColumnHistogram) XXX_Unmarshal(b []byte) error

type ColumnSpec

// ColumnSpec describes one column of a dashboard row. Each row is divided
// into a 12-column grid and a single column may span several grid columns.
type ColumnSpec struct {
	// If true this column is an empty spacer
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Spacer *bool `json:"spacer,omitempty" protobuf:"varint,1,opt,name=spacer"`
	// The width in columns, each row is divided into 12 columns
	// +kubebuilder:validation:Optional
	Width *int32 `json:"width,omitempty" protobuf:"varint,2,opt,name=width"`
	// The component rendered inside this column.
	// NOTE(review): the json tag has no key name (`json:",omitempty"`), so
	// this field serializes under the Go field name "Content" while the
	// protobuf name is "content" — confirm whether the key was meant to be
	// lowercased; changing it now would break existing serialized manifests.
	Content ComponentSpec `json:",omitempty" protobuf:"bytes,3,opt,name=content"`
}

The spec for a column in a row. A column can span multiple grid-based columns.

func (*ColumnSpec) DeepCopy

func (in *ColumnSpec) DeepCopy() *ColumnSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ColumnSpec.

func (*ColumnSpec) DeepCopyInto

func (in *ColumnSpec) DeepCopyInto(out *ColumnSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ColumnSpec) Descriptor

func (*ColumnSpec) Descriptor() ([]byte, []int)

func (*ColumnSpec) Marshal

func (m *ColumnSpec) Marshal() (dAtA []byte, err error)

func (*ColumnSpec) MarshalTo

func (m *ColumnSpec) MarshalTo(dAtA []byte) (int, error)

func (*ColumnSpec) MarshalToSizedBuffer

func (m *ColumnSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*ColumnSpec) ProtoMessage

func (*ColumnSpec) ProtoMessage()

func (*ColumnSpec) Reset

func (m *ColumnSpec) Reset()

func (*ColumnSpec) Size

func (m *ColumnSpec) Size() (n int)

func (*ColumnSpec) String

func (this *ColumnSpec) String() string

func (*ColumnSpec) Unmarshal

func (m *ColumnSpec) Unmarshal(dAtA []byte) error

func (*ColumnSpec) XXX_DiscardUnknown

func (m *ColumnSpec) XXX_DiscardUnknown()

func (*ColumnSpec) XXX_Marshal

func (m *ColumnSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ColumnSpec) XXX_Merge

func (m *ColumnSpec) XXX_Merge(src proto.Message)

func (*ColumnSpec) XXX_Size

func (m *ColumnSpec) XXX_Size() int

func (*ColumnSpec) XXX_Unmarshal

func (m *ColumnSpec) XXX_Unmarshal(b []byte) error

type ComponentSpec

// ComponentSpec describes a single dashboard component: optional title,
// subtitle and footer text wrapped around an inline ComponentView that holds
// the actual visualization.
type ComponentSpec struct {
	// The title of the component
	// +kubebuilder:validation:Optional
	Title *string `json:"title,omitempty" protobuf:"bytes,1,opt,name=title"`
	// The subtitle of the component
	// +kubebuilder:validation:Optional
	SubTitle *string `json:"subtitle,omitempty" protobuf:"bytes,2,opt,name=subtitle"`
	// The footer text of the component
	// +kubebuilder:validation:Optional
	Footer        *string `json:"footer,omitempty" protobuf:"bytes,3,opt,name=footer"`
	// The view of the component, inlined into this spec
	ComponentView `json:",inline" protobuf:"bytes,4,opt,name=content"`
}

func (*ComponentSpec) DeepCopy

func (in *ComponentSpec) DeepCopy() *ComponentSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ComponentSpec.

func (*ComponentSpec) DeepCopyInto

func (in *ComponentSpec) DeepCopyInto(out *ComponentSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ComponentSpec) Descriptor

func (*ComponentSpec) Descriptor() ([]byte, []int)

func (*ComponentSpec) Marshal

func (m *ComponentSpec) Marshal() (dAtA []byte, err error)

func (*ComponentSpec) MarshalTo

func (m *ComponentSpec) MarshalTo(dAtA []byte) (int, error)

func (*ComponentSpec) MarshalToSizedBuffer

func (m *ComponentSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*ComponentSpec) ProtoMessage

func (*ComponentSpec) ProtoMessage()

func (*ComponentSpec) Reset

func (m *ComponentSpec) Reset()

func (*ComponentSpec) Size

func (m *ComponentSpec) Size() (n int)

func (*ComponentSpec) String

func (this *ComponentSpec) String() string

func (*ComponentSpec) Unmarshal

func (m *ComponentSpec) Unmarshal(dAtA []byte) error

func (*ComponentSpec) XXX_DiscardUnknown

func (m *ComponentSpec) XXX_DiscardUnknown()

func (*ComponentSpec) XXX_Marshal

func (m *ComponentSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ComponentSpec) XXX_Merge

func (m *ComponentSpec) XXX_Merge(src proto.Message)

func (*ComponentSpec) XXX_Size

func (m *ComponentSpec) XXX_Size() int

func (*ComponentSpec) XXX_Unmarshal

func (m *ComponentSpec) XXX_Unmarshal(b []byte) error

type ComponentView

// ComponentView is the view part of a dashboard component. Only one of the
// view fields below should be specified per component.
type ComponentView struct {
	// A single-metric view
	// +kubebuilder:validation:Optional
	Metric *MetricSpec `json:"metric,omitempty" protobuf:"bytes,1,opt,name=metric"`
	// A gauge view
	// +kubebuilder:validation:Optional
	Gauge *GaugeSpec `json:"gauge,omitempty" protobuf:"bytes,2,opt,name=gauge"`
	// A histogram view
	// +kubebuilder:validation:Optional
	Histogram *HistogramSpec `json:"histogram,omitempty" protobuf:"bytes,3,opt,name=histogram"`
	// A table view
	// +kubebuilder:validation:Optional
	Table *TableSpec `json:"table,omitempty" protobuf:"bytes,4,opt,name=table"`
	// A line-chart view
	// +kubebuilder:validation:Optional
	LineChart *LineChartSpec `json:"lineChart,omitempty" protobuf:"bytes,5,opt,name=lineChart"`
	// A bar-chart view
	// +kubebuilder:validation:Optional
	BarChart *BarChartSpec `json:"barChart,omitempty" protobuf:"bytes,6,opt,name=barChart"`
	// A scatter-plot view (serialized under the key "scatterPlot")
	// +kubebuilder:validation:Optional
	ScatterChart *ScatterPlotSpec `json:"scatterPlot,omitempty" protobuf:"bytes,7,opt,name=scatterPlot"`
}

Represents the view part of a component. Only one view field should be specified per component.

func (*ComponentView) DeepCopy

func (in *ComponentView) DeepCopy() *ComponentView

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ComponentView.

func (*ComponentView) DeepCopyInto

func (in *ComponentView) DeepCopyInto(out *ComponentView)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ComponentView) Descriptor

func (*ComponentView) Descriptor() ([]byte, []int)

func (*ComponentView) Marshal

func (m *ComponentView) Marshal() (dAtA []byte, err error)

func (*ComponentView) MarshalTo

func (m *ComponentView) MarshalTo(dAtA []byte) (int, error)

func (*ComponentView) MarshalToSizedBuffer

func (m *ComponentView) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*ComponentView) ProtoMessage

func (*ComponentView) ProtoMessage()

func (*ComponentView) Reset

func (m *ComponentView) Reset()

func (*ComponentView) Size

func (m *ComponentView) Size() (n int)

func (*ComponentView) String

func (this *ComponentView) String() string

func (*ComponentView) Unmarshal

func (m *ComponentView) Unmarshal(dAtA []byte) error

func (*ComponentView) XXX_DiscardUnknown

func (m *ComponentView) XXX_DiscardUnknown()

func (*ComponentView) XXX_Marshal

func (m *ComponentView) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ComponentView) XXX_Merge

func (m *ComponentView) XXX_Merge(src proto.Message)

func (*ComponentView) XXX_Size

func (m *ComponentView) XXX_Size() int

func (*ComponentView) XXX_Unmarshal

func (m *ComponentView) XXX_Unmarshal(b []byte) error

type Correlation

// Correlation records the computed correlation between two features in a Dataset.
type Correlation struct {
	// The name of the feature on the X axis
	X string `json:"x" protobuf:"bytes,1,opt,name=x"`
	// The name of the feature on the Y axis
	Y string `json:"y" protobuf:"bytes,2,opt,name=y"`
	// The computed correlation value between X and Y
	Value float64 `json:"value,omitempty" protobuf:"bytes,3,opt,name=value"`
	// The method used to compute the value (e.g. "pearson")
	Method string `json:"method,omitempty" protobuf:"bytes,4,opt,name=method"`
}

Correlation records the correlation between two features in a Dataset

func (*Correlation) DeepCopy

func (in *Correlation) DeepCopy() *Correlation

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Correlation.

func (*Correlation) DeepCopyInto

func (in *Correlation) DeepCopyInto(out *Correlation)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*Correlation) Descriptor

func (*Correlation) Descriptor() ([]byte, []int)

func (*Correlation) Marshal

func (m *Correlation) Marshal() (dAtA []byte, err error)

func (*Correlation) MarshalTo

func (m *Correlation) MarshalTo(dAtA []byte) (int, error)

func (*Correlation) MarshalToSizedBuffer

func (m *Correlation) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*Correlation) ProtoMessage

func (*Correlation) ProtoMessage()

func (*Correlation) Reset

func (m *Correlation) Reset()

func (*Correlation) Size

func (m *Correlation) Size() (n int)

func (*Correlation) String

func (this *Correlation) String() string

func (*Correlation) Unmarshal

func (m *Correlation) Unmarshal(dAtA []byte) error

func (*Correlation) XXX_DiscardUnknown

func (m *Correlation) XXX_DiscardUnknown()

func (*Correlation) XXX_Marshal

func (m *Correlation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*Correlation) XXX_Merge

func (m *Correlation) XXX_Merge(src proto.Message)

func (*Correlation) XXX_Size

func (m *Correlation) XXX_Size() int

func (*Correlation) XXX_Unmarshal

func (m *Correlation) XXX_Unmarshal(b []byte) error

type CorrelationSpec

// CorrelationSpec specifies how the correlations between features in a
// Dataset should be computed and which results should be stored.
type CorrelationSpec struct {
	// The minimum value of a computed correlation to be stored as a result.
	// NOTE(review): the default of 50 suggests this cutoff is expressed as a
	// percentage rather than a coefficient in [-1, 1] — confirm the unit.
	// +kubebuilder:default:=50
	// +kubebuilder:validation:Optional
	Cutoff *float64 `json:"cutoff,omitempty" protobuf:"bytes,1,opt,name=cutoff"`
	// The method to be used when computing correlations
	// +kubebuilder:default:="pearson"
	// +kubebuilder:validation:Optional
	Method *string `json:"method,omitempty" protobuf:"bytes,2,opt,name=method"`
	// The number of top correlations to be included in the correlation results
	// +kubebuilder:default:=10
	// +kubebuilder:validation:Optional
	Top *int32 `json:"top,omitempty" protobuf:"varint,3,opt,name=top"`
}

CorrelationSpec specifies how the correlations between features in a Dataset should be computed

func (*CorrelationSpec) DeepCopy

func (in *CorrelationSpec) DeepCopy() *CorrelationSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CorrelationSpec.

func (*CorrelationSpec) DeepCopyInto

func (in *CorrelationSpec) DeepCopyInto(out *CorrelationSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*CorrelationSpec) Descriptor

func (*CorrelationSpec) Descriptor() ([]byte, []int)

func (*CorrelationSpec) Marshal

func (m *CorrelationSpec) Marshal() (dAtA []byte, err error)

func (*CorrelationSpec) MarshalTo

func (m *CorrelationSpec) MarshalTo(dAtA []byte) (int, error)

func (*CorrelationSpec) MarshalToSizedBuffer

func (m *CorrelationSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*CorrelationSpec) ProtoMessage

func (*CorrelationSpec) ProtoMessage()

func (*CorrelationSpec) Reset

func (m *CorrelationSpec) Reset()

func (*CorrelationSpec) Size

func (m *CorrelationSpec) Size() (n int)

func (*CorrelationSpec) String

func (this *CorrelationSpec) String() string

func (*CorrelationSpec) Unmarshal

func (m *CorrelationSpec) Unmarshal(dAtA []byte) error

func (*CorrelationSpec) XXX_DiscardUnknown

func (m *CorrelationSpec) XXX_DiscardUnknown()

func (*CorrelationSpec) XXX_Marshal

func (m *CorrelationSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*CorrelationSpec) XXX_Merge

func (m *CorrelationSpec) XXX_Merge(src proto.Message)

func (*CorrelationSpec) XXX_Size

func (m *CorrelationSpec) XXX_Size() int

func (*CorrelationSpec) XXX_Unmarshal

func (m *CorrelationSpec) XXX_Unmarshal(b []byte) error

type CsvFileSpec

// CsvFileSpec specifies the format of a CSV (comma-separated values) file.
// NOTE(review): protobuf field number 2 is unused (delimiter=1, quote=3) —
// presumably reserved for a removed field; confirm before reusing it.
type CsvFileSpec struct {
	// The character used to separate fields (by default, a comma)
	// +kubebuilder:default:="comma"
	// +kubebuilder:validation:Optional
	Delimiter *Delimiter `json:"delimiter,omitempty" protobuf:"bytes,1,opt,name=delimiter"`
	// The character used for quotes (by default, a double quote ")
	// +kubebuilder:default:="double-quote"
	// +kubebuilder:validation:Optional
	QuoteChar QuoteChar `json:"quote,omitempty" protobuf:"bytes,3,opt,name=quote"`
	// The character used to escape the delimiter (by default, a single quote)
	// +kubebuilder:default:="single-quote"
	// +kubebuilder:validation:Optional
	EscapeChar EscapeChar `json:"escapeChar,omitempty" protobuf:"bytes,4,opt,name=escapeChar"`
	// The comment character used to split comments off the end of lines (by default, a hashtag #)
	// +kubebuilder:default:="#"
	// +kubebuilder:validation:Optional
	CommentChars *string `json:"commentChars,omitempty" protobuf:"bytes,5,opt,name=commentChars"`
	// Indicates if a header is present in the file
	// +kubebuilder:default:=true
	// +kubebuilder:validation:Optional
	Header *bool `json:"header,omitempty" protobuf:"varint,6,opt,name=header"`
	// The number of rows to skip from the top of the file
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:validation:Optional
	SkipRows *int32 `json:"skipRows,omitempty" protobuf:"varint,7,opt,name=skipRows"`
	// NullValues is a sequence of values to replace with NA.
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	NullValues *string `json:"nullValues,omitempty" protobuf:"bytes,8,opt,name=nullValues"`
	// The unicode encoding of the file (e.g. 'utf-8' for UTF-8 encoded text)
	// +kubebuilder:default:="utf-8"
	// +kubebuilder:validation:Optional
	Encoding *catalog.FileEncoding `json:"encoding,omitempty" protobuf:"bytes,9,opt,name=encoding"`
	// The maximum number of rows to read
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:validation:Optional
	MaxRows *int32 `json:"maxRows,omitempty" protobuf:"varint,10,opt,name=maxRows"`
	// Indicates if the read of the CSV file should fail if there are any errors
	// +kubebuilder:default:=true
	// +kubebuilder:validation:Optional
	Strict *bool `json:"strict,omitempty" protobuf:"varint,11,opt,name=strict"`
	// The compression type, if the file is compressed (by default, "none")
	// +kubebuilder:default:="none"
	// +kubebuilder:validation:Optional
	Compression *string `json:"compression,omitempty" protobuf:"bytes,12,opt,name=compression"`
	// Indicates if the file contains an index column
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	HasIndexColumn *bool `json:"hasIndexColumn,omitempty" protobuf:"varint,13,opt,name=hasIndexColumn"`
}

CsvFileSpec specifies the format of a CSV (comma-separated values) file

func (*CsvFileSpec) DeepCopy

func (in *CsvFileSpec) DeepCopy() *CsvFileSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CsvFileSpec.

func (*CsvFileSpec) DeepCopyInto

func (in *CsvFileSpec) DeepCopyInto(out *CsvFileSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*CsvFileSpec) Descriptor

func (*CsvFileSpec) Descriptor() ([]byte, []int)

func (*CsvFileSpec) Marshal

func (m *CsvFileSpec) Marshal() (dAtA []byte, err error)

func (*CsvFileSpec) MarshalTo

func (m *CsvFileSpec) MarshalTo(dAtA []byte) (int, error)

func (*CsvFileSpec) MarshalToSizedBuffer

func (m *CsvFileSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*CsvFileSpec) ProtoMessage

func (*CsvFileSpec) ProtoMessage()

func (*CsvFileSpec) Reset

func (m *CsvFileSpec) Reset()

func (*CsvFileSpec) Size

func (m *CsvFileSpec) Size() (n int)

func (*CsvFileSpec) String

func (this *CsvFileSpec) String() string

func (*CsvFileSpec) Unmarshal

func (m *CsvFileSpec) Unmarshal(dAtA []byte) error

func (*CsvFileSpec) XXX_DiscardUnknown

func (m *CsvFileSpec) XXX_DiscardUnknown()

func (*CsvFileSpec) XXX_Marshal

func (m *CsvFileSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*CsvFileSpec) XXX_Merge

func (m *CsvFileSpec) XXX_Merge(src proto.Message)

func (*CsvFileSpec) XXX_Size

func (m *CsvFileSpec) XXX_Size() int

func (*CsvFileSpec) XXX_Unmarshal

func (m *CsvFileSpec) XXX_Unmarshal(b []byte) error

type DataInputSpec added in v0.4.649

// DataInputSpec specifies the format and location of an input dataset.
// NOTE(review): protobuf field numbers start at 2 — field 1 is presumably
// reserved for a removed field; confirm before reusing it.
type DataInputSpec struct {
	// The location where the dataset is stored
	// +kubebuilder:validation:Optional
	Location *catalog.DataLocation `json:"location,omitempty" protobuf:"bytes,2,opt,name=location"`
	// The file format of the input file, if applicable
	// +kubebuilder:validation:Optional
	Format FlatFileFormatSpec `json:"format,omitempty" protobuf:"bytes,3,opt,name=format"`
}

DataInputSpec specifies the format and location of an input dataset

func (*DataInputSpec) DeepCopy added in v0.4.649

func (in *DataInputSpec) DeepCopy() *DataInputSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataInputSpec.

func (*DataInputSpec) DeepCopyInto added in v0.4.649

func (in *DataInputSpec) DeepCopyInto(out *DataInputSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataInputSpec) Descriptor added in v0.4.649

func (*DataInputSpec) Descriptor() ([]byte, []int)

func (*DataInputSpec) Marshal added in v0.4.649

func (m *DataInputSpec) Marshal() (dAtA []byte, err error)

func (*DataInputSpec) MarshalTo added in v0.4.649

func (m *DataInputSpec) MarshalTo(dAtA []byte) (int, error)

func (*DataInputSpec) MarshalToSizedBuffer added in v0.4.649

func (m *DataInputSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataInputSpec) ProtoMessage added in v0.4.649

func (*DataInputSpec) ProtoMessage()

func (*DataInputSpec) Reset added in v0.4.649

func (m *DataInputSpec) Reset()

func (*DataInputSpec) Size added in v0.4.649

func (m *DataInputSpec) Size() (n int)

func (*DataInputSpec) String added in v0.4.649

func (this *DataInputSpec) String() string

func (*DataInputSpec) Unmarshal added in v0.4.649

func (m *DataInputSpec) Unmarshal(dAtA []byte) error

func (*DataInputSpec) XXX_DiscardUnknown added in v0.4.649

func (m *DataInputSpec) XXX_DiscardUnknown()

func (*DataInputSpec) XXX_Marshal added in v0.4.649

func (m *DataInputSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataInputSpec) XXX_Merge added in v0.4.649

func (m *DataInputSpec) XXX_Merge(src proto.Message)

func (*DataInputSpec) XXX_Size added in v0.4.649

func (m *DataInputSpec) XXX_Size() int

func (*DataInputSpec) XXX_Unmarshal added in v0.4.649

func (m *DataInputSpec) XXX_Unmarshal(b []byte) error

type DataOutputSpec

// DataOutputSpec specifies the format, features, and output location of a
// transformed dataset.
type DataOutputSpec struct {
	// DatasetName is the name of a new Dataset resource that will be created.
	// If empty, the system will save the dataset to a file
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"`
	// The location where the dataset will be stored
	// +kubebuilder:validation:Optional
	Location *catalog.DataLocation `json:"location,omitempty" protobuf:"bytes,2,opt,name=location"`
	// The format of the dataset, applicable if the output location is a flat-file
	// +kubebuilder:default:="csv"
	// +kubebuilder:validation:Optional
	Format *FlatFileType `json:"format,omitempty" protobuf:"bytes,3,opt,name=format"`
	// The update strategy for the dataset in the case that the output location already exists (i.e a database table)
	// +kubebuilder:default:="upsert"
	// +kubebuilder:validation:Optional
	Action *catalog.UpdateStrategy `json:"action,omitempty" protobuf:"bytes,4,opt,name=action"`
	// If true, the database table specified by Location will be created if it does not exist
	// +kubebuilder:validation:Optional
	CreateTableIfNotExist *bool `json:"createTableIfNotExist,omitempty" protobuf:"varint,5,opt,name=createTableIfNotExist"`
	// Indicates if the features (i.e. all the columns) of the input dataset, excluding the
	// feature which was predicted on, will be included in the output dataset
	// +kubebuilder:default:=true
	// +kubebuilder:validation:Optional
	IncludeFeatures *bool `json:"includeFeatures,omitempty" protobuf:"varint,6,opt,name=includeFeatures"`
	// If true, SHAP values for each predicted row will be included as JSON as an additional column of the dataset.
	// NOTE(review): the json key "IncludeShapValues" is capitalized, unlike
	// the camelCase keys of the sibling fields and the protobuf name
	// "includeShapValues" — likely a typo, but changing it now would break
	// existing serialized manifests; confirm before fixing.
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	IncludeShapValues *bool `json:"IncludeShapValues,omitempty" protobuf:"varint,7,opt,name=includeShapValues"`
	// If true, an additional column will be added to the dataset which indicates if each predicted row was detected
	// to be an outlier. The predicting model must have been trained with an outlier model
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	DetectOutliers *bool `json:"detectOutliers,omitempty" protobuf:"varint,8,opt,name=detectOutliers"`
}

DataOutputSpec specifies the format, features, and output location of a transformed dataset

func (*DataOutputSpec) DeepCopy

func (in *DataOutputSpec) DeepCopy() *DataOutputSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataOutputSpec.

func (*DataOutputSpec) DeepCopyInto

func (in *DataOutputSpec) DeepCopyInto(out *DataOutputSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataOutputSpec) Descriptor

func (*DataOutputSpec) Descriptor() ([]byte, []int)

func (*DataOutputSpec) Marshal

func (m *DataOutputSpec) Marshal() (dAtA []byte, err error)

func (*DataOutputSpec) MarshalTo

func (m *DataOutputSpec) MarshalTo(dAtA []byte) (int, error)

func (*DataOutputSpec) MarshalToSizedBuffer

func (m *DataOutputSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataOutputSpec) ProtoMessage

func (*DataOutputSpec) ProtoMessage()

func (*DataOutputSpec) Reset

func (m *DataOutputSpec) Reset()

func (*DataOutputSpec) Size

func (m *DataOutputSpec) Size() (n int)

func (*DataOutputSpec) String

func (this *DataOutputSpec) String() string

func (*DataOutputSpec) Unmarshal

func (m *DataOutputSpec) Unmarshal(dAtA []byte) error

func (*DataOutputSpec) XXX_DiscardUnknown

func (m *DataOutputSpec) XXX_DiscardUnknown()

func (*DataOutputSpec) XXX_Marshal

func (m *DataOutputSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataOutputSpec) XXX_Merge

func (m *DataOutputSpec) XXX_Merge(src proto.Message)

func (*DataOutputSpec) XXX_Size

func (m *DataOutputSpec) XXX_Size() int

func (*DataOutputSpec) XXX_Unmarshal

func (m *DataOutputSpec) XXX_Unmarshal(b []byte) error

type DataPipeline

// DataPipeline represents the ETL flow from the data sources to a processed
// dataset, ready for training.
type DataPipeline struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	// Spec is the desired state of the DataPipeline
	Spec              DataPipelineSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	// Status is the observed state of the DataPipeline
	//+optional
	Status DataPipelineStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}

+kubebuilder:object:root=true +kubebuilder:resource:path=datapipelines,singular=datapipeline,shortName="dpl",categories={data,modela} +kubebuilder:subresource:status +kubebuilder:storageversion +kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status",description="" +kubebuilder:printcolumn:name="Description",type="string",JSONPath=".spec.description",description="" +kubebuilder:printcolumn:name="Schedule",type="string",JSONPath=".spec.schedule",description="" +kubebuilder:printcolumn:name="Last Run",type="date",JSONPath=".status.lastRun.at",description="" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" DataPipeline represents the ETL flow from the data sources to a processed dataset, ready for training.

func (*DataPipeline) AddFinalizer

func (wr *DataPipeline) AddFinalizer()

func (*DataPipeline) CreateOrUpdateCond

func (wr *DataPipeline) CreateOrUpdateCond(cond metav1.Condition)

Merge or update condition

func (*DataPipeline) DeepCopy

func (in *DataPipeline) DeepCopy() *DataPipeline

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipeline.

func (*DataPipeline) DeepCopyInto

func (in *DataPipeline) DeepCopyInto(out *DataPipeline)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataPipeline) DeepCopyObject

func (in *DataPipeline) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DataPipeline) Default

func (wr *DataPipeline) Default()

func (*DataPipeline) Descriptor

func (*DataPipeline) Descriptor() ([]byte, []int)

func (DataPipeline) GetCond

func (wr DataPipeline) GetCond(t string) metav1.Condition

func (DataPipeline) GetCondIdx

func (wr DataPipeline) GetCondIdx(t string) int

func (DataPipeline) GetObservedGeneration added in v0.6.252

func (datapipeline DataPipeline) GetObservedGeneration() int64

func (DataPipeline) GetStatus added in v0.6.252

func (datapipeline DataPipeline) GetStatus() proto.Message

func (DataPipeline) HasFinalizer

func (wr DataPipeline) HasFinalizer() bool

func (DataPipeline) IsReady

func (w DataPipeline) IsReady() bool

func (DataPipeline) ManifestURI added in v0.5.472

func (wr DataPipeline) ManifestURI() string

func (*DataPipeline) MarkFailed

func (in *DataPipeline) MarkFailed(err error)

func (*DataPipeline) MarkReady

func (in *DataPipeline) MarkReady()

func (*DataPipeline) Marshal

func (m *DataPipeline) Marshal() (dAtA []byte, err error)

func (*DataPipeline) MarshalTo

func (m *DataPipeline) MarshalTo(dAtA []byte) (int, error)

func (*DataPipeline) MarshalToSizedBuffer

func (m *DataPipeline) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataPipeline) ProtoMessage

func (*DataPipeline) ProtoMessage()

func (*DataPipeline) RemoveFinalizer

func (wr *DataPipeline) RemoveFinalizer()

func (*DataPipeline) Reset

func (m *DataPipeline) Reset()

func (DataPipeline) RootURI added in v0.5.472

func (wr DataPipeline) RootURI() string

func (*DataPipeline) SetObservedGeneration added in v0.6.252

func (datapipeline *DataPipeline) SetObservedGeneration(generation int64)

func (*DataPipeline) SetStatus added in v0.6.252

func (datapipeline *DataPipeline) SetStatus(status interface{})

func (*DataPipeline) SetUpdatedAt added in v0.6.252

func (datapipeline *DataPipeline) SetUpdatedAt(time *metav1.Time)

func (*DataPipeline) SetupWebhookWithManager

func (wr *DataPipeline) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*DataPipeline) Size

func (m *DataPipeline) Size() (n int)

func (*DataPipeline) String

func (this *DataPipeline) String() string

func (*DataPipeline) Unmarshal

func (m *DataPipeline) Unmarshal(dAtA []byte) error

func (DataPipeline) ValidateCreate

func (wr DataPipeline) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (DataPipeline) ValidateDelete

func (wr DataPipeline) ValidateDelete() error

func (DataPipeline) ValidateUpdate

func (wr DataPipeline) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*DataPipeline) XXX_DiscardUnknown

func (m *DataPipeline) XXX_DiscardUnknown()

func (*DataPipeline) XXX_Marshal

func (m *DataPipeline) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataPipeline) XXX_Merge

func (m *DataPipeline) XXX_Merge(src proto.Message)

func (*DataPipeline) XXX_Size

func (m *DataPipeline) XXX_Size() int

func (*DataPipeline) XXX_Unmarshal

func (m *DataPipeline) XXX_Unmarshal(b []byte) error

type DataPipelineConditionType

// DataPipelineConditionType is the string type of a condition recorded on a DataPipeline.
type DataPipelineConditionType string

Condition of the data pipeline

type DataPipelineList

// DataPipelineList contains a list of DataPipeline resources.
type DataPipelineList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	// Items is the list of DataPipeline resources
	Items           []DataPipeline `json:"items" protobuf:"bytes,2,rep,name=items"`
}

+kubebuilder:object:root=true DataPipelineList contains the list of DataPipeline resources

func (*DataPipelineList) DeepCopy

func (in *DataPipelineList) DeepCopy() *DataPipelineList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipelineList.

func (*DataPipelineList) DeepCopyInto

func (in *DataPipelineList) DeepCopyInto(out *DataPipelineList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataPipelineList) DeepCopyObject

func (in *DataPipelineList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DataPipelineList) Descriptor

func (*DataPipelineList) Descriptor() ([]byte, []int)

func (*DataPipelineList) Marshal

func (m *DataPipelineList) Marshal() (dAtA []byte, err error)

func (*DataPipelineList) MarshalTo

func (m *DataPipelineList) MarshalTo(dAtA []byte) (int, error)

func (*DataPipelineList) MarshalToSizedBuffer

func (m *DataPipelineList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataPipelineList) ProtoMessage

func (*DataPipelineList) ProtoMessage()

func (*DataPipelineList) Reset

func (m *DataPipelineList) Reset()

func (*DataPipelineList) Size

func (m *DataPipelineList) Size() (n int)

func (*DataPipelineList) String

func (this *DataPipelineList) String() string

func (*DataPipelineList) Unmarshal

func (m *DataPipelineList) Unmarshal(dAtA []byte) error

func (*DataPipelineList) XXX_DiscardUnknown

func (m *DataPipelineList) XXX_DiscardUnknown()

func (*DataPipelineList) XXX_Marshal

func (m *DataPipelineList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataPipelineList) XXX_Merge

func (m *DataPipelineList) XXX_Merge(src proto.Message)

func (*DataPipelineList) XXX_Size

func (m *DataPipelineList) XXX_Size() int

func (*DataPipelineList) XXX_Unmarshal

func (m *DataPipelineList) XXX_Unmarshal(b []byte) error

type DataPipelineRun

// DataPipelineRun represents one execution of a DataPipeline.
type DataPipelineRun struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	// Spec is the desired configuration of this pipeline run.
	Spec              DataPipelineRunSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	// Status is the observed state of this pipeline run.
	//+optional
	Status DataPipelineRunStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}

+kubebuilder:object:root=true +kubebuilder:resource:path=datapipelineruns,singular=datapipelinerun,shortName="dpr",categories={data,modela,all} +kubebuilder:subresource:status +kubebuilder:storageversion +kubebuilder:printcolumn:name="Status",type="string",JSONPath=".status.phase" +kubebuilder:printcolumn:name="Progress",type="string",JSONPath=".status.progress",priority=1 +kubebuilder:printcolumn:name="Pipeline",type="string",JSONPath=".spec.datapipelineName" +kubebuilder:printcolumn:name="StartedAt",type="date",JSONPath=".status.startTime",priority=1 +kubebuilder:printcolumn:name="CompletedAt",type="date",JSONPath=".status.completionTime",priority=1 +kubebuilder:printcolumn:name="Failure",type="string",JSONPath=".status.failureMessage" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp" DataPipelineRun represents one execution of the data pipeline

func (DataPipelineRun) Aborted

func (in DataPipelineRun) Aborted() bool

func (*DataPipelineRun) AddFinalizer

func (run *DataPipelineRun) AddFinalizer()

func (DataPipelineRun) CompletionAlert added in v0.4.601

func (run DataPipelineRun) CompletionAlert(notification catalog.NotificationSpec) *infra.Alert

Generate a dataset completion alert

func (*DataPipelineRun) CreateOrUpdateCond

func (run *DataPipelineRun) CreateOrUpdateCond(cond metav1.Condition)

Merge or update condition

func (*DataPipelineRun) DeepCopy

func (in *DataPipelineRun) DeepCopy() *DataPipelineRun

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipelineRun.

func (*DataPipelineRun) DeepCopyInto

func (in *DataPipelineRun) DeepCopyInto(out *DataPipelineRun)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataPipelineRun) DeepCopyObject

func (in *DataPipelineRun) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DataPipelineRun) Default

func (run *DataPipelineRun) Default()

func (*DataPipelineRun) Descriptor

func (*DataPipelineRun) Descriptor() ([]byte, []int)

func (DataPipelineRun) ErrorAlert added in v0.4.601

func (run DataPipelineRun) ErrorAlert(notification catalog.NotificationSpec, err error) *infra.Alert

func (DataPipelineRun) GetCond

func (run DataPipelineRun) GetCond(t string) metav1.Condition

func (DataPipelineRun) GetCondIdx

func (run DataPipelineRun) GetCondIdx(t string) int

func (DataPipelineRun) GetObservedGeneration added in v0.6.252

func (datapipelinerun DataPipelineRun) GetObservedGeneration() int64

func (DataPipelineRun) GetStatus added in v0.6.252

func (datapipelinerun DataPipelineRun) GetStatus() proto.Message

func (DataPipelineRun) HasFinalizer

func (run DataPipelineRun) HasFinalizer() bool

func (DataPipelineRun) IsCompleted

func (in DataPipelineRun) IsCompleted() bool

func (DataPipelineRun) IsFailed

func (in DataPipelineRun) IsFailed() bool

func (DataPipelineRun) IsReady

func (w DataPipelineRun) IsReady() bool

func (DataPipelineRun) IsRunning

func (in DataPipelineRun) IsRunning() bool

func (DataPipelineRun) IsSaved

func (w DataPipelineRun) IsSaved() bool

func (DataPipelineRun) ManifestURI added in v0.5.472

func (run DataPipelineRun) ManifestURI() string

func (*DataPipelineRun) MarkAborted added in v0.4.607

func (in *DataPipelineRun) MarkAborted(err error)

func (*DataPipelineRun) MarkComplete

func (in *DataPipelineRun) MarkComplete()

func (*DataPipelineRun) MarkFailed

func (in *DataPipelineRun) MarkFailed(err error)

func (*DataPipelineRun) MarkRunning

func (r *DataPipelineRun) MarkRunning()

func (*DataPipelineRun) Marshal

func (m *DataPipelineRun) Marshal() (dAtA []byte, err error)

func (*DataPipelineRun) MarshalTo

func (m *DataPipelineRun) MarshalTo(dAtA []byte) (int, error)

func (*DataPipelineRun) MarshalToSizedBuffer

func (m *DataPipelineRun) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (DataPipelineRun) Paused

func (in DataPipelineRun) Paused() bool

func (*DataPipelineRun) ProtoMessage

func (*DataPipelineRun) ProtoMessage()

func (*DataPipelineRun) RemoveFinalizer

func (run *DataPipelineRun) RemoveFinalizer()

func (*DataPipelineRun) Reset

func (m *DataPipelineRun) Reset()

func (DataPipelineRun) RootURI added in v0.5.472

func (run DataPipelineRun) RootURI() string

func (DataPipelineRun) RunStatus added in v0.4.614

func (run DataPipelineRun) RunStatus() *catalog.LastRunStatus

Return the state of the run as RunStatus

func (*DataPipelineRun) SetObservedGeneration added in v0.6.252

func (datapipelinerun *DataPipelineRun) SetObservedGeneration(generation int64)

func (*DataPipelineRun) SetStatus added in v0.6.252

func (datapipelinerun *DataPipelineRun) SetStatus(status interface{})

func (*DataPipelineRun) SetUpdatedAt added in v0.6.252

func (datapipelinerun *DataPipelineRun) SetUpdatedAt(time *metav1.Time)

func (*DataPipelineRun) SetupWebhookWithManager

func (wr *DataPipelineRun) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*DataPipelineRun) Size

func (m *DataPipelineRun) Size() (n int)

func (DataPipelineRun) StatusString

func (run DataPipelineRun) StatusString() string

func (*DataPipelineRun) String

func (this *DataPipelineRun) String() string

func (*DataPipelineRun) Unmarshal

func (m *DataPipelineRun) Unmarshal(dAtA []byte) error

func (DataPipelineRun) ValidateCreate

func (run DataPipelineRun) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (DataPipelineRun) ValidateDelete

func (run DataPipelineRun) ValidateDelete() error

func (DataPipelineRun) ValidateUpdate

func (run DataPipelineRun) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*DataPipelineRun) XXX_DiscardUnknown

func (m *DataPipelineRun) XXX_DiscardUnknown()

func (*DataPipelineRun) XXX_Marshal

func (m *DataPipelineRun) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataPipelineRun) XXX_Merge

func (m *DataPipelineRun) XXX_Merge(src proto.Message)

func (*DataPipelineRun) XXX_Size

func (m *DataPipelineRun) XXX_Size() int

func (*DataPipelineRun) XXX_Unmarshal

func (m *DataPipelineRun) XXX_Unmarshal(b []byte) error

type DataPipelineRunCondition

// DataPipelineRunCondition describes the state of a DataPipelineRun at a certain point.
type DataPipelineRunCondition struct {
	// Type of the data pipeline run condition.
	Type DataPipelineRunConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=DataPipelineRunConditionType"`
	// Status of the condition, one of True, False, Unknown.
	Status v1.ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=k8s.io/api/core/v1.ConditionStatus"`
	// Last time the condition transitioned from one status to another.
	LastTransitionTime *metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,4,opt,name=lastTransitionTime"`
	// The reason for the condition's last transition.
	Reason string `json:"reason,omitempty" protobuf:"bytes,5,opt,name=reason"`
	// A human readable message indicating details about the transition.
	Message string `json:"message,omitempty" protobuf:"bytes,6,opt,name=message"`
}

DataPipelineRunCondition describes the state of a data pipeline run at a certain point.

func (*DataPipelineRunCondition) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipelineRunCondition.

func (*DataPipelineRunCondition) DeepCopyInto

func (in *DataPipelineRunCondition) DeepCopyInto(out *DataPipelineRunCondition)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataPipelineRunCondition) Descriptor

func (*DataPipelineRunCondition) Descriptor() ([]byte, []int)

func (*DataPipelineRunCondition) Marshal

func (m *DataPipelineRunCondition) Marshal() (dAtA []byte, err error)

func (*DataPipelineRunCondition) MarshalTo

func (m *DataPipelineRunCondition) MarshalTo(dAtA []byte) (int, error)

func (*DataPipelineRunCondition) MarshalToSizedBuffer

func (m *DataPipelineRunCondition) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataPipelineRunCondition) ProtoMessage

func (*DataPipelineRunCondition) ProtoMessage()

func (*DataPipelineRunCondition) Reset

func (m *DataPipelineRunCondition) Reset()

func (*DataPipelineRunCondition) Size

func (m *DataPipelineRunCondition) Size() (n int)

func (*DataPipelineRunCondition) String

func (this *DataPipelineRunCondition) String() string

func (*DataPipelineRunCondition) Unmarshal

func (m *DataPipelineRunCondition) Unmarshal(dAtA []byte) error

func (*DataPipelineRunCondition) XXX_DiscardUnknown

func (m *DataPipelineRunCondition) XXX_DiscardUnknown()

func (*DataPipelineRunCondition) XXX_Marshal

func (m *DataPipelineRunCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataPipelineRunCondition) XXX_Merge

func (m *DataPipelineRunCondition) XXX_Merge(src proto.Message)

func (*DataPipelineRunCondition) XXX_Size

func (m *DataPipelineRunCondition) XXX_Size() int

func (*DataPipelineRunCondition) XXX_Unmarshal

func (m *DataPipelineRunCondition) XXX_Unmarshal(b []byte) error

type DataPipelineRunConditionType

type DataPipelineRunConditionType string

The type of a condition on the data pipeline run

type DataPipelineRunList

// DataPipelineRunList contains a list of DataPipelineRun objects.
type DataPipelineRunList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	// Items is the list of DataPipelineRun objects.
	Items           []DataPipelineRun `json:"items" protobuf:"bytes,2,rep,name=items"`
}

+kubebuilder:object:root=true DataPipelineRunList contains a list of DataPipelineRun

func (*DataPipelineRunList) DeepCopy

func (in *DataPipelineRunList) DeepCopy() *DataPipelineRunList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipelineRunList.

func (*DataPipelineRunList) DeepCopyInto

func (in *DataPipelineRunList) DeepCopyInto(out *DataPipelineRunList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataPipelineRunList) DeepCopyObject

func (in *DataPipelineRunList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DataPipelineRunList) Descriptor

func (*DataPipelineRunList) Descriptor() ([]byte, []int)

func (*DataPipelineRunList) Marshal

func (m *DataPipelineRunList) Marshal() (dAtA []byte, err error)

func (*DataPipelineRunList) MarshalTo

func (m *DataPipelineRunList) MarshalTo(dAtA []byte) (int, error)

func (*DataPipelineRunList) MarshalToSizedBuffer

func (m *DataPipelineRunList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataPipelineRunList) ProtoMessage

func (*DataPipelineRunList) ProtoMessage()

func (*DataPipelineRunList) Reset

func (m *DataPipelineRunList) Reset()

func (*DataPipelineRunList) Size

func (m *DataPipelineRunList) Size() (n int)

func (*DataPipelineRunList) String

func (this *DataPipelineRunList) String() string

func (*DataPipelineRunList) Unmarshal

func (m *DataPipelineRunList) Unmarshal(dAtA []byte) error

func (*DataPipelineRunList) XXX_DiscardUnknown

func (m *DataPipelineRunList) XXX_DiscardUnknown()

func (*DataPipelineRunList) XXX_Marshal

func (m *DataPipelineRunList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataPipelineRunList) XXX_Merge

func (m *DataPipelineRunList) XXX_Merge(src proto.Message)

func (*DataPipelineRunList) XXX_Size

func (m *DataPipelineRunList) XXX_Size() int

func (*DataPipelineRunList) XXX_Unmarshal

func (m *DataPipelineRunList) XXX_Unmarshal(b []byte) error

type DataPipelineRunPhase

// DataPipelineRunPhase is the coarse lifecycle phase of a DataPipelineRun.
type DataPipelineRunPhase string
// The set of phases that a DataPipelineRun can report.
const (
	DataPipelineRunPhasePending   DataPipelineRunPhase = "Pending"
	DataPipelineRunPhaseRunning   DataPipelineRunPhase = "Running"
	DataPipelineRunPhaseCompleted DataPipelineRunPhase = "Completed"
	DataPipelineRunPhaseFailed    DataPipelineRunPhase = "Failed"
	DataPipelineRunPhaseAborted   DataPipelineRunPhase = "Aborted"
	DataPipelineRunPhasePaused    DataPipelineRunPhase = "Paused"
)

type DataPipelineRunSpec

// DataPipelineRunSpec defines the desired state of a DataPipelineRun.
type DataPipelineRunSpec struct {
	// The data product version of the run
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	VersionName *string `json:"versionName,omitempty" protobuf:"bytes,1,opt,name=versionName"`
	// The name of the DataPipeline that this run executes
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	DataPipelineName *string `json:"datapipelineName,omitempty" protobuf:"bytes,2,opt,name=datapipelineName"`
	// The owner of the run, set to the owner of the pipeline
	// +kubebuilder:default:="no-one"
	// +kubebuilder:validation:Pattern="[a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*"
	// +kubebuilder:validation:Optional
	Owner *string `json:"owner,omitempty" protobuf:"bytes,4,opt,name=owner"`
	// Specify the resources for the data pipeline run
	// +kubebuilder:validation:Optional
	Resources catalog.ResourceSpec `json:"resources,omitempty" protobuf:"bytes,5,opt,name=resources"`
	// The priority of this data pipeline run. The default is medium.
	// +kubebuilder:default:=medium
	// +kubebuilder:validation:Optional
	Priority *catalog.PriorityLevel `json:"priority,omitempty" protobuf:"bytes,6,opt,name=priority"`
	// Set to true to pause the pipeline run
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Paused *bool `json:"paused,omitempty" protobuf:"varint,7,opt,name=paused"`
	// Set to true to abort the pipeline run
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Aborted *bool `json:"aborted,omitempty" protobuf:"varint,8,opt,name=aborted"`
	// The Lab where the data pipeline runs.
	// +kubebuilder:validation:Optional
	LabRef v1.ObjectReference `json:"labRef,omitempty" protobuf:"bytes,9,opt,name=labRef"`
	// The model class for this data pipeline run if the dataset was created by a model class
	// +kubebuilder:validation:Optional
	ModelClassName *string `json:"modelClassName,omitempty" protobuf:"bytes,10,opt,name=modelClassName"`
	// If this run was created by a model class run, this is the name of that model class run
	// +kubebuilder:validation:Optional
	ModelClassRunName *string `json:"modelClassRunName,omitempty" protobuf:"bytes,11,opt,name=modelClassRunName"`
}

DataPipelineRunSpec defines the desired state of a DataPipelineRun

func (*DataPipelineRunSpec) DeepCopy

func (in *DataPipelineRunSpec) DeepCopy() *DataPipelineRunSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipelineRunSpec.

func (*DataPipelineRunSpec) DeepCopyInto

func (in *DataPipelineRunSpec) DeepCopyInto(out *DataPipelineRunSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataPipelineRunSpec) Descriptor

func (*DataPipelineRunSpec) Descriptor() ([]byte, []int)

func (*DataPipelineRunSpec) Marshal

func (m *DataPipelineRunSpec) Marshal() (dAtA []byte, err error)

func (*DataPipelineRunSpec) MarshalTo

func (m *DataPipelineRunSpec) MarshalTo(dAtA []byte) (int, error)

func (*DataPipelineRunSpec) MarshalToSizedBuffer

func (m *DataPipelineRunSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataPipelineRunSpec) ProtoMessage

func (*DataPipelineRunSpec) ProtoMessage()

func (*DataPipelineRunSpec) Reset

func (m *DataPipelineRunSpec) Reset()

func (*DataPipelineRunSpec) Size

func (m *DataPipelineRunSpec) Size() (n int)

func (*DataPipelineRunSpec) String

func (this *DataPipelineRunSpec) String() string

func (*DataPipelineRunSpec) Unmarshal

func (m *DataPipelineRunSpec) Unmarshal(dAtA []byte) error

func (*DataPipelineRunSpec) XXX_DiscardUnknown

func (m *DataPipelineRunSpec) XXX_DiscardUnknown()

func (*DataPipelineRunSpec) XXX_Marshal

func (m *DataPipelineRunSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataPipelineRunSpec) XXX_Merge

func (m *DataPipelineRunSpec) XXX_Merge(src proto.Message)

func (*DataPipelineRunSpec) XXX_Size

func (m *DataPipelineRunSpec) XXX_Size() int

func (*DataPipelineRunSpec) XXX_Unmarshal

func (m *DataPipelineRunSpec) XXX_Unmarshal(b []byte) error

type DataPipelineRunStatus

// DataPipelineRunStatus defines the observed state of a DataPipelineRun.
type DataPipelineRunStatus struct {
	// RecipeRuns is the names of the recipe runs that occur during running of the pipeline.
	//+kubebuilder:validation:Optional
	RecipeRuns []string `json:"recipeRuns" protobuf:"bytes,1,rep,name=recipeRuns"`
	// Output is the resulting dataset location from the pipeline.
	//+kubebuilder:validation:Optional
	Output catalog.DataLocation `json:"output" protobuf:"bytes,2,opt,name=output"`
	// Phase is the current phase of the run.
	// +kubebuilder:default:="Pending"
	//+kubebuilder:validation:Optional
	Phase DataPipelineRunPhase `json:"phase" protobuf:"bytes,3,opt,name=phase"`
	// CompletedAt is the end time of the pipeline.
	// +kubebuilder:validation:Optional
	CompletedAt *metav1.Time `json:"completedAt,omitempty" protobuf:"bytes,5,opt,name=completedAt"`
	// ObservedGeneration is the last generation that was acted on.
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,6,opt,name=observedGeneration"`

	// FailureReason is set in case of terminal failure.
	// Borrowed from cluster api controller
	FailureReason *catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,7,opt,name=failureReason"`

	// FailureMessage is the human-readable message in case of terminal failure.
	FailureMessage *string `json:"failureMessage,omitempty" protobuf:"bytes,8,opt,name=failureMessage"`

	// Pipeline progress in percent; the progress takes into account the different stages of the pipeline.
	// +kubebuilder:validation:Optional
	Progress *int32 `json:"progress" protobuf:"varint,9,opt,name=progress"`
	// Logs holds the location of the log paths.
	// Fixed: the json tag contained a doubled comma ("logs,,omitempty").
	//+kubebuilder:validation:Optional
	Logs catalog.Logs `json:"logs,omitempty" protobuf:"bytes,10,opt,name=logs"`

	// UpdatedAt is the last time the object was updated.
	//+kubebuilder:validation:Optional
	UpdatedAt *metav1.Time `json:"updatedAt,omitempty" protobuf:"bytes,11,opt,name=updatedAt"`

	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []metav1.Condition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,12,rep,name=conditions"`
}

DataPipelineRunStatus defines the observed state of DataPipelineRun

func (*DataPipelineRunStatus) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipelineRunStatus.

func (*DataPipelineRunStatus) DeepCopyInto

func (in *DataPipelineRunStatus) DeepCopyInto(out *DataPipelineRunStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataPipelineRunStatus) Descriptor

func (*DataPipelineRunStatus) Descriptor() ([]byte, []int)

func (*DataPipelineRunStatus) Marshal

func (m *DataPipelineRunStatus) Marshal() (dAtA []byte, err error)

func (*DataPipelineRunStatus) MarshalTo

func (m *DataPipelineRunStatus) MarshalTo(dAtA []byte) (int, error)

func (*DataPipelineRunStatus) MarshalToSizedBuffer

func (m *DataPipelineRunStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataPipelineRunStatus) ProtoMessage

func (*DataPipelineRunStatus) ProtoMessage()

func (*DataPipelineRunStatus) Reset

func (m *DataPipelineRunStatus) Reset()

func (*DataPipelineRunStatus) Size

func (m *DataPipelineRunStatus) Size() (n int)

func (*DataPipelineRunStatus) String

func (this *DataPipelineRunStatus) String() string

func (*DataPipelineRunStatus) Unmarshal

func (m *DataPipelineRunStatus) Unmarshal(dAtA []byte) error

func (*DataPipelineRunStatus) XXX_DiscardUnknown

func (m *DataPipelineRunStatus) XXX_DiscardUnknown()

func (*DataPipelineRunStatus) XXX_Marshal

func (m *DataPipelineRunStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataPipelineRunStatus) XXX_Merge

func (m *DataPipelineRunStatus) XXX_Merge(src proto.Message)

func (*DataPipelineRunStatus) XXX_Size

func (m *DataPipelineRunStatus) XXX_Size() int

func (*DataPipelineRunStatus) XXX_Unmarshal

func (m *DataPipelineRunStatus) XXX_Unmarshal(b []byte) error

type DataPipelineSpec

// DataPipelineSpec defines the desired state of a DataPipeline.
type DataPipelineSpec struct {
	// VersionName is the data product version of the data pipeline
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	VersionName *string `json:"versionName,omitempty" protobuf:"bytes,1,opt,name=versionName"`
	// The model class for this pipeline
	// +kubebuilder:validation:Optional
	ModelClassName *string `json:"modelClassName,omitempty" protobuf:"bytes,2,opt,name=modelClassName"`
	// Description of the data pipeline
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Description *string `json:"description,omitempty" protobuf:"bytes,3,opt,name=description"`
	// DatasetSelector is used to select datasets for processing in the pipeline
	// +kubebuilder:validation:Optional
	DatasetSelector map[string]string `json:"datasetSelector,omitempty" protobuf:"bytes,4,opt,name=datasetSelector"`
	// RecipeOrder defines the list of recipes and the order they need to run
	// +kubebuilder:validation:Optional
	Recipes []RecipePartSpec `json:"recipes,omitempty" protobuf:"bytes,5,rep,name=recipes"`
	// The output definition
	// +kubebuilder:validation:Optional
	Output DataOutputSpec `json:"output,omitempty" protobuf:"bytes,6,opt,name=output"`
	// Schedule for running the pipeline
	// +kubebuilder:validation:Optional
	Schedule catalog.RunSchedule `json:"schedule,omitempty" protobuf:"bytes,7,opt,name=schedule"`
	// Specification for notification
	// +kubebuilder:validation:Optional
	Notification catalog.NotificationSpec `json:"notification,omitempty" protobuf:"bytes,8,opt,name=notification"`
	// Owner of this data pipeline
	// +kubebuilder:default:="no-one"
	// +kubebuilder:validation:Pattern="[a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*"
	// +kubebuilder:validation:Optional
	Owner *string `json:"owner,omitempty" protobuf:"bytes,9,opt,name=owner"`
	// Resources are the hardware resource requirements.
	// +kubebuilder:validation:Optional
	Resources catalog.ResourceSpec `json:"resources,omitempty" protobuf:"bytes,10,opt,name=resources"`
	// The priority of this data pipeline. The default is medium.
	// +kubebuilder:default:="medium"
	// +kubebuilder:validation:Optional
	Priority *catalog.PriorityLevel `json:"priority,omitempty" protobuf:"bytes,11,opt,name=priority"`
	// Set to true to pause the scheduled (cron) runs of the pipeline
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Paused *bool `json:"paused,omitempty" protobuf:"varint,12,opt,name=paused"`
	// TTL for the data pipeline run. Set to 0 if no garbage collection is needed.
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Optional
	TTL *int32 `json:"ttl,omitempty" protobuf:"varint,13,opt,name=ttl"`
}

DataPipelineSpec defines the desired state of a DataPipeline

func (*DataPipelineSpec) DeepCopy

func (in *DataPipelineSpec) DeepCopy() *DataPipelineSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipelineSpec.

func (*DataPipelineSpec) DeepCopyInto

func (in *DataPipelineSpec) DeepCopyInto(out *DataPipelineSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataPipelineSpec) Descriptor

func (*DataPipelineSpec) Descriptor() ([]byte, []int)

func (*DataPipelineSpec) Marshal

func (m *DataPipelineSpec) Marshal() (dAtA []byte, err error)

func (*DataPipelineSpec) MarshalTo

func (m *DataPipelineSpec) MarshalTo(dAtA []byte) (int, error)

func (*DataPipelineSpec) MarshalToSizedBuffer

func (m *DataPipelineSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataPipelineSpec) ProtoMessage

func (*DataPipelineSpec) ProtoMessage()

func (*DataPipelineSpec) Reset

func (m *DataPipelineSpec) Reset()

func (*DataPipelineSpec) Size

func (m *DataPipelineSpec) Size() (n int)

func (*DataPipelineSpec) String

func (this *DataPipelineSpec) String() string

func (*DataPipelineSpec) Unmarshal

func (m *DataPipelineSpec) Unmarshal(dAtA []byte) error

func (*DataPipelineSpec) XXX_DiscardUnknown

func (m *DataPipelineSpec) XXX_DiscardUnknown()

func (*DataPipelineSpec) XXX_Marshal

func (m *DataPipelineSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataPipelineSpec) XXX_Merge

func (m *DataPipelineSpec) XXX_Merge(src proto.Message)

func (*DataPipelineSpec) XXX_Size

func (m *DataPipelineSpec) XXX_Size() int

func (*DataPipelineSpec) XXX_Unmarshal

func (m *DataPipelineSpec) XXX_Unmarshal(b []byte) error

type DataPipelineStatus

// DataPipelineStatus is the observed state of the DataPipeline object.
type DataPipelineStatus struct {
	// ObservedGeneration is the last generation that was acted on.
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,1,opt,name=observedGeneration"`
	// Last time the object was updated.
	//+kubebuilder:validation:Optional
	UpdatedAt *metav1.Time `json:"updatedAt,omitempty" protobuf:"bytes,2,opt,name=updatedAt"`
	// Schedule records the last time a data pipeline run was created by the schedule.
	//+kubebuilder:validation:Optional
	Schedule catalog.RunScheduleStatus `json:"schedule,omitempty" protobuf:"bytes,3,opt,name=schedule"`
	// The name of the last data pipeline run.
	//+kubebuilder:validation:Optional
	LastRunName string `json:"lastRunName,omitempty" protobuf:"bytes,4,opt,name=lastRunName"`
	// The number of data pipeline runs.
	// Fixed: int32 is encoded with the varint wire type, not bytes
	// (matches every other int32/int64 field in this package).
	//+kubebuilder:validation:Optional
	RunsCount int32 `json:"runsCount,omitempty" protobuf:"varint,5,opt,name=runsCount"`
	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []metav1.Condition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,6,rep,name=conditions"`
}

DataPipelineStatus is the observed state of the DataPipeline object.

func (*DataPipelineStatus) DeepCopy

func (in *DataPipelineStatus) DeepCopy() *DataPipelineStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipelineStatus.

func (*DataPipelineStatus) DeepCopyInto

func (in *DataPipelineStatus) DeepCopyInto(out *DataPipelineStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataPipelineStatus) Descriptor

func (*DataPipelineStatus) Descriptor() ([]byte, []int)

func (*DataPipelineStatus) Marshal

func (m *DataPipelineStatus) Marshal() (dAtA []byte, err error)

func (*DataPipelineStatus) MarshalTo

func (m *DataPipelineStatus) MarshalTo(dAtA []byte) (int, error)

func (*DataPipelineStatus) MarshalToSizedBuffer

func (m *DataPipelineStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataPipelineStatus) ProtoMessage

func (*DataPipelineStatus) ProtoMessage()

func (*DataPipelineStatus) Reset

func (m *DataPipelineStatus) Reset()

func (*DataPipelineStatus) Size

func (m *DataPipelineStatus) Size() (n int)

func (*DataPipelineStatus) String

func (this *DataPipelineStatus) String() string

func (*DataPipelineStatus) Unmarshal

func (m *DataPipelineStatus) Unmarshal(dAtA []byte) error

func (*DataPipelineStatus) XXX_DiscardUnknown

func (m *DataPipelineStatus) XXX_DiscardUnknown()

func (*DataPipelineStatus) XXX_Marshal

func (m *DataPipelineStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataPipelineStatus) XXX_Merge

func (m *DataPipelineStatus) XXX_Merge(src proto.Message)

func (*DataPipelineStatus) XXX_Size

func (m *DataPipelineStatus) XXX_Size() int

func (*DataPipelineStatus) XXX_Unmarshal

func (m *DataPipelineStatus) XXX_Unmarshal(b []byte) error

type DataProduct

// DataProduct represents a single data product namespace and its defaults.
type DataProduct struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	// Spec is the desired configuration of the data product.
	Spec              DataProductSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	// Status is the observed state of the data product.
	//+optional
	Status DataProductStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}

+kubebuilder:object:root=true +kubebuilder:resource:path=dataproducts,singular=dataproduct,shortName="prod",categories={data,modela,all} +kubebuilder:subresource:status +kubebuilder:storageversion +kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Task",type="string",JSONPath=".spec.task" +kubebuilder:printcolumn:name="Public",type="boolean",JSONPath=".spec.public" +kubebuilder:printcolumn:name="Errors",type="integer",JSONPath=".status.errorAlertsCount" +kubebuilder:printcolumn:name="Data Sources",type="integer",JSONPath=".status.datasourcesCount",priority=1 +kubebuilder:printcolumn:name="Datasets",type="integer",JSONPath=".status.datasetsCount",priority=1 +kubebuilder:printcolumn:name="Studies",type="integer",JSONPath=".status.studiesCount",priority=1 +kubebuilder:printcolumn:name="ModelsCount",type="integer",JSONPath=".status.modelsCount",priority=1 +kubebuilder:printcolumn:name="Predictors",type="integer",JSONPath=".status.predictorsCount",priority=1 +kubebuilder:printcolumn:name="Apps",type="integer",JSONPath=".status.appsCount",priority=1 +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" DataProduct represents a single DataProduct namespace, which contains all non-infrastructure resources. Additionally, it specifies default parameters for resources to be created under the namespace, such as workload class and storage location

func (*DataProduct) AddFinalizer

func (product *DataProduct) AddFinalizer()

func (DataProduct) CreateNamespace

func (product DataProduct) CreateNamespace() *v1.Namespace

func (*DataProduct) CreateOrUpdateCondition added in v0.6.310

func (product *DataProduct) CreateOrUpdateCondition(cond metav1.Condition)

Merge or update condition

func (DataProduct) DataProductRole added in v0.6.310

func (product DataProduct) DataProductRole() *rbacv1.Role

func (DataProduct) DataProductRoleBinding added in v0.6.310

func (product DataProduct) DataProductRoleBinding() *rbacv1.RoleBinding

func (DataProduct) DataProductServiceAccount added in v0.6.310

func (product DataProduct) DataProductServiceAccount() *v1.ServiceAccount

func (*DataProduct) DeepCopy

func (in *DataProduct) DeepCopy() *DataProduct

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProduct.

func (*DataProduct) DeepCopyInto

func (in *DataProduct) DeepCopyInto(out *DataProduct)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataProduct) DeepCopyObject

func (in *DataProduct) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DataProduct) Default

func (product *DataProduct) Default()

No defaults in this current release

func (*DataProduct) Descriptor

func (*DataProduct) Descriptor() ([]byte, []int)

func (DataProduct) GetCondition added in v0.6.310

func (product DataProduct) GetCondition(t DataProductConditionType) metav1.Condition

func (DataProduct) GetConditionIndex added in v0.6.310

func (product DataProduct) GetConditionIndex(t string) int

func (DataProduct) GetObservedGeneration added in v0.6.252

func (dataproduct DataProduct) GetObservedGeneration() int64

func (*DataProduct) GetRolesForAccount added in v0.4.547

func (product *DataProduct) GetRolesForAccount(account *infra.Account) []string

func (DataProduct) GetStatus added in v0.6.252

func (dataproduct DataProduct) GetStatus() proto.Message

func (DataProduct) HasFinalizer

func (product DataProduct) HasFinalizer() bool

func (DataProduct) IsClassification

func (product DataProduct) IsClassification() bool

func (*DataProduct) MarkNamespaceNotReady added in v0.6.310

func (dataproduct *DataProduct) MarkNamespaceNotReady(reason string, message string)

func (*DataProduct) MarkNamespaceReady added in v0.6.310

func (dataproduct *DataProduct) MarkNamespaceReady()

func (*DataProduct) MarkRbacNotReady added in v0.6.310

func (dataproduct *DataProduct) MarkRbacNotReady(reason string, message string)

func (*DataProduct) MarkRbacReady added in v0.6.310

func (dataproduct *DataProduct) MarkRbacReady()

func (*DataProduct) Marshal

func (m *DataProduct) Marshal() (dAtA []byte, err error)

func (*DataProduct) MarshalTo

func (m *DataProduct) MarshalTo(dAtA []byte) (int, error)

func (*DataProduct) MarshalToSizedBuffer

func (m *DataProduct) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (DataProduct) NamespaceReady added in v0.6.310

func (product DataProduct) NamespaceReady() bool

func (DataProduct) PrefixArchiveURI added in v0.5.472

func (product DataProduct) PrefixArchiveURI(uri string) string

func (DataProduct) PrefixDepotURI added in v0.5.472

func (product DataProduct) PrefixDepotURI(uri string) string

func (DataProduct) PrefixLiveURI added in v0.5.472

func (product DataProduct) PrefixLiveURI(uri string) string

func (*DataProduct) ProtoMessage

func (*DataProduct) ProtoMessage()

func (*DataProduct) RemoveFinalizer

func (product *DataProduct) RemoveFinalizer()

func (*DataProduct) Reset

func (m *DataProduct) Reset()

func (*DataProduct) SetObservedGeneration added in v0.6.252

func (dataproduct *DataProduct) SetObservedGeneration(generation int64)

func (*DataProduct) SetStatus added in v0.6.252

func (dataproduct *DataProduct) SetStatus(status interface{})

func (*DataProduct) SetUpdatedAt added in v0.6.252

func (dataproduct *DataProduct) SetUpdatedAt(time *metav1.Time)

func (*DataProduct) SetupWebhookWithManager

func (product *DataProduct) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*DataProduct) Size

func (m *DataProduct) Size() (n int)

func (*DataProduct) String

func (this *DataProduct) String() string

func (*DataProduct) Unmarshal

func (m *DataProduct) Unmarshal(dAtA []byte) error

func (DataProduct) ValidateCreate

func (product DataProduct) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (DataProduct) ValidateDelete

func (product DataProduct) ValidateDelete() error

func (DataProduct) ValidateUpdate

func (product DataProduct) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*DataProduct) XXX_DiscardUnknown

func (m *DataProduct) XXX_DiscardUnknown()

func (*DataProduct) XXX_Marshal

func (m *DataProduct) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataProduct) XXX_Merge

func (m *DataProduct) XXX_Merge(src proto.Message)

func (*DataProduct) XXX_Size

func (m *DataProduct) XXX_Size() int

func (*DataProduct) XXX_Unmarshal

func (m *DataProduct) XXX_Unmarshal(b []byte) error

func (DataProduct) YamlURI added in v0.5.472

func (product DataProduct) YamlURI() string

type DataProductConditionType

// DataProductConditionType is the type label for conditions recorded on a DataProduct.
type DataProductConditionType string

// Condition types recorded in DataProductStatus.Conditions.
const (
	// DataProductNamespaceReady indicates the namespace backing the DataProduct is ready
	// (set via MarkNamespaceReady / MarkNamespaceNotReady).
	DataProductNamespaceReady DataProductConditionType = "NamespaceReady"
	// DataProductRbacReady indicates the RBAC objects for the DataProduct are ready
	// (set via MarkRbacReady / MarkRbacNotReady).
	DataProductRbacReady      DataProductConditionType = "RbacReady"
)

type DataProductList

// DataProductList contains a list of DataProduct resources.
type DataProductList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	// Items is the list of DataProduct objects.
	Items           []DataProduct `json:"items" protobuf:"bytes,2,rep,name=items"`
}

+kubebuilder:object:root=true DataProductList contains a list of Data Products

func (*DataProductList) DeepCopy

func (in *DataProductList) DeepCopy() *DataProductList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProductList.

func (*DataProductList) DeepCopyInto

func (in *DataProductList) DeepCopyInto(out *DataProductList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataProductList) DeepCopyObject

func (in *DataProductList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DataProductList) Descriptor

func (*DataProductList) Descriptor() ([]byte, []int)

func (*DataProductList) Marshal

func (m *DataProductList) Marshal() (dAtA []byte, err error)

func (*DataProductList) MarshalTo

func (m *DataProductList) MarshalTo(dAtA []byte) (int, error)

func (*DataProductList) MarshalToSizedBuffer

func (m *DataProductList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataProductList) ProtoMessage

func (*DataProductList) ProtoMessage()

func (*DataProductList) Reset

func (m *DataProductList) Reset()

func (*DataProductList) Size

func (m *DataProductList) Size() (n int)

func (*DataProductList) String

func (this *DataProductList) String() string

func (*DataProductList) Unmarshal

func (m *DataProductList) Unmarshal(dAtA []byte) error

func (*DataProductList) XXX_DiscardUnknown

func (m *DataProductList) XXX_DiscardUnknown()

func (*DataProductList) XXX_Marshal

func (m *DataProductList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataProductList) XXX_Merge

func (m *DataProductList) XXX_Merge(src proto.Message)

func (*DataProductList) XXX_Size

func (m *DataProductList) XXX_Size() int

func (*DataProductList) XXX_Unmarshal

func (m *DataProductList) XXX_Unmarshal(b []byte) error

type DataProductSpec

// DataProductSpec defines the desired state of a DataProduct.
type DataProductSpec struct {
	// The name of the Account which created the object, which exists in the same tenant as the object
	// +kubebuilder:default:="no-one"
	// +kubebuilder:validation:Optional
	Owner *string `json:"owner,omitempty" protobuf:"bytes,1,opt,name=owner"`
	// Public indicates if the Data Product can be accessed without any permissions.
	// If enabled, all resources and verbs will be accessible
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Public *bool `json:"public,omitempty" protobuf:"varint,2,opt,name=public"`
	// The reference to the Tenant of the Data Product (which must equal the namespace of the Data Product)
	// +kubebuilder:validation:Optional
	TenantRef *v1.ObjectReference `json:"tenantRef,omitempty" protobuf:"bytes,3,opt,name=tenantRef"`
	// GitLocation specifies the location of a Git repository where resources under the Data Product will be saved as YAML
	// +kubebuilder:validation:Optional
	GitLocation GitLocation `json:"gitLocation,omitempty" protobuf:"bytes,4,opt,name=gitLocation"`
	// CacheBucketName specifies the name of the Virtual Bucket which workloads for resources under the Data Product
	// will use internally for caching workloads. Setting CacheBucketName to a valid Virtual Bucket is highly
	// recommended for stability and performance
	// NOTE(review): protobuf field number 5 is skipped (4 -> 6) — presumably a removed field; confirm before reusing 5.
	// +kubebuilder:validation:Required
	// +required
	CacheBucketName string `json:"cacheBucketName,omitempty" protobuf:"bytes,6,opt,name=cacheBucketName"`
	// The name of the Lab which will be used by all training and data-related workloads, in the case
	// that the resource creating the workload does not specify a Lab
	// NOTE(review): json tag lacks ",omitempty" although the field is an optional pointer — confirm intended.
	// +kubebuilder:validation:Pattern="[a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*"
	// +kubebuilder:validation:MaxLength=63
	// +kubebuilder:validation:Optional
	DefaultLabName *string `json:"defaultLabName" protobuf:"bytes,7,opt,name=defaultLabName"`
	// The name of the Serving Site which will be used by all inference-related workloads, in the case
	// that the resource creating the workload does not specify a Serving Site
	// NOTE(review): json tag lacks ",omitempty" although the field is an optional pointer — confirm intended.
	// +kubebuilder:validation:MaxLength=63
	// +kubebuilder:validation:Pattern="[a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*"
	// +kubebuilder:validation:Optional
	DefaultServingSiteName *string `json:"defaultServingSiteName" protobuf:"bytes,8,opt,name=defaultServingSiteName"`
	// The name of the Virtual Bucket that resources under the Data Product will use by default
	// +kubebuilder:validation:MaxLength=63
	// +kubebuilder:validation:Pattern="[a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*"
	// +kubebuilder:validation:Optional
	DefaultBucketName *string `json:"defaultBucketName,omitempty" protobuf:"bytes,9,opt,name=defaultBucketName"`
	// Task specifies the default machine learning task of the Data Product (classification, regression, etc.)
	// +kubebuilder:validation:Optional
	Task *catalog.MLTask `json:"task,omitempty" protobuf:"bytes,10,opt,name=task"`
	// Subtask specifies the default subtask relevant to the primary task (text classification, image object detection, etc.)
	// +kubebuilder:default:=none
	// +kubebuilder:validation:Optional
	SubTask *catalog.MLSubtask `json:"subtask,omitempty" protobuf:"bytes,11,opt,name=subtask"`
	// User-provided description of the object
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	// +kubebuilder:validation:MaxLength=512
	Description *string `json:"description,omitempty" protobuf:"bytes,12,opt,name=description"`
	// The default notification specification for all resources under the DataProduct
	// +kubebuilder:validation:Optional
	Notification *catalog.NotificationSpec `json:"notification,omitempty" protobuf:"bytes,13,opt,name=notification"`
	// The default resource allocation for model training and data workloads that takes place under the DataProduct
	// +kubebuilder:validation:Optional
	DefaultTrainingResources catalog.ResourceSpec `json:"trainingResources,omitempty" protobuf:"bytes,14,opt,name=trainingResources"`
	// The default resource allocation for model serving workloads that takes place under the DataProduct
	// +kubebuilder:validation:Optional
	DefaultServingResources catalog.ResourceSpec `json:"servingResources,omitempty" protobuf:"bytes,15,opt,name=servingResources"`
	// The default priority level assigned to Jobs created by resources under the Data Product
	// +kubebuilder:validation:Optional
	// +kubebuilder:default:="medium"
	DefaultPriority *catalog.PriorityLevel `json:"priority,omitempty" protobuf:"bytes,16,opt,name=priority"`
	// The implementation-specific color assigned to the Data Product, for visual purposes only
	// +kubebuilder:default:="none"
	// +kubebuilder:validation:Optional
	Color *catalog.Color `json:"color,omitempty" protobuf:"bytes,17,opt,name=color"`
	// Approval specifies the default model approval requirements
	// +kubebuilder:validation:Optional
	Approval *ApprovalSpec `json:"approval,omitempty" protobuf:"bytes,18,opt,name=approval"`
	// Permissions defines the set of permissions applied to each Account when accessing resources within the Data Product
	// +kubebuilder:validation:Optional
	Permissions catalog.PermissionsSpec `json:"permissions,omitempty" protobuf:"bytes,19,opt,name=permissions"`
	// Tags contain user-defined tags associated with the Data Product
	// +kubebuilder:validation:Optional
	Tags []string `json:"tags,omitempty" protobuf:"bytes,20,opt,name=tags"`
}

DataProductSpec defines the desired state of the DataProduct

func (*DataProductSpec) DeepCopy

func (in *DataProductSpec) DeepCopy() *DataProductSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProductSpec.

func (*DataProductSpec) DeepCopyInto

func (in *DataProductSpec) DeepCopyInto(out *DataProductSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataProductSpec) Descriptor

func (*DataProductSpec) Descriptor() ([]byte, []int)

func (*DataProductSpec) Marshal

func (m *DataProductSpec) Marshal() (dAtA []byte, err error)

func (*DataProductSpec) MarshalTo

func (m *DataProductSpec) MarshalTo(dAtA []byte) (int, error)

func (*DataProductSpec) MarshalToSizedBuffer

func (m *DataProductSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataProductSpec) ProtoMessage

func (*DataProductSpec) ProtoMessage()

func (*DataProductSpec) Reset

func (m *DataProductSpec) Reset()

func (*DataProductSpec) Size

func (m *DataProductSpec) Size() (n int)

func (*DataProductSpec) String

func (this *DataProductSpec) String() string

func (*DataProductSpec) Unmarshal

func (m *DataProductSpec) Unmarshal(dAtA []byte) error

func (*DataProductSpec) XXX_DiscardUnknown

func (m *DataProductSpec) XXX_DiscardUnknown()

func (*DataProductSpec) XXX_Marshal

func (m *DataProductSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataProductSpec) XXX_Merge

func (m *DataProductSpec) XXX_Merge(src proto.Message)

func (*DataProductSpec) XXX_Size

func (m *DataProductSpec) XXX_Size() int

func (*DataProductSpec) XXX_Unmarshal

func (m *DataProductSpec) XXX_Unmarshal(b []byte) error

type DataProductStatus

// DataProductStatus defines the observed state of a DataProduct.
// Most fields are resource counters maintained by the DataProduct controller.
type DataProductStatus struct {
	// ObservedGeneration is the last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,1,opt,name=observedGeneration"`
	// The last time the object was updated
	//+kubebuilder:validation:Optional
	UpdatedAt *metav1.Time `json:"updatedAt,omitempty" protobuf:"bytes,2,opt,name=updatedAt"`
	// The number of DataSource resources that exist under the namespace
	//+kubebuilder:validation:Optional
	DatasourcesCount int32 `json:"datasourcesCount,omitempty" protobuf:"varint,3,opt,name=datasourcesCount"`
	// The number of Dataset resources that exist under the namespace
	//+kubebuilder:validation:Optional
	DatasetsCount int32 `json:"datasetsCount,omitempty" protobuf:"varint,4,opt,name=datasetsCount"`
	// The number of DataPipeline resources that exist under the namespace
	// NOTE(review): json/protobuf name is singular "dataPipelineCount" while the Go field is plural — confirm intended; renaming would break stored objects.
	//+kubebuilder:validation:Optional
	DataPipelinesCount int32 `json:"dataPipelineCount,omitempty" protobuf:"varint,5,opt,name=dataPipelineCount"`
	// The number of DataPipelineRun resources that exist under the namespace
	// NOTE(review): protobuf name "totalDataPipelineRuns" differs from the json name "dataPipelineRunsCount" — confirm intended; changing either breaks wire compatibility.
	//+kubebuilder:validation:Optional
	DataPipelineRunsCount int32 `json:"dataPipelineRunsCount,omitempty" protobuf:"varint,6,opt,name=totalDataPipelineRuns"`
	// The number of Study resources that exist under the namespace
	//+kubebuilder:validation:Optional
	StudiesCount int32 `json:"studiesCount,omitempty" protobuf:"varint,7,opt,name=studiesCount"`
	// The number of Model resources that exist under the namespace
	//+kubebuilder:validation:Optional
	ModelsCount int32 `json:"modelsCount,omitempty" protobuf:"varint,8,opt,name=modelsCount"`
	// The number of Predictor resources that exist under the namespace
	// NOTE(review): protobuf field numbers 9, 10 and 12 are skipped in this struct — presumably removed fields; confirm before reusing them.
	//+kubebuilder:validation:Optional
	PredictorsCount int32 `json:"predictorsCount,omitempty" protobuf:"varint,11,opt,name=predictorsCount"`
	// The number of DataApp resources that exist under the namespace
	//+kubebuilder:validation:Optional
	DataAppsCount int32 `json:"dataAppsCount,omitempty" protobuf:"varint,13,opt,name=dataAppsCount"`
	// The number of BatchPrediction resources that exist under the namespace
	//+kubebuilder:validation:Optional
	PredictionsCount int32 `json:"predictionsCount,omitempty" protobuf:"varint,14,opt,name=predictionsCount"`
	// The number of informative alerts produced under the namespace
	//+kubebuilder:validation:Optional
	InfoAlertsCount int32 `json:"infoAlertsCount,omitempty" protobuf:"varint,15,opt,name=infoAlertsCount"`
	// The number of error alerts produced under the namespace
	// NOTE(review): Go field name "ErrorsAlertsCount" does not match the json name "errorAlertsCount" — a Go-side rename would be source-compatible-breaking only; confirm.
	//+kubebuilder:validation:Optional
	ErrorsAlertsCount int32 `json:"errorAlertsCount,omitempty" protobuf:"varint,16,opt,name=errorAlertsCount"`
	// Count the number of model classes
	//+kubebuilder:validation:Optional
	ModelClassesCount int32 `json:"modelClassesCount,omitempty" protobuf:"varint,17,opt,name=modelClassesCount"`
	// In the case of failure, the DataProduct resource controller will set this field with a failure reason
	FailureReason *catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,18,opt,name=failureReason"`
	// In the case of failure, the DataProduct resource controller will set this field with a failure message
	FailureMessage *string `json:"failureMessage,omitempty" protobuf:"bytes,19,opt,name=failureMessage"`
	// The name of the DataProductVersion which currently represents the latest version of the DataProduct.
	// Newly-created resources will be instantiated with this version by default
	BaselineVersion *string `json:"baselineVersion,omitempty" protobuf:"bytes,20,opt,name=baselineVersion"`
	// Conditions is the set of status conditions (e.g. NamespaceReady, RbacReady) merged by type.
	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []metav1.Condition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,21,rep,name=conditions"`
}

DataProductStatus defines the observed state of DataProduct

func (*DataProductStatus) DeepCopy

func (in *DataProductStatus) DeepCopy() *DataProductStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProductStatus.

func (*DataProductStatus) DeepCopyInto

func (in *DataProductStatus) DeepCopyInto(out *DataProductStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataProductStatus) Descriptor

func (*DataProductStatus) Descriptor() ([]byte, []int)

func (*DataProductStatus) Marshal

func (m *DataProductStatus) Marshal() (dAtA []byte, err error)

func (*DataProductStatus) MarshalTo

func (m *DataProductStatus) MarshalTo(dAtA []byte) (int, error)

func (*DataProductStatus) MarshalToSizedBuffer

func (m *DataProductStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataProductStatus) ProtoMessage

func (*DataProductStatus) ProtoMessage()

func (*DataProductStatus) Reset

func (m *DataProductStatus) Reset()

func (*DataProductStatus) Size

func (m *DataProductStatus) Size() (n int)

func (*DataProductStatus) String

func (this *DataProductStatus) String() string

func (*DataProductStatus) Unmarshal

func (m *DataProductStatus) Unmarshal(dAtA []byte) error

func (*DataProductStatus) XXX_DiscardUnknown

func (m *DataProductStatus) XXX_DiscardUnknown()

func (*DataProductStatus) XXX_Marshal

func (m *DataProductStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataProductStatus) XXX_Merge

func (m *DataProductStatus) XXX_Merge(src proto.Message)

func (*DataProductStatus) XXX_Size

func (m *DataProductStatus) XXX_Size() int

func (*DataProductStatus) XXX_Unmarshal

func (m *DataProductStatus) XXX_Unmarshal(b []byte) error

type DataSource

// DataSource defines the specification for the file format and column-level
// schema of data to be used within Modela.
type DataSource struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	// Spec holds the desired state of the DataSource.
	Spec              DataSourceSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	//+optional
	// Status holds the observed state, maintained by the DataSource controller.
	Status DataSourceStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}

+kubebuilder:object:root=true +kubebuilder:resource:path=datasources,singular=datasource,shortName="dsrc",categories={data,modela,all} +kubebuilder:storageversion +kubebuilder:subresource:status +kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status",description="" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.versionName" +kubebuilder:printcolumn:name="Format",type="string",JSONPath=".spec.flatfile.fileType" +kubebuilder:printcolumn:name="Type",type="string",JSONPath=".spec.datasetType" +kubebuilder:printcolumn:name="Task",type="string",JSONPath=".spec.task" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp" DataSource defines the specification for the file format and column-level schema of data to be used within Modela

func ParseDataSourceYaml added in v0.6.252

func ParseDataSourceYaml(content []byte) (*DataSource, error)

func (DataSource) ActiveColumns

func (datasource DataSource) ActiveColumns() (string, error)

func (*DataSource) AddColumn

func (datasource *DataSource) AddColumn(
	name string,
	dtype catalog.DataType,
	dformat catalog.DataDomain,
	Ignore bool,
	Target bool,
	Nullable bool)

func (*DataSource) AddFinalizer

func (datasource *DataSource) AddFinalizer()

func (DataSource) CountActiveAttributes

func (datasource DataSource) CountActiveAttributes() int

CountActiveAttributes counts the number of attributes that we should not ignore

func (DataSource) CountTargetAttributes

func (datasource DataSource) CountTargetAttributes() int

Count the number of attributes that are marked as targets

func (*DataSource) CreateOrUpdateCond

func (datasource *DataSource) CreateOrUpdateCond(cond metav1.Condition)

Merge or update condition

func (*DataSource) DeepCopy

func (in *DataSource) DeepCopy() *DataSource

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataSource.

func (*DataSource) DeepCopyInto

func (in *DataSource) DeepCopyInto(out *DataSource)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataSource) DeepCopyObject

func (in *DataSource) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DataSource) Default

func (datasource *DataSource) Default()

func (*DataSource) Descriptor

func (*DataSource) Descriptor() ([]byte, []int)

func (DataSource) GetCond

func (datasource DataSource) GetCond(t string) metav1.Condition

func (DataSource) GetCondIdx

func (datasource DataSource) GetCondIdx(t string) int

func (DataSource) GetObservedGeneration added in v0.6.252

func (datasource DataSource) GetObservedGeneration() int64

func (DataSource) GetStatus added in v0.6.252

func (datasource DataSource) GetStatus() proto.Message

func (DataSource) HasFinalizer

func (datasource DataSource) HasFinalizer() bool

func (*DataSource) HaveValidationRules added in v0.4.654

func (datasource *DataSource) HaveValidationRules() bool

func (DataSource) InferTask

func (datasource DataSource) InferTask() catalog.MLTask

func (DataSource) IsReady

func (datasource DataSource) IsReady() bool

func (DataSource) ManifestURI added in v0.5.472

func (datasource DataSource) ManifestURI() string

func (*DataSource) MarkFieldAsTarget

func (datasource *DataSource) MarkFieldAsTarget(target string)

func (*DataSource) MarkLastFieldAsTarget

func (datasource *DataSource) MarkLastFieldAsTarget()

func (*DataSource) MarkReady

func (datasource *DataSource) MarkReady()

func (*DataSource) Marshal

func (m *DataSource) Marshal() (dAtA []byte, err error)

func (*DataSource) MarshalTo

func (m *DataSource) MarshalTo(dAtA []byte) (int, error)

func (*DataSource) MarshalToSizedBuffer

func (m *DataSource) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataSource) ProtoMessage

func (*DataSource) ProtoMessage()

func (*DataSource) RemoveFinalizer

func (datasource *DataSource) RemoveFinalizer()

func (*DataSource) Reset

func (m *DataSource) Reset()

func (DataSource) RootURI added in v0.5.472

func (datasource DataSource) RootURI() string

func (*DataSource) SetObservedGeneration added in v0.6.252

func (datasource *DataSource) SetObservedGeneration(generation int64)

func (*DataSource) SetStatus added in v0.6.252

func (datasource *DataSource) SetStatus(status interface{})

func (*DataSource) SetUpdatedAt added in v0.6.252

func (datasource *DataSource) SetUpdatedAt(time *metav1.Time)

func (*DataSource) SetupWebhookWithManager

func (datasource *DataSource) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*DataSource) Size

func (m *DataSource) Size() (n int)

func (*DataSource) String

func (this *DataSource) String() string

func (*DataSource) Unmarshal

func (m *DataSource) Unmarshal(dAtA []byte) error

func (DataSource) Validate

func (datasource DataSource) Validate() (bool, []metav1.StatusCause)

func (DataSource) ValidateCreate

func (datasource DataSource) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (DataSource) ValidateDelete

func (datasource DataSource) ValidateDelete() error

func (DataSource) ValidateUpdate

func (datasource DataSource) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*DataSource) XXX_DiscardUnknown

func (m *DataSource) XXX_DiscardUnknown()

func (*DataSource) XXX_Marshal

func (m *DataSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataSource) XXX_Merge

func (m *DataSource) XXX_Merge(src proto.Message)

func (*DataSource) XXX_Size

func (m *DataSource) XXX_Size() int

func (*DataSource) XXX_Unmarshal

func (m *DataSource) XXX_Unmarshal(b []byte) error

type DataSourceConditionType

// DataSourceConditionType is the type label for conditions recorded on a DataSource.
type DataSourceConditionType string

Condition type on the data source

type DataSourceList

// DataSourceList contains a list of DataSource resources.
type DataSourceList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	// Items is the list of DataSource objects.
	Items           []DataSource `json:"items" protobuf:"bytes,2,rep,name=items"`
}

DataSourceList contains a list of DataSource objects +kubebuilder:object:root=true

func (*DataSourceList) DeepCopy

func (in *DataSourceList) DeepCopy() *DataSourceList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataSourceList.

func (*DataSourceList) DeepCopyInto

func (in *DataSourceList) DeepCopyInto(out *DataSourceList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataSourceList) DeepCopyObject

func (in *DataSourceList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DataSourceList) Descriptor

func (*DataSourceList) Descriptor() ([]byte, []int)

func (*DataSourceList) Marshal

func (m *DataSourceList) Marshal() (dAtA []byte, err error)

func (*DataSourceList) MarshalTo

func (m *DataSourceList) MarshalTo(dAtA []byte) (int, error)

func (*DataSourceList) MarshalToSizedBuffer

func (m *DataSourceList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataSourceList) ProtoMessage

func (*DataSourceList) ProtoMessage()

func (*DataSourceList) Reset

func (m *DataSourceList) Reset()

func (*DataSourceList) Size

func (m *DataSourceList) Size() (n int)

func (*DataSourceList) String

func (this *DataSourceList) String() string

func (*DataSourceList) Unmarshal

func (m *DataSourceList) Unmarshal(dAtA []byte) error

func (*DataSourceList) XXX_DiscardUnknown

func (m *DataSourceList) XXX_DiscardUnknown()

func (*DataSourceList) XXX_Marshal

func (m *DataSourceList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataSourceList) XXX_Merge

func (m *DataSourceList) XXX_Merge(src proto.Message)

func (*DataSourceList) XXX_Size

func (m *DataSourceList) XXX_Size() int

func (*DataSourceList) XXX_Unmarshal

func (m *DataSourceList) XXX_Unmarshal(b []byte) error

type DataSourceSpec

// DataSourceSpec defines the desired state of a DataSource.
type DataSourceSpec struct {
	// The name of the Account which created the object, which exists in the same tenant as the object
	// +kubebuilder:default:="no-one"
	// +kubebuilder:validation:Optional
	Owner *string `json:"owner,omitempty" protobuf:"bytes,1,opt,name=owner"`
	// VersionName references the name of a Data Product Version that describes the version of the resource
	// +kubebuilder:validation:MaxLength=63
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	VersionName string `json:"versionName,omitempty" protobuf:"bytes,2,opt,name=versionName"`
	// User-provided description of the object
	// +kubebuilder:validation:MaxLength=512
	// +kubebuilder:validation:Optional
	Description *string `json:"description,omitempty" protobuf:"bytes,3,opt,name=description"`
	// The type of dataset; currently, the only supported type is `tabular`
	// +kubebuilder:default:="tabular"
	// +kubebuilder:validation:Optional
	DatasetType *catalog.DatasetType `json:"datasetType,omitempty" protobuf:"bytes,4,opt,name=datasetType"`
	// The schema which will be used during the ingestion process of any Dataset resources which specify the DataSource
	Schema Schema `json:"schema,omitempty" protobuf:"bytes,5,opt,name=schema"`
	// FlatFile defines the parameters needed to read a flat file.
	// +kubebuilder:validation:Optional
	FlatFile *FlatFileFormatSpec `json:"flatfile,omitempty" protobuf:"bytes,6,opt,name=flatfile"`
	// If true, this datasource is for labeled data.
	// +kubebuilder:default:=true
	// +kubebuilder:validation:Optional
	Labeled *bool `json:"labeled,omitempty" protobuf:"varint,7,opt,name=labeled"`
	// The specification for how incoming data should be sampled (i.e. how many rows should be used). Applicable
	// primarily for very large datasets
	// NOTE(review): protobuf field number 8 is skipped (7 -> 9) — presumably a removed field; confirm before reusing 8.
	Sample SampleSpec `json:"sample,omitempty" protobuf:"bytes,9,opt,name=sample"`
	// The machine learning task relevant to the Dataset. This field *must* be the same as the Data Source of the object
	// NOTE(review): this comment reads as if copied from DatasetSpec — on a DataSource it is circular; confirm intended wording.
	// +kubebuilder:validation:Optional
	Task catalog.MLTask `json:"task,omitempty" protobuf:"bytes,10,opt,name=task"`
	// The machine learning subtask relevant to the primary task (text classification, image object detection, etc.)
	// +kubebuilder:default:="none"
	// +kubebuilder:validation:Optional
	SubTask *catalog.MLSubtask `json:"subtask,omitempty" protobuf:"bytes,11,opt,name=subtask"`
	// List of relationships to other data sources
	// +kubebuilder:validation:Optional
	Relationships []RelationshipSpec `json:"relationships,omitempty" protobuf:"bytes,12,rep,name=relationships"`
	// Labeling specifies how to automatically label the dataset using positive and negative rules
	// +kubebuilder:validation:Optional
	Labeling LabelingSpec `json:"labeling,omitempty" protobuf:"bytes,13,rep,name=labeling"`
	// InferredFrom specifies the location of the data that was used to generate the schema of the Data Source
	// +kubebuilder:validation:Optional
	InferredFrom *catalog.DataLocation `json:"inferredFrom,omitempty" protobuf:"bytes,14,opt,name=inferredFrom"`
	// The specification for tests for a new dataset
	// +kubebuilder:validation:Optional
	UnitTestsTemplate catalog.TestSuite `json:"unitTestsTemplate,omitempty" protobuf:"bytes,15,opt,name=unitTestsTemplate"`
	// IngestMethod selects the feature-store ingestion method for the data source
	// — presumably one of the catalog.FeatureStoreIngestType values; confirm against the catalog package.
	// +kubebuilder:validation:Optional
	IngestMethod *catalog.FeatureStoreIngestType `json:"ingestMethod,omitempty" protobuf:"bytes,16,opt,name=ingestMethod"`
}

DataSourceSpec defines the desired state of the DataSource

func (*DataSourceSpec) DeepCopy

func (in *DataSourceSpec) DeepCopy() *DataSourceSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataSourceSpec.

func (*DataSourceSpec) DeepCopyInto

func (in *DataSourceSpec) DeepCopyInto(out *DataSourceSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataSourceSpec) Descriptor

func (*DataSourceSpec) Descriptor() ([]byte, []int)

func (*DataSourceSpec) Marshal

func (m *DataSourceSpec) Marshal() (dAtA []byte, err error)

func (*DataSourceSpec) MarshalTo

func (m *DataSourceSpec) MarshalTo(dAtA []byte) (int, error)

func (*DataSourceSpec) MarshalToSizedBuffer

func (m *DataSourceSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataSourceSpec) ProtoMessage

func (*DataSourceSpec) ProtoMessage()

func (*DataSourceSpec) Reset

func (m *DataSourceSpec) Reset()

func (*DataSourceSpec) Size

func (m *DataSourceSpec) Size() (n int)

func (*DataSourceSpec) String

func (this *DataSourceSpec) String() string

func (*DataSourceSpec) Unmarshal

func (m *DataSourceSpec) Unmarshal(dAtA []byte) error

func (*DataSourceSpec) XXX_DiscardUnknown

func (m *DataSourceSpec) XXX_DiscardUnknown()

func (*DataSourceSpec) XXX_Marshal

func (m *DataSourceSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataSourceSpec) XXX_Merge

func (m *DataSourceSpec) XXX_Merge(src proto.Message)

func (*DataSourceSpec) XXX_Size

func (m *DataSourceSpec) XXX_Size() int

func (*DataSourceSpec) XXX_Unmarshal

func (m *DataSourceSpec) XXX_Unmarshal(b []byte) error

type DataSourceStatus

type DataSourceStatus struct {
	// The number of columns determined to be present in the DataSource's schema
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:validation:Optional
	Cols int32 `json:"cols,omitempty" protobuf:"varint,1,opt,name=cols"`
	// ObservedGeneration is the last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,2,opt,name=observedGeneration"`
	// Last time the DataSource was used to create a Dataset
	//+kubebuilder:validation:Optional
	LastDatasetCreatedAt *metav1.Time `json:"lastDatasetCreatedAt,omitempty" protobuf:"bytes,3,opt,name=lastDatasetCreatedAt"`
	// The name of the last Dataset created using the DataSource
	//+kubebuilder:validation:Optional
	LastDatasetName string `json:"lastDatasetName,omitempty" protobuf:"bytes,4,opt,name=lastDatasetName"`
	// Last time the object was updated
	//+kubebuilder:validation:Optional
	UpdatedAt *metav1.Time `json:"updatedAt,omitempty" protobuf:"bytes,5,opt,name=updatedAt"`
	// In the case of failure, the DataSource resource controller will set this field with a failure reason
	//+kubebuilder:validation:Optional
	FailureReason *catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,6,opt,name=failureReason"`
	// In the case of failure, the DataSource resource controller will set this field with a failure message
	//+kubebuilder:validation:Optional
	FailureMessage *string `json:"failureMessage,omitempty" protobuf:"bytes,7,opt,name=failureMessage"`
	// Conditions is the set of observed conditions of the DataSource.
	// Entries are merged by condition type on strategic-merge patch,
	// per the patchMergeKey/patchStrategy markers below.
	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []metav1.Condition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,8,rep,name=conditions"`
}

DataSourceStatus defines the observed state of a DataSource object

func (*DataSourceStatus) DeepCopy

func (in *DataSourceStatus) DeepCopy() *DataSourceStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataSourceStatus.

func (*DataSourceStatus) DeepCopyInto

func (in *DataSourceStatus) DeepCopyInto(out *DataSourceStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataSourceStatus) Descriptor

func (*DataSourceStatus) Descriptor() ([]byte, []int)

func (*DataSourceStatus) Marshal

func (m *DataSourceStatus) Marshal() (dAtA []byte, err error)

func (*DataSourceStatus) MarshalTo

func (m *DataSourceStatus) MarshalTo(dAtA []byte) (int, error)

func (*DataSourceStatus) MarshalToSizedBuffer

func (m *DataSourceStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataSourceStatus) ProtoMessage

func (*DataSourceStatus) ProtoMessage()

func (*DataSourceStatus) Reset

func (m *DataSourceStatus) Reset()

func (*DataSourceStatus) Size

func (m *DataSourceStatus) Size() (n int)

func (*DataSourceStatus) String

func (this *DataSourceStatus) String() string

func (*DataSourceStatus) Unmarshal

func (m *DataSourceStatus) Unmarshal(dAtA []byte) error

func (*DataSourceStatus) XXX_DiscardUnknown

func (m *DataSourceStatus) XXX_DiscardUnknown()

func (*DataSourceStatus) XXX_Marshal

func (m *DataSourceStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataSourceStatus) XXX_Merge

func (m *DataSourceStatus) XXX_Merge(src proto.Message)

func (*DataSourceStatus) XXX_Size

func (m *DataSourceStatus) XXX_Size() int

func (*DataSourceStatus) XXX_Unmarshal

func (m *DataSourceStatus) XXX_Unmarshal(b []byte) error

type Dataset

type Dataset struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	// Spec holds the desired state of the Dataset.
	Spec              DatasetSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	// Status holds the state observed by the controller.
	//+optional
	Status DatasetStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}

+kubebuilder:object:root=true +kubebuilder:resource:path=datasets,shortName=ds,singular=dataset,categories={data,modela,all} +kubebuilder:subresource:status +kubebuilder:storageversion +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".status.version" +kubebuilder:printcolumn:name="Data Source",type="string",JSONPath=".spec.dataSourceName" +kubebuilder:printcolumn:name="Type",type="string",JSONPath=".spec.type" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" +kubebuilder:printcolumn:name="Last Snapshot",type="date",JSONPath=".status.lastSnapshotAt",description="" Dataset represents the specification for a dataset to be processed from an external data source

func ParseDatasetYaml

func ParseDatasetYaml(content []byte) (*Dataset, error)

func (*Dataset) AddFinalizer

func (dataset *Dataset) AddFinalizer()

func (*Dataset) CreateOrUpdateCondition added in v0.6.252

func (dataset *Dataset) CreateOrUpdateCondition(cond metav1.Condition)

Merge or update condition

func (*Dataset) DeepCopy

func (in *Dataset) DeepCopy() *Dataset

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Dataset.

func (*Dataset) DeepCopyInto

func (in *Dataset) DeepCopyInto(out *Dataset)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*Dataset) DeepCopyObject

func (in *Dataset) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*Dataset) Default

func (dataset *Dataset) Default()

func (*Dataset) Deleted

func (dataset *Dataset) Deleted() bool

func (*Dataset) Descriptor

func (*Dataset) Descriptor() ([]byte, []int)

func (*Dataset) GetCondition added in v0.6.252

func (dataset *Dataset) GetCondition(condType DatasetConditionType) metav1.Condition

func (*Dataset) GetConditionIndex added in v0.6.252

func (dataset *Dataset) GetConditionIndex(t string) int

func (*Dataset) GetObservedGeneration added in v0.6.252

func (dataset *Dataset) GetObservedGeneration() int64

func (*Dataset) GetStatus added in v0.6.252

func (dataset *Dataset) GetStatus() proto.Message

func (*Dataset) HasFinalizer

func (dataset *Dataset) HasFinalizer() bool

func (*Dataset) IsFeatureGroup added in v0.5.387

func (dataset *Dataset) IsFeatureGroup() bool

func (*Dataset) IsGroup added in v0.5.155

func (dataset *Dataset) IsGroup() bool

func (*Dataset) MarkNotSaved added in v0.6.252

func (dataset *Dataset) MarkNotSaved()

func (*Dataset) MarkSaved

func (dataset *Dataset) MarkSaved()

func (*Dataset) Marshal

func (m *Dataset) Marshal() (dAtA []byte, err error)

func (*Dataset) MarshalTo

func (m *Dataset) MarshalTo(dAtA []byte) (int, error)

func (*Dataset) MarshalToSizedBuffer

func (m *Dataset) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*Dataset) OpName

func (r *Dataset) OpName() string

func (*Dataset) ProtoMessage

func (*Dataset) ProtoMessage()

func (*Dataset) RemoveFinalizer

func (dataset *Dataset) RemoveFinalizer()

func (*Dataset) Reset

func (m *Dataset) Reset()

func (*Dataset) RootURI added in v0.5.472

func (dataset *Dataset) RootURI() string

func (*Dataset) Saved

func (dataset *Dataset) Saved() bool

func (*Dataset) SetObservedGeneration added in v0.6.252

func (dataset *Dataset) SetObservedGeneration(generation int64)

func (*Dataset) SetStatus added in v0.6.252

func (dataset *Dataset) SetStatus(status interface{})

func (*Dataset) SetUpdatedAt added in v0.6.252

func (dataset *Dataset) SetUpdatedAt(time *metav1.Time)

func (*Dataset) SetupWebhookWithManager

func (dataset *Dataset) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*Dataset) Size

func (m *Dataset) Size() (n int)

func (*Dataset) String

func (this *Dataset) String() string

func (*Dataset) Unmarshal

func (m *Dataset) Unmarshal(dAtA []byte) error

func (*Dataset) Validate added in v0.6.252

func (dataset *Dataset) Validate(field string) ([]metav1.StatusCause, bool)

func (*Dataset) ValidateCreate

func (dataset *Dataset) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*Dataset) ValidateDelete

func (dataset *Dataset) ValidateDelete() error

func (*Dataset) ValidateUpdate

func (dataset *Dataset) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*Dataset) XXX_DiscardUnknown

func (m *Dataset) XXX_DiscardUnknown()

func (*Dataset) XXX_Marshal

func (m *Dataset) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*Dataset) XXX_Merge

func (m *Dataset) XXX_Merge(src proto.Message)

func (*Dataset) XXX_Size

func (m *Dataset) XXX_Size() int

func (*Dataset) XXX_Unmarshal

func (m *Dataset) XXX_Unmarshal(b []byte) error

type DatasetConditionType

// DatasetConditionType is the type discriminator for conditions recorded on a Dataset.
type DatasetConditionType string

const (
	// DatasetSaved indicates the Dataset has been persisted (see MarkSaved/Saved on Dataset).
	DatasetSaved DatasetConditionType = "Saved"
)

type DatasetGroupByStatus added in v0.5.172

type DatasetGroupByStatus struct {
	// The locations of the datasets files. Each file is the group
	// +kubebuilder:validation:Optional
	DatasetsURI string `json:"datasetsURI,omitempty" protobuf:"bytes,1,opt,name=datasetsURI"`
	// The locations of the dataset profile files.
	// +kubebuilder:validation:Optional
	ProfilesURI string `json:"profilesURI,omitempty" protobuf:"bytes,2,opt,name=profilesURI"`
	// The locations of the report files. One report for each key
	// +kubebuilder:validation:Optional
	ReportsURI string `json:"reportsURI,omitempty" protobuf:"bytes,3,opt,name=reportsURI"`
	// The locations of the unit-test result files, one per key.
	// NOTE(review): the original comment duplicated the ReportsURI text — confirm intent.
	// +kubebuilder:validation:Optional
	UnitTestsURI string `json:"unitTestsURI,omitempty" protobuf:"bytes,4,opt,name=unitTestsURI"`
	// Holds the in-progress worker results; when a worker finishes, the location of its result files is updated here.
	// NOTE(review): protobuf field number 5 is skipped (4 jumps to 6); do not reuse 5 — it may belong to a removed field.
	// +kubebuilder:validation:Optional
	WorkerResults []catalog.WorkerRunResult `json:"workerResults,omitempty" protobuf:"bytes,6,rep,name=workerResults"`
}

Represents the status of a group-by operation over a Dataset.

func (*DatasetGroupByStatus) DeepCopy added in v0.5.173

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetGroupByStatus.

func (*DatasetGroupByStatus) DeepCopyInto added in v0.5.173

func (in *DatasetGroupByStatus) DeepCopyInto(out *DatasetGroupByStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DatasetGroupByStatus) Descriptor added in v0.5.172

func (*DatasetGroupByStatus) Descriptor() ([]byte, []int)

func (*DatasetGroupByStatus) Marshal added in v0.5.172

func (m *DatasetGroupByStatus) Marshal() (dAtA []byte, err error)

func (*DatasetGroupByStatus) MarshalTo added in v0.5.172

func (m *DatasetGroupByStatus) MarshalTo(dAtA []byte) (int, error)

func (*DatasetGroupByStatus) MarshalToSizedBuffer added in v0.5.172

func (m *DatasetGroupByStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DatasetGroupByStatus) ProtoMessage added in v0.5.172

func (*DatasetGroupByStatus) ProtoMessage()

func (*DatasetGroupByStatus) Reset added in v0.5.172

func (m *DatasetGroupByStatus) Reset()

func (*DatasetGroupByStatus) Size added in v0.5.172

func (m *DatasetGroupByStatus) Size() (n int)

func (*DatasetGroupByStatus) String added in v0.5.172

func (this *DatasetGroupByStatus) String() string

func (*DatasetGroupByStatus) Unmarshal added in v0.5.172

func (m *DatasetGroupByStatus) Unmarshal(dAtA []byte) error

func (*DatasetGroupByStatus) XXX_DiscardUnknown added in v0.5.172

func (m *DatasetGroupByStatus) XXX_DiscardUnknown()

func (*DatasetGroupByStatus) XXX_Marshal added in v0.5.172

func (m *DatasetGroupByStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DatasetGroupByStatus) XXX_Merge added in v0.5.172

func (m *DatasetGroupByStatus) XXX_Merge(src proto.Message)

func (*DatasetGroupByStatus) XXX_Size added in v0.5.172

func (m *DatasetGroupByStatus) XXX_Size() int

func (*DatasetGroupByStatus) XXX_Unmarshal added in v0.5.172

func (m *DatasetGroupByStatus) XXX_Unmarshal(b []byte) error

type DatasetList

// DatasetList contains a list of Dataset resources.
type DatasetList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	// Items is the list of Datasets.
	Items           []Dataset `json:"items" protobuf:"bytes,2,rep,name=items"`
}

+kubebuilder:object:root=true DatasetList contains a list of Datasets

func (*DatasetList) DeepCopy

func (in *DatasetList) DeepCopy() *DatasetList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetList.

func (*DatasetList) DeepCopyInto

func (in *DatasetList) DeepCopyInto(out *DatasetList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DatasetList) DeepCopyObject

func (in *DatasetList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DatasetList) Descriptor

func (*DatasetList) Descriptor() ([]byte, []int)

func (*DatasetList) Marshal

func (m *DatasetList) Marshal() (dAtA []byte, err error)

func (*DatasetList) MarshalTo

func (m *DatasetList) MarshalTo(dAtA []byte) (int, error)

func (*DatasetList) MarshalToSizedBuffer

func (m *DatasetList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DatasetList) ProtoMessage

func (*DatasetList) ProtoMessage()

func (*DatasetList) Reset

func (m *DatasetList) Reset()

func (*DatasetList) Size

func (m *DatasetList) Size() (n int)

func (*DatasetList) String

func (this *DatasetList) String() string

func (*DatasetList) Unmarshal

func (m *DatasetList) Unmarshal(dAtA []byte) error

func (*DatasetList) XXX_DiscardUnknown

func (m *DatasetList) XXX_DiscardUnknown()

func (*DatasetList) XXX_Marshal

func (m *DatasetList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DatasetList) XXX_Merge

func (m *DatasetList) XXX_Merge(src proto.Message)

func (*DatasetList) XXX_Size

func (m *DatasetList) XXX_Size() int

func (*DatasetList) XXX_Unmarshal

func (m *DatasetList) XXX_Unmarshal(b []byte) error

type DatasetRole added in v0.4.914

type DatasetRole string

+kubebuilder:validation:Enum="unlabeled";"serving";"feedback";"training";"prediction";"featuregroup";

const (
	DatasetRoleUnlabeled    DatasetRole = "unlabeled"    // Unlabeled dataset
	DatasetRoleFeatureGroup DatasetRole = "featuregroup" // Feature-group dataset (NOTE(review): original comment said "Unlabeled dataset", a likely copy-paste error)
	DatasetRoleServing      DatasetRole = "serving"      // Dataset containing serving data
	DatasetRoleFeedback     DatasetRole = "feedback"     // Feedback dataset which was labeled
	DatasetRoleTraining     DatasetRole = "training"     // Regular labeled dataset used for training
	DatasetRolePrediction   DatasetRole = "prediction"   // Dataset used for prediction (NOTE(review): original comment said "Regular Labeled dataset", a likely copy-paste error)
)

type DatasetSnapshot added in v0.6.252

type DatasetSnapshot struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	// Spec holds the desired state of the DatasetSnapshot.
	Spec              DatasetSnapshotSpec   `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	// Status holds the state observed by the controller.
	// NOTE(review): unlike Dataset.Status this field lacks an //+optional marker — confirm whether that is intentional.
	Status            DatasetSnapshotStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}

+kubebuilder:object:root=true +kubebuilder:resource:path=datasetsnapshots,shortName=dss,singular=datasetsnapshot,categories={data,modela,all} +kubebuilder:subresource:status +kubebuilder:storageversion +kubebuilder:printcolumn:name="Phase",type="string",JSONPath=".status.phase" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Version",type="integer",JSONPath=".spec.runVersion" +kubebuilder:printcolumn:name="Dataset",type="string",JSONPath=".spec.datasetName" +kubebuilder:printcolumn:name="Rows",type="integer",JSONPath=".status.statistics.rows" +kubebuilder:printcolumn:name="Columns",type="integer",JSONPath=".status.statistics.columns" +kubebuilder:printcolumn:name="Size",type="integer",JSONPath=".status.statistics.sizeInBytes" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" DatasetSnapshot represents an ephemeral snapshot of a Dataset

func ParseDatasetSnapshotYaml added in v0.6.252

func ParseDatasetSnapshotYaml(content []byte) (*DatasetSnapshot, error)

func (*DatasetSnapshot) Aborted added in v0.6.252

func (snapshot *DatasetSnapshot) Aborted() bool

func (*DatasetSnapshot) AddFinalizer added in v0.6.252

func (snapshot *DatasetSnapshot) AddFinalizer()

func (*DatasetSnapshot) CompletionAlert added in v0.6.252

func (snapshot *DatasetSnapshot) CompletionAlert(notification catalog.NotificationSpec) *infra.Alert

func (*DatasetSnapshot) ConstructFeatureHistogram added in v0.6.252

func (snapshot *DatasetSnapshot) ConstructFeatureHistogram() (*FeatureHistogram, error)

func (*DatasetSnapshot) CreateOrUpdateCondition added in v0.6.252

func (snapshot *DatasetSnapshot) CreateOrUpdateCondition(cond metav1.Condition)

Merge or update condition

func (*DatasetSnapshot) DataSourceManifestURI added in v0.6.252

func (snapshot *DatasetSnapshot) DataSourceManifestURI() string

func (*DatasetSnapshot) DatasetManifestURI added in v0.6.252

func (snapshot *DatasetSnapshot) DatasetManifestURI() string

func (*DatasetSnapshot) DeepCopy added in v0.6.252

func (in *DatasetSnapshot) DeepCopy() *DatasetSnapshot

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetSnapshot.

func (*DatasetSnapshot) DeepCopyInto added in v0.6.252

func (in *DatasetSnapshot) DeepCopyInto(out *DatasetSnapshot)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DatasetSnapshot) DeepCopyObject added in v0.6.252

func (in *DatasetSnapshot) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DatasetSnapshot) Default added in v0.6.252

func (snapshot *DatasetSnapshot) Default()

func (*DatasetSnapshot) Deleted added in v0.6.252

func (snapshot *DatasetSnapshot) Deleted() bool

func (*DatasetSnapshot) Descriptor added in v0.6.252

func (*DatasetSnapshot) Descriptor() ([]byte, []int)

func (*DatasetSnapshot) DriftColumnNames added in v0.6.252

func (snapshot *DatasetSnapshot) DriftColumnNames() []string

return the list of drift. Currently return the drift columns

func (*DatasetSnapshot) ErrorAlert added in v0.6.252

func (snapshot *DatasetSnapshot) ErrorAlert(notification catalog.NotificationSpec, err error) *infra.Alert

func (*DatasetSnapshot) ExternalStatusUpdated added in v0.6.252

func (snapshot *DatasetSnapshot) ExternalStatusUpdated() bool

func (*DatasetSnapshot) Failed added in v0.6.252

func (snapshot *DatasetSnapshot) Failed() bool

func (*DatasetSnapshot) Generated added in v0.6.252

func (snapshot *DatasetSnapshot) Generated() bool

func (*DatasetSnapshot) GetColumn added in v0.6.252

func (snapshot *DatasetSnapshot) GetColumn(name string) (*FeatureStatistics, error)

Search for a column stat, based on name

func (*DatasetSnapshot) GetCondition added in v0.6.252

func (snapshot *DatasetSnapshot) GetCondition(condType DatasetSnapshotConditionType) metav1.Condition

func (*DatasetSnapshot) GetConditionIndex added in v0.6.252

func (snapshot *DatasetSnapshot) GetConditionIndex(t string) int

func (*DatasetSnapshot) GetObservedGeneration added in v0.6.252

func (snapshot *DatasetSnapshot) GetObservedGeneration() int64

func (*DatasetSnapshot) GetStatus added in v0.6.252

func (snapshot *DatasetSnapshot) GetStatus() proto.Message

func (*DatasetSnapshot) GroupDataFile added in v0.6.252

func (snapshot *DatasetSnapshot) GroupDataFile(keys []string) string

func (*DatasetSnapshot) GroupDataFolder added in v0.6.252

func (snapshot *DatasetSnapshot) GroupDataFolder(keys []string) string

func (*DatasetSnapshot) GroupFolder added in v0.6.252

func (snapshot *DatasetSnapshot) GroupFolder(keys []string) string

func (*DatasetSnapshot) GroupForecastFile added in v0.6.252

func (snapshot *DatasetSnapshot) GroupForecastFile(keys []string) string

func (*DatasetSnapshot) GroupProfileFolder added in v0.6.252

func (snapshot *DatasetSnapshot) GroupProfileFolder(keys []string) string

func (*DatasetSnapshot) GroupReportFile added in v0.6.252

func (snapshot *DatasetSnapshot) GroupReportFile(keys []string) string

func (*DatasetSnapshot) Grouped added in v0.6.252

func (snapshot *DatasetSnapshot) Grouped() bool

func (*DatasetSnapshot) GroupsFolder added in v0.6.252

func (snapshot *DatasetSnapshot) GroupsFolder() string

func (*DatasetSnapshot) HasFinalizer added in v0.6.252

func (snapshot *DatasetSnapshot) HasFinalizer() bool

func (*DatasetSnapshot) HasScheduleTrigger added in v0.6.252

func (snapshot *DatasetSnapshot) HasScheduleTrigger() bool

func (*DatasetSnapshot) IndexFileKey added in v0.6.252

func (snapshot *DatasetSnapshot) IndexFileKey() string

func (*DatasetSnapshot) ManifestURI added in v0.6.252

func (snapshot *DatasetSnapshot) ManifestURI() string

func (*DatasetSnapshot) MarkAborted added in v0.6.252

func (snapshot *DatasetSnapshot) MarkAborted()

func (*DatasetSnapshot) MarkExternalStatusNotUpdated added in v0.6.252

func (snapshot *DatasetSnapshot) MarkExternalStatusNotUpdated()

func (*DatasetSnapshot) MarkExternalStatusUpdated added in v0.6.252

func (snapshot *DatasetSnapshot) MarkExternalStatusUpdated()

func (*DatasetSnapshot) MarkGenerated added in v0.6.252

func (snapshot *DatasetSnapshot) MarkGenerated()

func (*DatasetSnapshot) MarkGeneratedFailed added in v0.6.252

func (snapshot *DatasetSnapshot) MarkGeneratedFailed(reason string, msg string)

func (*DatasetSnapshot) MarkGenerating added in v0.6.252

func (snapshot *DatasetSnapshot) MarkGenerating()

func (*DatasetSnapshot) MarkGroupFailed added in v0.6.252

func (snapshot *DatasetSnapshot) MarkGroupFailed(reason string, msg string)

func (*DatasetSnapshot) MarkGroupSuccess added in v0.6.252

func (snapshot *DatasetSnapshot) MarkGroupSuccess()

func (*DatasetSnapshot) MarkGrouping added in v0.6.252

func (snapshot *DatasetSnapshot) MarkGrouping()

func (*DatasetSnapshot) MarkPrepareFailed added in v0.6.252

func (snapshot *DatasetSnapshot) MarkPrepareFailed(reason string, msg string)

func (*DatasetSnapshot) MarkPrepareSuccess added in v0.6.252

func (snapshot *DatasetSnapshot) MarkPrepareSuccess()

func (*DatasetSnapshot) MarkPreparing added in v0.6.252

func (snapshot *DatasetSnapshot) MarkPreparing()

func (*DatasetSnapshot) MarkProfileFailed added in v0.6.252

func (snapshot *DatasetSnapshot) MarkProfileFailed(reason string, err string)

func (*DatasetSnapshot) MarkProfiled added in v0.6.252

func (snapshot *DatasetSnapshot) MarkProfiled(profileLocation catalog.FileLocation)

func (*DatasetSnapshot) MarkProfiling added in v0.6.252

func (snapshot *DatasetSnapshot) MarkProfiling()

func (*DatasetSnapshot) MarkReady added in v0.6.252

func (snapshot *DatasetSnapshot) MarkReady()

func (*DatasetSnapshot) MarkReadyFailed added in v0.6.252

func (snapshot *DatasetSnapshot) MarkReadyFailed(reason string, err string)

func (*DatasetSnapshot) MarkReportFailed added in v0.6.252

func (snapshot *DatasetSnapshot) MarkReportFailed(reason string, msg string)

func (*DatasetSnapshot) MarkReported added in v0.6.252

func (snapshot *DatasetSnapshot) MarkReported()

func (*DatasetSnapshot) MarkReporting added in v0.6.252

func (snapshot *DatasetSnapshot) MarkReporting()

func (*DatasetSnapshot) MarkSkewColumns added in v0.6.252

func (snapshot *DatasetSnapshot) MarkSkewColumns()

func (*DatasetSnapshot) MarkSnapshotFailed added in v0.6.252

func (snapshot *DatasetSnapshot) MarkSnapshotFailed(reason string, msg string)

func (*DatasetSnapshot) MarkSnapshotSuccess added in v0.6.252

func (snapshot *DatasetSnapshot) MarkSnapshotSuccess()

func (*DatasetSnapshot) MarkTakingSnapshot added in v0.6.252

func (snapshot *DatasetSnapshot) MarkTakingSnapshot()

func (*DatasetSnapshot) MarkUnitTestFailed added in v0.6.252

func (snapshot *DatasetSnapshot) MarkUnitTestFailed(reason string, msg string)

func (*DatasetSnapshot) MarkUnitTested added in v0.6.252

func (snapshot *DatasetSnapshot) MarkUnitTested()

func (*DatasetSnapshot) MarkUnitTesting added in v0.6.252

func (snapshot *DatasetSnapshot) MarkUnitTesting()

func (*DatasetSnapshot) Marshal added in v0.6.252

func (m *DatasetSnapshot) Marshal() (dAtA []byte, err error)

func (*DatasetSnapshot) MarshalTo added in v0.6.252

func (m *DatasetSnapshot) MarshalTo(dAtA []byte) (int, error)

func (*DatasetSnapshot) MarshalToSizedBuffer added in v0.6.252

func (m *DatasetSnapshot) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DatasetSnapshot) Paused added in v0.6.252

func (snapshot *DatasetSnapshot) Paused() bool

func (*DatasetSnapshot) Prepared added in v0.6.252

func (snapshot *DatasetSnapshot) Prepared() bool

func (*DatasetSnapshot) ProfileURI added in v0.6.252

func (snapshot *DatasetSnapshot) ProfileURI() string

func (*DatasetSnapshot) Profiled added in v0.6.252

func (snapshot *DatasetSnapshot) Profiled() bool

func (*DatasetSnapshot) ProtoMessage added in v0.6.252

func (*DatasetSnapshot) ProtoMessage()

func (*DatasetSnapshot) ReachedMaxTime added in v0.6.252

func (snapshot *DatasetSnapshot) ReachedMaxTime(dataset *Dataset) bool

func (*DatasetSnapshot) Ready added in v0.6.252

func (snapshot *DatasetSnapshot) Ready() bool

func (*DatasetSnapshot) RefreshProgress added in v0.6.252

func (snapshot *DatasetSnapshot) RefreshProgress()

func (*DatasetSnapshot) RemoveFinalizer added in v0.6.252

func (snapshot *DatasetSnapshot) RemoveFinalizer()

func (*DatasetSnapshot) ReportName added in v0.6.252

func (snapshot *DatasetSnapshot) ReportName() string

func (*DatasetSnapshot) ReportURI added in v0.6.252

func (snapshot *DatasetSnapshot) ReportURI() string

func (*DatasetSnapshot) Reported added in v0.6.252

func (snapshot *DatasetSnapshot) Reported() bool

func (*DatasetSnapshot) Reset added in v0.6.252

func (m *DatasetSnapshot) Reset()

func (*DatasetSnapshot) RootURI added in v0.6.252

func (snapshot *DatasetSnapshot) RootURI() string

func (*DatasetSnapshot) SetObservedGeneration added in v0.6.252

func (snapshot *DatasetSnapshot) SetObservedGeneration(generation int64)

func (*DatasetSnapshot) SetStatus added in v0.6.252

func (snapshot *DatasetSnapshot) SetStatus(status interface{})

func (*DatasetSnapshot) SetUpdatedAt added in v0.6.252

func (snapshot *DatasetSnapshot) SetUpdatedAt(time *metav1.Time)

func (*DatasetSnapshot) SetupWebhookWithManager added in v0.6.252

func (snapshot *DatasetSnapshot) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*DatasetSnapshot) Size added in v0.6.252

func (m *DatasetSnapshot) Size() (n int)

func (*DatasetSnapshot) Snapshotted added in v0.6.252

func (snapshot *DatasetSnapshot) Snapshotted() bool

func (*DatasetSnapshot) String added in v0.6.252

func (this *DatasetSnapshot) String() string

func (*DatasetSnapshot) TaskIndexFileKey added in v0.6.252

func (snapshot *DatasetSnapshot) TaskIndexFileKey(task string) string

func (*DatasetSnapshot) ToRunReference added in v0.6.252

func (snapshot *DatasetSnapshot) ToRunReference() catalog.RunReference

func (*DatasetSnapshot) UnitTested added in v0.6.252

func (snapshot *DatasetSnapshot) UnitTested() bool

func (*DatasetSnapshot) Unmarshal added in v0.6.252

func (m *DatasetSnapshot) Unmarshal(dAtA []byte) error

func (*DatasetSnapshot) ValidateCreate added in v0.6.252

func (snapshot *DatasetSnapshot) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*DatasetSnapshot) ValidateDelete added in v0.6.252

func (snapshot *DatasetSnapshot) ValidateDelete() error

func (*DatasetSnapshot) ValidateUpdate added in v0.6.252

func (snapshot *DatasetSnapshot) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*DatasetSnapshot) WorkerIndexFileKey added in v0.6.252

func (snapshot *DatasetSnapshot) WorkerIndexFileKey(workerIndex int, task string) string

func (*DatasetSnapshot) XXX_DiscardUnknown added in v0.6.252

func (m *DatasetSnapshot) XXX_DiscardUnknown()

func (*DatasetSnapshot) XXX_Marshal added in v0.6.252

func (m *DatasetSnapshot) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DatasetSnapshot) XXX_Merge added in v0.6.252

func (m *DatasetSnapshot) XXX_Merge(src proto.Message)

func (*DatasetSnapshot) XXX_Size added in v0.6.252

func (m *DatasetSnapshot) XXX_Size() int

func (*DatasetSnapshot) XXX_Unmarshal added in v0.6.252

func (m *DatasetSnapshot) XXX_Unmarshal(b []byte) error

type DatasetSnapshotConditionType added in v0.6.252

// DatasetSnapshotConditionType is the type discriminator for conditions recorded on a
// DatasetSnapshot. Each value below corresponds to a Mark*/query method pair on
// DatasetSnapshot (e.g. MarkPrepareSuccess/Prepared, MarkProfiled/Profiled).
type DatasetSnapshotConditionType string

const (
	DatasetSnapshotPrepared              DatasetSnapshotConditionType = "Prepared"
	DatasetSnapshotReported              DatasetSnapshotConditionType = "Reported"
	DatasetSnapshotUnitTested            DatasetSnapshotConditionType = "UnitTested"
	DatasetSnapshotSnapshotTaken         DatasetSnapshotConditionType = "SnapshotTaken"
	DatasetSnapshotProfiled              DatasetSnapshotConditionType = "Profiled"
	DatasetSnapshotGrouped               DatasetSnapshotConditionType = "Grouped"
	DatasetSnapshotGenerated             DatasetSnapshotConditionType = "Generated"
	DatasetSnapshotReady                 DatasetSnapshotConditionType = "Ready"
	DatasetSnapshotPaused                DatasetSnapshotConditionType = "Paused"
	DatasetSnapshotAborted               DatasetSnapshotConditionType = "Aborted"
	DatasetSnapshotExternalStatusUpdated DatasetSnapshotConditionType = "ExternalStatusUpdated"
)

type DatasetSnapshotList added in v0.6.252

// DatasetSnapshotList contains a list of DatasetSnapshot resources.
type DatasetSnapshotList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	// Items is the list of DatasetSnapshots.
	Items           []DatasetSnapshot `json:"items" protobuf:"bytes,2,rep,name=items"`
}

+kubebuilder:object:root=true DatasetSnapshotList contains a list of DatasetSnapshots

func (*DatasetSnapshotList) DeepCopy added in v0.6.252

func (in *DatasetSnapshotList) DeepCopy() *DatasetSnapshotList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetSnapshotList.

func (*DatasetSnapshotList) DeepCopyInto added in v0.6.252

func (in *DatasetSnapshotList) DeepCopyInto(out *DatasetSnapshotList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DatasetSnapshotList) DeepCopyObject added in v0.6.252

func (in *DatasetSnapshotList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DatasetSnapshotList) Descriptor added in v0.6.252

func (*DatasetSnapshotList) Descriptor() ([]byte, []int)

func (*DatasetSnapshotList) Marshal added in v0.6.252

func (m *DatasetSnapshotList) Marshal() (dAtA []byte, err error)

func (*DatasetSnapshotList) MarshalTo added in v0.6.252

func (m *DatasetSnapshotList) MarshalTo(dAtA []byte) (int, error)

func (*DatasetSnapshotList) MarshalToSizedBuffer added in v0.6.252

func (m *DatasetSnapshotList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DatasetSnapshotList) ProtoMessage added in v0.6.252

func (*DatasetSnapshotList) ProtoMessage()

func (*DatasetSnapshotList) Reset added in v0.6.252

func (m *DatasetSnapshotList) Reset()

func (*DatasetSnapshotList) Size added in v0.6.252

func (m *DatasetSnapshotList) Size() (n int)

func (*DatasetSnapshotList) String added in v0.6.252

func (this *DatasetSnapshotList) String() string

func (*DatasetSnapshotList) Unmarshal added in v0.6.252

func (m *DatasetSnapshotList) Unmarshal(dAtA []byte) error

func (*DatasetSnapshotList) XXX_DiscardUnknown added in v0.6.252

func (m *DatasetSnapshotList) XXX_DiscardUnknown()

func (*DatasetSnapshotList) XXX_Marshal added in v0.6.252

func (m *DatasetSnapshotList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DatasetSnapshotList) XXX_Merge added in v0.6.252

func (m *DatasetSnapshotList) XXX_Merge(src proto.Message)

func (*DatasetSnapshotList) XXX_Size added in v0.6.252

func (m *DatasetSnapshotList) XXX_Size() int

func (*DatasetSnapshotList) XXX_Unmarshal added in v0.6.252

func (m *DatasetSnapshotList) XXX_Unmarshal(b []byte) error

type DatasetSnapshotPhase added in v0.6.252

// DatasetSnapshotPhase is the string enumeration of the lifecycle phases of a
// DatasetSnapshot (see the constants below).
type DatasetSnapshotPhase string

// The phases of a DatasetSnapshot, roughly in order of progression. A snapshot
// that completes successfully ends in Ready; otherwise it ends in Failed or Aborted.
const (
	DatasetSnapshotPhasePending         DatasetSnapshotPhase = "Pending"
	DatasetSnapshotPhasePreparing       DatasetSnapshotPhase = "Preparing"
	DatasetSnapshotPhasePrepared        DatasetSnapshotPhase = "Prepared"
	DatasetSnapshotPhaseGenerating      DatasetSnapshotPhase = "Generating"
	DatasetSnapshotPhaseGenerateSuccess DatasetSnapshotPhase = "Generated"
	DatasetSnapshotPhaseIngestRunning   DatasetSnapshotPhase = "Ingesting"
	DatasetSnapshotPhaseIngestSuccess   DatasetSnapshotPhase = "Ingested"
	DatasetSnapshotPhaseGrouping        DatasetSnapshotPhase = "Grouping"
	DatasetSnapshotPhaseGrouped         DatasetSnapshotPhase = "Grouped"
	DatasetSnapshotPhaseReportRunning   DatasetSnapshotPhase = "Reporting"
	DatasetSnapshotPhaseReportSuccess   DatasetSnapshotPhase = "Reported"
	DatasetSnapshotPhaseProfileRunning  DatasetSnapshotPhase = "Profiling"
	DatasetSnapshotPhaseProfileSuccess  DatasetSnapshotPhase = "Profiled"
	DatasetSnapshotPhaseUnitTesting     DatasetSnapshotPhase = "UnitTesting"
	DatasetSnapshotPhaseSnapshotRunning DatasetSnapshotPhase = "TakingSnapshot"
	DatasetSnapshotPhaseSnapshotSuccess DatasetSnapshotPhase = "Snapshotted"
	DatasetSnapshotPhaseFailed          DatasetSnapshotPhase = "Failed"
	DatasetSnapshotPhaseAborted         DatasetSnapshotPhase = "Aborted"
	DatasetSnapshotPhaseReady           DatasetSnapshotPhase = "Ready"
)

type DatasetSnapshotSpec added in v0.6.252

type DatasetSnapshotSpec struct {
	// Owner specifies the name of the Account which the object belongs to
	// +kubebuilder:default:="no-one"
	// +kubebuilder:validation:Optional
	Owner *string `json:"owner,omitempty" protobuf:"bytes,1,opt,name=owner"`
	// DatasetName specifies the name of the Dataset resource which the snapshot will use to
	// determine the location, format, and post-processing options for the dataset
	// +kubebuilder:validation:Required
	// +required
	DatasetName string `json:"datasetName,omitempty" protobuf:"varint,2,opt,name=datasetName"`
	// Timeout specifies the time in seconds for the snapshot to be completed
	// +kubebuilder:validation:Optional
	Timeout *int32 `json:"timeout,omitempty" protobuf:"varint,3,opt,name=timeout"`
	// If true, the execution of new workloads associated with the run will be paused
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Pause *bool `json:"pause,omitempty" protobuf:"varint,4,opt,name=pause"`
	// If true, the run will be permanently aborted and all workloads created by the run will be removed
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Abort *bool `json:"abort,omitempty" protobuf:"varint,5,opt,name=abort"`
	// ModelClassRunName specifies the name of the Model Class Run which created the run, if applicable
	// +kubebuilder:validation:Optional
	ModelClassRunName *string `json:"modelClassRunName,omitempty" protobuf:"bytes,6,opt,name=modelClassRunName"`
}

DatasetSnapshotSpec defines the run configuration for a snapshot of a Dataset

func (*DatasetSnapshotSpec) DeepCopy added in v0.6.252

func (in *DatasetSnapshotSpec) DeepCopy() *DatasetSnapshotSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetSnapshotSpec.

func (*DatasetSnapshotSpec) DeepCopyInto added in v0.6.252

func (in *DatasetSnapshotSpec) DeepCopyInto(out *DatasetSnapshotSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DatasetSnapshotSpec) Descriptor added in v0.6.252

func (*DatasetSnapshotSpec) Descriptor() ([]byte, []int)

func (*DatasetSnapshotSpec) Marshal added in v0.6.252

func (m *DatasetSnapshotSpec) Marshal() (dAtA []byte, err error)

func (*DatasetSnapshotSpec) MarshalTo added in v0.6.252

func (m *DatasetSnapshotSpec) MarshalTo(dAtA []byte) (int, error)

func (*DatasetSnapshotSpec) MarshalToSizedBuffer added in v0.6.252

func (m *DatasetSnapshotSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DatasetSnapshotSpec) ProtoMessage added in v0.6.252

func (*DatasetSnapshotSpec) ProtoMessage()

func (*DatasetSnapshotSpec) Reset added in v0.6.252

func (m *DatasetSnapshotSpec) Reset()

func (*DatasetSnapshotSpec) Size added in v0.6.252

func (m *DatasetSnapshotSpec) Size() (n int)

func (*DatasetSnapshotSpec) String added in v0.6.252

func (this *DatasetSnapshotSpec) String() string

func (*DatasetSnapshotSpec) Unmarshal added in v0.6.252

func (m *DatasetSnapshotSpec) Unmarshal(dAtA []byte) error

func (*DatasetSnapshotSpec) XXX_DiscardUnknown added in v0.6.252

func (m *DatasetSnapshotSpec) XXX_DiscardUnknown()

func (*DatasetSnapshotSpec) XXX_Marshal added in v0.6.252

func (m *DatasetSnapshotSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DatasetSnapshotSpec) XXX_Merge added in v0.6.252

func (m *DatasetSnapshotSpec) XXX_Merge(src proto.Message)

func (*DatasetSnapshotSpec) XXX_Size added in v0.6.252

func (m *DatasetSnapshotSpec) XXX_Size() int

func (*DatasetSnapshotSpec) XXX_Unmarshal added in v0.6.252

func (m *DatasetSnapshotSpec) XXX_Unmarshal(b []byte) error

type DatasetSnapshotStatus added in v0.6.252

// DatasetSnapshotStatus specifies the observed state of a DatasetSnapshot.
type DatasetSnapshotStatus struct {
	// ObservedGeneration is the last generation that was reconciled
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,1,opt,name=observedGeneration"`
	// DatasetManifestLocation specifies the location of Dataset manifest used during the course of the snapshot
	DatasetManifestLocation catalog.ManifestLocation `json:"datasetManifestLocation,omitempty" protobuf:"bytes,2,opt,name=datasetManifestLocation"`
	// DataSourceManifestLocation specifies the location of the Data Source manifest used during the course of the snapshot
	DataSourceManifestLocation catalog.ManifestLocation `json:"dataSourceManifestLocation,omitempty" protobuf:"bytes,3,opt,name=dataSourceManifestLocation"`
	// SnapshotVersion specifies the version of the snapshot, which is determined when the Dataset manifest is saved
	SnapshotVersion catalog.Version `json:"snapshotVersion" protobuf:"varint,4,opt,name=snapshotVersion"`
	// Statistics contains statistics for each column of the dataset, generated during the dataset profile
	// +kubebuilder:validation:Optional
	Statistics DatasetStatistics `json:"statistics,omitempty" protobuf:"bytes,5,opt,name=statistics"`
	// Phase defines the current phase of the run relative to its progress
	// +kubebuilder:default:="Pending"
	// +kubebuilder:validation:Optional
	Phase DatasetSnapshotPhase `json:"phase,omitempty" protobuf:"bytes,6,opt,name=phase"`
	// ReportName specifies the name of the Report created by the run
	// +kubebuilder:validation:Optional
	ReportName string `json:"reportName,omitempty" protobuf:"bytes,7,opt,name=reportName"`
	// DataLocation contains the location of the dataset snapshot
	// +kubebuilder:validation:Optional
	DataLocation catalog.FileLocation `json:"dataLocation,omitempty" protobuf:"bytes,8,opt,name=dataLocation"`
	// ReportLocation contains the location of the report file generated by the snapshot's Report resource
	// +kubebuilder:validation:Optional
	ReportLocation catalog.FileLocation `json:"reportLocation,omitempty" protobuf:"bytes,9,opt,name=reportLocation"`
	// ProfileLocation contains the location for the raw profile data
	// +kubebuilder:validation:Optional
	ProfileLocation catalog.FileLocation `json:"profileLocation" protobuf:"bytes,10,opt,name=profileLocation"`
	// AnomaliesLocation contains the location of the anomaly file, containing a list of rows
	// which were determined as anomalies by an isolation forest algorithm
	// +kubebuilder:validation:Optional
	AnomaliesLocation catalog.FileLocation `json:"anomaliesLocation" protobuf:"bytes,11,opt,name=anomaliesLocation"`
	// SampleLocation contains the location of dataset sample
	// +kubebuilder:validation:Optional
	SampleLocation catalog.FileLocation `json:"sampleLocation" protobuf:"bytes,12,opt,name=sampleLocation"`
	// Imbalanced indicates if the classes in the dataset were detected as imbalanced
	// +kubebuilder:validation:Optional
	Imbalanced bool `json:"imbalanced,omitempty" protobuf:"varint,13,opt,name=imbalanced"`
	// UnitTestResults contains the results of the unit test phase
	//+kubebuilder:validation:Optional
	UnitTestResults *catalog.TestSuiteResult `json:"unitTestResults,omitempty" protobuf:"bytes,14,opt,name=unitTestResults"`
	// FailureMessage is set to a failure message in the case that an error occurred during the snapshot
	//+kubebuilder:validation:Optional
	FailureMessage *string `json:"failureMessage,omitempty" protobuf:"bytes,15,opt,name=failureMessage"`
	// Progress defines the current numerical progress of the run, from 0 to 100
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Optional
	Progress int32 `json:"progress,omitempty" protobuf:"varint,16,opt,name=progress"`
	// Logs contains the location of all logs produced by snapshot workloads
	// +kubebuilder:validation:Optional
	Logs catalog.Logs `json:"logs" protobuf:"bytes,17,opt,name=logs"`
	// Hash is an opaque checksum of the snapshot contents; used internally for change detection
	// +kubebuilder:validation:Optional
	Hash string `json:"hash,omitempty" protobuf:"bytes,18,opt,name=hash"`
	// UpdatedAt specifies the last time the snapshot was updated
	//+kubebuilder:validation:Optional
	UpdatedAt *metav1.Time `json:"updatedAt,omitempty" protobuf:"bytes,19,opt,name=updatedAt"`
	// CompletedAt specifies the time at which the snapshot completed or failed
	// +kubebuilder:validation:Optional
	CompletedAt *metav1.Time `json:"completedAt,omitempty" protobuf:"bytes,20,opt,name=completedAt"`
	// LastStudyAt specifies the last time the snapshot was used to create a Study
	//+kubebuilder:validation:Optional
	LastStudyAt *metav1.Time `json:"lastStudyAt,omitempty" protobuf:"bytes,21,opt,name=lastStudyAt"`
	// Images contains the container images used by the run
	// +kubebuilder:validation:Optional
	Images *catalog.Images `json:"images,omitempty" protobuf:"bytes,22,opt,name=images"`
	// FeatureHistogramName references the feature histogram generated by the snapshot
	// +kubebuilder:validation:Optional
	FeatureHistogramName *string `json:"featureHistogramName,omitempty" protobuf:"bytes,23,opt,name=featureHistogramName"`
	// GroupBy contains the index files for datasets produced through group-by operations
	// +kubebuilder:validation:Optional
	GroupBy *DatasetGroupByStatus `json:"groupBy,omitempty" protobuf:"bytes,24,opt,name=groupBy"`
	// Conditions is the collection of status conditions for the DatasetSnapshot
	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []metav1.Condition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,25,rep,name=conditions"`
}

DatasetSnapshotStatus specifies the observed state of a DatasetSnapshot

func (*DatasetSnapshotStatus) DeepCopy added in v0.6.252

func (in *DatasetSnapshotStatus) DeepCopy() *DatasetSnapshotStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetSnapshotStatus.

func (*DatasetSnapshotStatus) DeepCopyInto added in v0.6.252

func (in *DatasetSnapshotStatus) DeepCopyInto(out *DatasetSnapshotStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DatasetSnapshotStatus) Descriptor added in v0.6.252

func (*DatasetSnapshotStatus) Descriptor() ([]byte, []int)

func (*DatasetSnapshotStatus) Marshal added in v0.6.252

func (m *DatasetSnapshotStatus) Marshal() (dAtA []byte, err error)

func (*DatasetSnapshotStatus) MarshalTo added in v0.6.252

func (m *DatasetSnapshotStatus) MarshalTo(dAtA []byte) (int, error)

func (*DatasetSnapshotStatus) MarshalToSizedBuffer added in v0.6.252

func (m *DatasetSnapshotStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DatasetSnapshotStatus) ProtoMessage added in v0.6.252

func (*DatasetSnapshotStatus) ProtoMessage()

func (*DatasetSnapshotStatus) Reset added in v0.6.252

func (m *DatasetSnapshotStatus) Reset()

func (*DatasetSnapshotStatus) Size added in v0.6.252

func (m *DatasetSnapshotStatus) Size() (n int)

func (*DatasetSnapshotStatus) String added in v0.6.252

func (this *DatasetSnapshotStatus) String() string

func (*DatasetSnapshotStatus) Unmarshal added in v0.6.252

func (m *DatasetSnapshotStatus) Unmarshal(dAtA []byte) error

func (*DatasetSnapshotStatus) XXX_DiscardUnknown added in v0.6.252

func (m *DatasetSnapshotStatus) XXX_DiscardUnknown()

func (*DatasetSnapshotStatus) XXX_Marshal added in v0.6.252

func (m *DatasetSnapshotStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DatasetSnapshotStatus) XXX_Merge added in v0.6.252

func (m *DatasetSnapshotStatus) XXX_Merge(src proto.Message)

func (*DatasetSnapshotStatus) XXX_Size added in v0.6.252

func (m *DatasetSnapshotStatus) XXX_Size() int

func (*DatasetSnapshotStatus) XXX_Unmarshal added in v0.6.252

func (m *DatasetSnapshotStatus) XXX_Unmarshal(b []byte) error

type DatasetSpec

// DatasetSpec defines the desired parameters for a Dataset.
type DatasetSpec struct {
	// Owner specifies the name of the Account which the object belongs to
	// +kubebuilder:default:="no-one"
	// +kubebuilder:validation:Optional
	Owner *string `json:"owner,omitempty" protobuf:"bytes,1,opt,name=owner"`
	// DataSourceName specifies the name of the Data Source which Dataset runs will use as a schema for incoming data
	// +kubebuilder:validation:Required
	// +kubebuilder:validation:MaxLength=63
	// +kubebuilder:default:=""
	// +required
	DataSourceName string `json:"dataSourceName,omitempty" protobuf:"bytes,2,opt,name=dataSourceName"`
	// Description contains the user-provided description of the object
	// +kubebuilder:validation:MaxLength=512
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Description *string `json:"description,omitempty" protobuf:"bytes,3,opt,name=description"`
	// RunSpec specifies the configuration for the execution of snapshots created by the Dataset
	// +kubebuilder:validation:Required
	Run catalog.RunSpec `json:"run,omitempty" protobuf:"bytes,4,opt,name=run"`
	// The name of the Virtual Bucket where artifacts generated by the Dataset will be stored.
	// If empty, default to the Data Product's default artifact bucket
	// +kubebuilder:validation:Optional
	ArtifactBucketName *string `json:"artifactBucketName,omitempty" protobuf:"bytes,5,opt,name=artifactBucketName"`
	// The name of the Lab under which Jobs created by the Dataset will be executed.
	// If empty, default to the Data Product's default Lab
	// +kubebuilder:validation:Optional
	LabName *string `json:"labName,omitempty" protobuf:"bytes,6,opt,name=labName"`
	// Role specifies the role of the dataset (training, feedback, etc.)
	// +kubebuilder:default:="training"
	// +kubebuilder:validation:Optional
	Role DatasetRole `json:"role,omitempty" protobuf:"bytes,7,opt,name=role"`
	// The machine learning task relevant to the Dataset. This field must be the same as the task specified by the Data Source
	// +kubebuilder:validation:Optional
	Task catalog.MLTask `json:"task,omitempty" protobuf:"bytes,8,opt,name=task"`
	// The machine learning sub-task relevant to the Dataset. This field must be the same as the subtask specified by the Data Source
	// +kubebuilder:default:=none
	// +kubebuilder:validation:Optional
	SubTask *catalog.MLSubtask `json:"subtask,omitempty" protobuf:"bytes,9,opt,name=subtask"`
	// The type of dataset which was uploaded. Tabular is the only supported type as of the current release
	// +kubebuilder:default:="tabular"
	// +kubebuilder:validation:Optional
	Type *catalog.DatasetType `json:"type,omitempty" protobuf:"bytes,10,opt,name=type"`
	// Origin is the location of the raw data that will be read
	// +kubebuilder:validation:Optional
	Origin catalog.DataLocation `json:"origin,omitempty" protobuf:"bytes,11,opt,name=origin"`
	// Resources specifies the resource requirements that will be allocated for the Dataset
	// +kubebuilder:validation:Optional
	Resources catalog.ResourceSpec `json:"resources,omitempty" protobuf:"bytes,12,opt,name=resources"`
	// UnitTests specifies the unit tests that will be run against ingested datasets
	// +kubebuilder:validation:Optional
	UnitTests *catalog.TestSuite `json:"unitTests,omitempty" protobuf:"bytes,13,opt,name=unitTests"`
	// Indicates if a PDF report containing the Dataset profile should be generated
	// +kubebuilder:default:=true
	// +kubebuilder:validation:Optional
	Report *bool `json:"report,omitempty" protobuf:"varint,14,opt,name=report"`
	// Indicates if the Dataset should be checked against the validation rules of its Data Source
	// +kubebuilder:default:=true
	// +kubebuilder:validation:Optional
	UnitTest *bool `json:"unitTest,omitempty" protobuf:"varint,15,opt,name=unitTest"`
	// Indicates if a Feature Histogram will be generated from the dataset's columns. A feature histogram
	// may only be generated if the dataset profile succeeded
	// +kubebuilder:default:=true
	// +kubebuilder:validation:Optional
	GenerateFeatureHistogram *bool `json:"generateFeatureHistogram,omitempty" protobuf:"varint,16,opt,name=generateFeatureHistogram"`
	// Indicates if the Dataset should be quickly processed. If enabled, the validation, profiling, and reporting will be skipped
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Fast *bool `json:"fast,omitempty" protobuf:"varint,17,opt,name=fast"`
	// Sample specifies the configuration to sample the dataset's data
	// +kubebuilder:validation:Optional
	Sample *SampleSpec `json:"sample,omitempty" protobuf:"bytes,18,opt,name=sample"`
	// Synthetic specifies the configuration to augment the dataset with synthetic data
	// +kubebuilder:validation:Optional
	// NOTE: the protobuf tag previously contained a trailing space in the field name ("synthetic "),
	// which corrupts the proto field name seen by reflection-based tooling.
	Synthetic *SyntheticSpec `json:"synthetic,omitempty" protobuf:"bytes,19,opt,name=synthetic"`
	// The specification for how to find the correlations of the Dataset's features during the profiling phase.
	// Based on the specification, the data plane will compute the correlation between each feature and will store the highest-scoring correlations
	// +kubebuilder:validation:Optional
	Correlation *CorrelationSpec `json:"correlation,omitempty" protobuf:"bytes,20,opt,name=correlation"`
	// GroupBy specifies the configuration to partition the dataset by groups
	// By default, this dataset is assigned
	// +kubebuilder:validation:Optional
	GroupBy *GroupBySpec `json:"groupBy,omitempty" protobuf:"bytes,24,opt,name=groupBy"`
	// Key contains the collection of group keys, in the case that the dataset represents hierarchical time-series data.
	// The keys will be used to perform a group by
	// +kubebuilder:validation:Optional
	Key []string `json:"key,omitempty" protobuf:"bytes,26,rep,name=key"`
	// ModelClassName specifies the name of the Model Class which created the Dataset, if applicable
	// +kubebuilder:validation:Optional
	ModelClassName *string `json:"modelClassName,omitempty" protobuf:"bytes,28,opt,name=modelClassName"`
	// FeatureGroupName specifies the name of the Feature Group that will be used to create the Dataset, if applicable
	// +kubebuilder:validation:Optional
	FeatureGroupName *string `json:"featureGroupName,omitempty" protobuf:"bytes,29,opt,name=featureGroupName"`
	// Notification specifies the configuration for Alerts generated by the resource
	//+kubebuilder:validation:Optional
	Notification *catalog.NotificationSpec `json:"notification,omitempty" protobuf:"bytes,30,opt,name=notification"`
}

DatasetSpec defines the desired parameters for a Dataset

func (*DatasetSpec) DeepCopy

func (in *DatasetSpec) DeepCopy() *DatasetSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetSpec.

func (*DatasetSpec) DeepCopyInto

func (in *DatasetSpec) DeepCopyInto(out *DatasetSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DatasetSpec) Descriptor

func (*DatasetSpec) Descriptor() ([]byte, []int)

func (*DatasetSpec) Marshal

func (m *DatasetSpec) Marshal() (dAtA []byte, err error)

func (*DatasetSpec) MarshalTo

func (m *DatasetSpec) MarshalTo(dAtA []byte) (int, error)

func (*DatasetSpec) MarshalToSizedBuffer

func (m *DatasetSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DatasetSpec) ProtoMessage

func (*DatasetSpec) ProtoMessage()

func (*DatasetSpec) Reset

func (m *DatasetSpec) Reset()

func (*DatasetSpec) Size

func (m *DatasetSpec) Size() (n int)

func (*DatasetSpec) String

func (this *DatasetSpec) String() string

func (*DatasetSpec) Unmarshal

func (m *DatasetSpec) Unmarshal(dAtA []byte) error

func (*DatasetSpec) XXX_DiscardUnknown

func (m *DatasetSpec) XXX_DiscardUnknown()

func (*DatasetSpec) XXX_Marshal

func (m *DatasetSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DatasetSpec) XXX_Merge

func (m *DatasetSpec) XXX_Merge(src proto.Message)

func (*DatasetSpec) XXX_Size

func (m *DatasetSpec) XXX_Size() int

func (*DatasetSpec) XXX_Unmarshal

func (m *DatasetSpec) XXX_Unmarshal(b []byte) error

type DatasetStatistics

// DatasetStatistics contains statistics about the Dataset and every feature in the dataset.
type DatasetStatistics struct {
	// Features contains the collection of statistics for each feature
	// +kubebuilder:validation:Optional
	Features []FeatureStatistics `json:"features,omitempty" protobuf:"bytes,1,rep,name=features"`
	// Rows specifies the number of rows observed in the dataset
	// (protobuf field 2 is skipped, presumably reserved — TODO confirm against the .proto)
	// +kubebuilder:validation:Optional
	Rows int32 `json:"rows,omitempty" protobuf:"varint,3,opt,name=rows"`
	// Columns specifies the number of columns observed from the data
	// +kubebuilder:validation:Optional
	Columns int32 `json:"columns,omitempty" protobuf:"varint,4,opt,name=columns"`
	// SizeInBytes specifies the size of the data in bytes
	// +kubebuilder:validation:Optional
	SizeInBytes int32 `json:"sizeInBytes,omitempty" protobuf:"varint,5,opt,name=sizeInBytes"`
	// The top correlations between all features and the target feature
	// +kubebuilder:validation:Optional
	CorrelationsWithTarget []Correlation `json:"correlationsWithTarget,omitempty" protobuf:"bytes,6,rep,name=correlationsWithTarget"`
	// The top correlations between features computed per the CorrelationSpec of the dataset
	// +kubebuilder:validation:Optional
	TopCorrelations []Correlation `json:"topCorrelations,omitempty" protobuf:"bytes,7,rep,name=topCorrelations"`
}

DatasetStatistics contains statistics about the Dataset and every feature in the dataset

func (*DatasetStatistics) DeepCopy

func (in *DatasetStatistics) DeepCopy() *DatasetStatistics

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetStatistics.

func (*DatasetStatistics) DeepCopyInto

func (in *DatasetStatistics) DeepCopyInto(out *DatasetStatistics)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DatasetStatistics) Descriptor

func (*DatasetStatistics) Descriptor() ([]byte, []int)

func (*DatasetStatistics) Marshal

func (m *DatasetStatistics) Marshal() (dAtA []byte, err error)

func (*DatasetStatistics) MarshalTo

func (m *DatasetStatistics) MarshalTo(dAtA []byte) (int, error)

func (*DatasetStatistics) MarshalToSizedBuffer

func (m *DatasetStatistics) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DatasetStatistics) ProtoMessage

func (*DatasetStatistics) ProtoMessage()

func (*DatasetStatistics) Reset

func (m *DatasetStatistics) Reset()

func (*DatasetStatistics) Size

func (m *DatasetStatistics) Size() (n int)

func (*DatasetStatistics) String

func (this *DatasetStatistics) String() string

func (*DatasetStatistics) Unmarshal

func (m *DatasetStatistics) Unmarshal(dAtA []byte) error

func (*DatasetStatistics) XXX_DiscardUnknown

func (m *DatasetStatistics) XXX_DiscardUnknown()

func (*DatasetStatistics) XXX_Marshal

func (m *DatasetStatistics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DatasetStatistics) XXX_Merge

func (m *DatasetStatistics) XXX_Merge(src proto.Message)

func (*DatasetStatistics) XXX_Size

func (m *DatasetStatistics) XXX_Size() int

func (*DatasetStatistics) XXX_Unmarshal

func (m *DatasetStatistics) XXX_Unmarshal(b []byte) error

type DatasetStatus

// DatasetStatus defines the observed state of a Dataset object.
type DatasetStatus struct {
	// ObservedGeneration specifies the last generation that was reconciled
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,1,opt,name=observedGeneration"`
	// Active contains a collection of references to currently active snapshots
	// +optional
	Active catalog.RunReferenceList `json:"active,omitempty" protobuf:"bytes,2,rep,name=active"`
	// LastSnapshotVersion contains the integer version last used to create a snapshot
	// (protobuf field 3 is skipped, presumably reserved — TODO confirm against the .proto)
	// +kubebuilder:default:=0
	LastSnapshotVersion catalog.Version `json:"lastSnapshotVersion" protobuf:"varint,4,opt,name=lastSnapshotVersion"`
	// AvailableSnapshots contains the collection of snapshot references which are ready for use.
	// Each version corresponds with an existing DatasetSnapshot resource
	AvailableSnapshots catalog.RunReferenceList `json:"availableSnapshots,omitempty" protobuf:"bytes,5,opt,name=availableSnapshots"`
	// LastSnapshotAt specifies the time at which a snapshot was last created for the Dataset
	//+kubebuilder:validation:Optional
	LastSnapshotAt *metav1.Time `json:"lastSnapshotAt,omitempty" protobuf:"bytes,6,opt,name=lastSnapshotAt"`
	// LastFailureMessage specifies the failure message of the last snapshot. If the last snapshot succeeded, the field will be cleared
	//+kubebuilder:validation:Optional
	LastFailureMessage *string `json:"lastFailureMessage,omitempty" protobuf:"bytes,7,opt,name=lastFailureMessage"`
	// UpdatedAt specifies the last time the Dataset was changed
	//+kubebuilder:validation:Optional
	UpdatedAt *metav1.Time `json:"updatedAt,omitempty" protobuf:"bytes,8,opt,name=updatedAt"`
	// Schedule specifies the status of the run schedule
	//+kubebuilder:validation:Optional
	Schedule catalog.RunScheduleStatus `json:"schedule,omitempty" protobuf:"bytes,9,opt,name=schedule"`
	// Conditions is the collection of status conditions for the Dataset
	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []metav1.Condition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,10,rep,name=conditions"`
}

DatasetStatus defines the observed state of a Dataset object

func (*DatasetStatus) DeepCopy

func (in *DatasetStatus) DeepCopy() *DatasetStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetStatus.

func (*DatasetStatus) DeepCopyInto

func (in *DatasetStatus) DeepCopyInto(out *DatasetStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DatasetStatus) Descriptor

func (*DatasetStatus) Descriptor() ([]byte, []int)

func (*DatasetStatus) Marshal

func (m *DatasetStatus) Marshal() (dAtA []byte, err error)

func (*DatasetStatus) MarshalTo

func (m *DatasetStatus) MarshalTo(dAtA []byte) (int, error)

func (*DatasetStatus) MarshalToSizedBuffer

func (m *DatasetStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DatasetStatus) ProtoMessage

func (*DatasetStatus) ProtoMessage()

func (*DatasetStatus) Reset

func (m *DatasetStatus) Reset()

func (*DatasetStatus) Size

func (m *DatasetStatus) Size() (n int)

func (*DatasetStatus) String

func (this *DatasetStatus) String() string

func (*DatasetStatus) Unmarshal

func (m *DatasetStatus) Unmarshal(dAtA []byte) error

func (*DatasetStatus) XXX_DiscardUnknown

func (m *DatasetStatus) XXX_DiscardUnknown()

func (*DatasetStatus) XXX_Marshal

func (m *DatasetStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DatasetStatus) XXX_Merge

func (m *DatasetStatus) XXX_Merge(src proto.Message)

func (*DatasetStatus) XXX_Size

func (m *DatasetStatus) XXX_Size() int

func (*DatasetStatus) XXX_Unmarshal

func (m *DatasetStatus) XXX_Unmarshal(b []byte) error

type DecisionType added in v0.6.252

// DecisionType is a string enumeration of decision kinds; its valid values
// are defined where the type is used (not visible in this section).
type DecisionType string

type Delimiter

// Delimiter specifies a character, or a named group of characters, used to
// separate fields or records in delimited text data (see the Delimiter* constants).
type Delimiter string

Delimiter specifies a character, or a named group of characters, used as a field or record separator. +kubebuilder:validation:Enum="crlf";"cr";"lf";"semicolon";"colon";"comma";"tab";"space";"pipe";"auto"

// The valid Delimiter values; DelimiterAuto requests automatic detection.
const (
	DelimiterCRLF      Delimiter = "crlf"
	DelimiterCR        Delimiter = "cr"
	DelimiterLF        Delimiter = "lf"
	DelimiterSemicolon Delimiter = "semicolon"
	DelimiterColon     Delimiter = "colon"
	DelimiterComma     Delimiter = "comma"
	DelimiterTab       Delimiter = "tab"
	DelimiterSpace     Delimiter = "space"
	DelimiterPipe      Delimiter = "pipe"
	DelimiterAuto      Delimiter = "auto"
)

type DriftThreshold added in v0.4.1017

// DriftThreshold defines a threshold value for a single drift metric.
type DriftThreshold struct {
	// The metric type name (e.g. F1 / Accuracy)
	// +kubebuilder:validation:Required
	Metric catalog.Metric `json:"metric" protobuf:"bytes,1,opt,name=metric"`
	// The value of the metric for quantitative observations
	// +kubebuilder:validation:Required
	// +required
	// NOTE: doubles are encoded as 64-bit fixed-width values on the wire; the tag
	// was previously "bytes", which is the wire type for length-delimited fields.
	Value float64 `json:"value" protobuf:"fixed64,2,opt,name=value"`
}

DriftThreshold defines a threshold value for a single drift metric

func (*DriftThreshold) DeepCopy added in v0.4.1017

func (in *DriftThreshold) DeepCopy() *DriftThreshold

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DriftThreshold.

func (*DriftThreshold) DeepCopyInto added in v0.4.1017

func (in *DriftThreshold) DeepCopyInto(out *DriftThreshold)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DriftThreshold) Descriptor added in v0.4.1017

func (*DriftThreshold) Descriptor() ([]byte, []int)

func (*DriftThreshold) Marshal added in v0.4.1017

func (m *DriftThreshold) Marshal() (dAtA []byte, err error)

func (*DriftThreshold) MarshalTo added in v0.4.1017

func (m *DriftThreshold) MarshalTo(dAtA []byte) (int, error)

func (*DriftThreshold) MarshalToSizedBuffer added in v0.4.1017

func (m *DriftThreshold) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DriftThreshold) ProtoMessage added in v0.4.1017

func (*DriftThreshold) ProtoMessage()

func (*DriftThreshold) Reset added in v0.4.1017

func (m *DriftThreshold) Reset()

func (*DriftThreshold) Size added in v0.4.1017

func (m *DriftThreshold) Size() (n int)

func (*DriftThreshold) String added in v0.4.1017

func (this *DriftThreshold) String() string

func (*DriftThreshold) Unmarshal added in v0.4.1017

func (m *DriftThreshold) Unmarshal(dAtA []byte) error

func (*DriftThreshold) XXX_DiscardUnknown added in v0.4.1017

func (m *DriftThreshold) XXX_DiscardUnknown()

func (*DriftThreshold) XXX_Marshal added in v0.4.1017

func (m *DriftThreshold) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DriftThreshold) XXX_Merge added in v0.4.1017

func (m *DriftThreshold) XXX_Merge(src proto.Message)

func (*DriftThreshold) XXX_Size added in v0.4.1017

func (m *DriftThreshold) XXX_Size() int

func (*DriftThreshold) XXX_Unmarshal added in v0.4.1017

func (m *DriftThreshold) XXX_Unmarshal(b []byte) error

type Entity

// Entity is the Kubernetes custom resource for a feature-store entity.
// Feature groups are attached to an Entity and joined via its join key
// (see EntitySpec.JoinKey).
type Entity struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	// Spec holds the desired state of the Entity.
	Spec              EntitySpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	// Status holds the observed state of the Entity.
	//+optional
	Status EntityStatus `json:"status" protobuf:"bytes,3,opt,name=status"`
}

+kubebuilder:object:root=true +kubebuilder:storageversion +kubebuilder:resource:path=entities,singular=entity,shortName=et,categories={data,modela} +kubebuilder:subresource:status +kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status",description="" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Description",type="string",JSONPath=".spec.description" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.version" +kubebuilder:printcolumn:name="JoinKey",type="string",JSONPath=".spec.joinKey" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" Entity represents an entity object

func (*Entity) AddFinalizer

func (entity *Entity) AddFinalizer()

func (*Entity) CreateOrUpdateCond

func (entity *Entity) CreateOrUpdateCond(cond metav1.Condition)

Merge or update condition

func (*Entity) DeepCopy

func (in *Entity) DeepCopy() *Entity

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Entity.

func (*Entity) DeepCopyInto

func (in *Entity) DeepCopyInto(out *Entity)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*Entity) DeepCopyObject

func (in *Entity) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*Entity) Default

func (entity *Entity) Default()

No defaults in this current release

func (*Entity) Descriptor

func (*Entity) Descriptor() ([]byte, []int)

func (*Entity) GetCond

func (entity *Entity) GetCond(t string) metav1.Condition

func (*Entity) GetCondIdx

func (entity *Entity) GetCondIdx(t string) int

func (Entity) GetObservedGeneration added in v0.6.252

func (entity Entity) GetObservedGeneration() int64

func (Entity) GetStatus added in v0.6.252

func (entity Entity) GetStatus() proto.Message

func (Entity) HasFinalizer

func (entity Entity) HasFinalizer() bool

func (*Entity) IsGitObj

func (entity *Entity) IsGitObj() bool

func (Entity) IsReady

func (entity Entity) IsReady() bool

func (Entity) Key

func (entity Entity) Key() string

func (*Entity) LabelWithCommit

func (entity *Entity) LabelWithCommit(commit string, uname string, branch string)

func (*Entity) MarkReady

func (entity *Entity) MarkReady()

func (*Entity) Marshal

func (m *Entity) Marshal() (dAtA []byte, err error)

func (*Entity) MarshalTo

func (m *Entity) MarshalTo(dAtA []byte) (int, error)

func (*Entity) MarshalToSizedBuffer

func (m *Entity) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*Entity) ProtoMessage

func (*Entity) ProtoMessage()

func (*Entity) RemoveFinalizer

func (entity *Entity) RemoveFinalizer()

func (Entity) RepEntry

func (entity Entity) RepEntry() (string, error)

func (Entity) RepPath

func (entity Entity) RepPath(root string) (string, error)

Return the on disk rep location

func (*Entity) Reset

func (m *Entity) Reset()

func (*Entity) SetChanged

func (entity *Entity) SetChanged()

func (Entity) SetObservedGeneration added in v0.6.252

func (entity Entity) SetObservedGeneration(generation int64)

func (*Entity) SetStatus added in v0.6.252

func (entity *Entity) SetStatus(status interface{})

func (Entity) SetUpdatedAt added in v0.6.252

func (entity Entity) SetUpdatedAt(time *metav1.Time)

func (*Entity) SetupWebhookWithManager

func (entity *Entity) SetupWebhookWithManager(mgr ctrl.Manager) error

Set up the webhook with the manager.

func (*Entity) Size

func (m *Entity) Size() (n int)

func (*Entity) String

func (this *Entity) String() string

func (*Entity) Unmarshal

func (m *Entity) Unmarshal(dAtA []byte) error

func (Entity) ValidateCreate

func (entity Entity) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (Entity) ValidateDelete

func (entity Entity) ValidateDelete() error

func (Entity) ValidateUpdate

func (entity Entity) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*Entity) XXX_DiscardUnknown

func (m *Entity) XXX_DiscardUnknown()

func (*Entity) XXX_Marshal

func (m *Entity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*Entity) XXX_Merge

func (m *Entity) XXX_Merge(src proto.Message)

func (*Entity) XXX_Size

func (m *Entity) XXX_Size() int

func (*Entity) XXX_Unmarshal

func (m *Entity) XXX_Unmarshal(b []byte) error

type EntityConditionType

// EntityConditionType is the type of a status condition reported on an Entity
// (stored in EntityStatus.Conditions).
type EntityConditionType string

Condition on the entity

type EntityList

// EntityList contains a list of Entity resources; it is the result type
// returned by list operations on entities.
type EntityList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	// Items is the list of Entity resources.
	Items           []Entity `json:"items" protobuf:"bytes,2,rep,name=items"`
}

+kubebuilder:object:root=true EntityList contains a list of Entity

func (*EntityList) DeepCopy

func (in *EntityList) DeepCopy() *EntityList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EntityList.

func (*EntityList) DeepCopyInto

func (in *EntityList) DeepCopyInto(out *EntityList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*EntityList) DeepCopyObject

func (in *EntityList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*EntityList) Descriptor

func (*EntityList) Descriptor() ([]byte, []int)

func (*EntityList) Marshal

func (m *EntityList) Marshal() (dAtA []byte, err error)

func (*EntityList) MarshalTo

func (m *EntityList) MarshalTo(dAtA []byte) (int, error)

func (*EntityList) MarshalToSizedBuffer

func (m *EntityList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*EntityList) ProtoMessage

func (*EntityList) ProtoMessage()

func (*EntityList) Reset

func (m *EntityList) Reset()

func (*EntityList) Size

func (m *EntityList) Size() (n int)

func (*EntityList) String

func (this *EntityList) String() string

func (*EntityList) Unmarshal

func (m *EntityList) Unmarshal(dAtA []byte) error

func (*EntityList) XXX_DiscardUnknown

func (m *EntityList) XXX_DiscardUnknown()

func (*EntityList) XXX_Marshal

func (m *EntityList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*EntityList) XXX_Merge

func (m *EntityList) XXX_Merge(src proto.Message)

func (*EntityList) XXX_Size

func (m *EntityList) XXX_Size() int

func (*EntityList) XXX_Unmarshal

func (m *EntityList) XXX_Unmarshal(b []byte) error

type EntitySpec

// EntitySpec contains the desired state of an Entity.
type EntitySpec struct {
	// The reference to the tenant which the object exists under
	// +kubebuilder:validation:Optional
	TenantRef *v1.ObjectReference `json:"tenantRef,omitempty" protobuf:"bytes,1,opt,name=tenantRef"`
	// Description of the entity
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	// +kubebuilder:validation:MaxLength=512
	Description *string `json:"description,omitempty" protobuf:"bytes,3,opt,name=description"`
	// JoinKey is the key used to join all the feature groups of this entity.
	// The join key can be either a UUID or a sequence number.
	// An Entity must have a join key.
	// NOTE(review): protobuf field 2 is unused here (Description is field 3) —
	// presumably a removed field; confirm against the .proto history.
	JoinKey string `json:"joinKey,omitempty" protobuf:"bytes,4,rep,name=joinKey"`
	// Owner of this Entity
	// +kubebuilder:default:="no-one"
	// +kubebuilder:validation:Pattern="[a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*"
	// +kubebuilder:validation:Optional
	Owner *string `json:"owner,omitempty" protobuf:"bytes,5,opt,name=owner"`
	// Tags attached to this entity.
	// +kubebuilder:validation:Optional
	Tags []string `json:"tags,omitempty" protobuf:"bytes,6,rep,name=tags"`
}

EntitySpec contains the desired state of an Entity.

func (*EntitySpec) DeepCopy

func (in *EntitySpec) DeepCopy() *EntitySpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EntitySpec.

func (*EntitySpec) DeepCopyInto

func (in *EntitySpec) DeepCopyInto(out *EntitySpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*EntitySpec) Descriptor

func (*EntitySpec) Descriptor() ([]byte, []int)

func (*EntitySpec) Marshal

func (m *EntitySpec) Marshal() (dAtA []byte, err error)

func (*EntitySpec) MarshalTo

func (m *EntitySpec) MarshalTo(dAtA []byte) (int, error)

func (*EntitySpec) MarshalToSizedBuffer

func (m *EntitySpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*EntitySpec) ProtoMessage

func (*EntitySpec) ProtoMessage()

func (*EntitySpec) Reset

func (m *EntitySpec) Reset()

func (*EntitySpec) Size

func (m *EntitySpec) Size() (n int)

func (*EntitySpec) String

func (this *EntitySpec) String() string

func (*EntitySpec) Unmarshal

func (m *EntitySpec) Unmarshal(dAtA []byte) error

func (*EntitySpec) XXX_DiscardUnknown

func (m *EntitySpec) XXX_DiscardUnknown()

func (*EntitySpec) XXX_Marshal

func (m *EntitySpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*EntitySpec) XXX_Merge

func (m *EntitySpec) XXX_Merge(src proto.Message)

func (*EntitySpec) XXX_Size

func (m *EntitySpec) XXX_Size() int

func (*EntitySpec) XXX_Unmarshal

func (m *EntitySpec) XXX_Unmarshal(b []byte) error

type EntityStatus

// EntityStatus defines the observed state of an Entity.
type EntityStatus struct {
	// ObservedGeneration is the Last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,1,opt,name=observedGeneration"`

	// Last time the object was updated
	//+kubebuilder:validation:Optional
	UpdatedAt *metav1.Time `json:"updatedAt,omitempty" protobuf:"bytes,2,opt,name=updatedAt"`

	// Conditions is the set of status conditions for the Entity,
	// merged by condition type on patch.
	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []metav1.Condition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,3,rep,name=conditions"`
}

EntityStatus defines the observed state of Entity

func (*EntityStatus) DeepCopy

func (in *EntityStatus) DeepCopy() *EntityStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EntityStatus.

func (*EntityStatus) DeepCopyInto

func (in *EntityStatus) DeepCopyInto(out *EntityStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*EntityStatus) Descriptor

func (*EntityStatus) Descriptor() ([]byte, []int)

func (*EntityStatus) Marshal

func (m *EntityStatus) Marshal() (dAtA []byte, err error)

func (*EntityStatus) MarshalTo

func (m *EntityStatus) MarshalTo(dAtA []byte) (int, error)

func (*EntityStatus) MarshalToSizedBuffer

func (m *EntityStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*EntityStatus) ProtoMessage

func (*EntityStatus) ProtoMessage()

func (*EntityStatus) Reset

func (m *EntityStatus) Reset()

func (*EntityStatus) Size

func (m *EntityStatus) Size() (n int)

func (*EntityStatus) String

func (this *EntityStatus) String() string

func (*EntityStatus) Unmarshal

func (m *EntityStatus) Unmarshal(dAtA []byte) error

func (*EntityStatus) XXX_DiscardUnknown

func (m *EntityStatus) XXX_DiscardUnknown()

func (*EntityStatus) XXX_Marshal

func (m *EntityStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*EntityStatus) XXX_Merge

func (m *EntityStatus) XXX_Merge(src proto.Message)

func (*EntityStatus) XXX_Size

func (m *EntityStatus) XXX_Size() int

func (*EntityStatus) XXX_Unmarshal

func (m *EntityStatus) XXX_Unmarshal(b []byte) error

type EscapeChar

// EscapeChar selects an escape-character option; valid values are
// declared in the const block below.
type EscapeChar string

+kubebuilder:validation:Enum="single-quote";"double-quote";"tilda";"none";"auto"

// Valid EscapeChar values, matching the kubebuilder validation enum
// declared on EscapeChar.
const (
	SingleEscapeChar EscapeChar = "single-quote"
	DoubleEscapeChar EscapeChar = "double-quote"
	TildaEscapeChar  EscapeChar = "tilda"
	NoneEscapeChar   EscapeChar = "none"
	AutoEscapeChar   EscapeChar = "auto"
)

type ExcelNotebookSpec

// ExcelNotebookSpec specifies how to locate and read tabular data
// within an Excel file (which sheet, where the header row is, and the
// cell area holding the data).
type ExcelNotebookSpec struct {
	// Indicates if the excel reader should use the first sheet that contains data
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	FirstSheetWithData *bool `json:"firstSheetWithData,omitempty" protobuf:"varint,1,opt,name=firstSheetWithData"`
	// The name of the sheet that exists in the excel file to read data from
	// +kubebuilder:validation:Optional
	SheetName *string `json:"sheetName,omitempty" protobuf:"bytes,2,opt,name=sheetName"`
	// The index of the sheet in the excel file to read data from
	// +kubebuilder:validation:Optional
	SheetIndex *int32 `json:"sheetIndex,omitempty" protobuf:"varint,3,opt,name=sheetIndex"`
	// The position of the row that contains the column names (i.e. the header)
	// +kubebuilder:validation:Optional
	ColumnNamesRow *int32 `json:"columnNameRow,omitempty" protobuf:"varint,4,opt,name=columnNameRow"`
	// The specification for the bounds of the data
	// +kubebuilder:validation:Optional
	Data ExcelSheetArea `json:"data,omitempty" protobuf:"bytes,5,opt,name=data"`
}

ExcelNotebookSpec specifies the format of an excel file

func (*ExcelNotebookSpec) DeepCopy

func (in *ExcelNotebookSpec) DeepCopy() *ExcelNotebookSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExcelNotebookSpec.

func (*ExcelNotebookSpec) DeepCopyInto

func (in *ExcelNotebookSpec) DeepCopyInto(out *ExcelNotebookSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ExcelNotebookSpec) Descriptor

func (*ExcelNotebookSpec) Descriptor() ([]byte, []int)

func (*ExcelNotebookSpec) Marshal

func (m *ExcelNotebookSpec) Marshal() (dAtA []byte, err error)

func (*ExcelNotebookSpec) MarshalTo

func (m *ExcelNotebookSpec) MarshalTo(dAtA []byte) (int, error)

func (*ExcelNotebookSpec) MarshalToSizedBuffer

func (m *ExcelNotebookSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*ExcelNotebookSpec) ProtoMessage

func (*ExcelNotebookSpec) ProtoMessage()

func (*ExcelNotebookSpec) Reset

func (m *ExcelNotebookSpec) Reset()

func (*ExcelNotebookSpec) Size

func (m *ExcelNotebookSpec) Size() (n int)

func (*ExcelNotebookSpec) String

func (this *ExcelNotebookSpec) String() string

func (*ExcelNotebookSpec) Unmarshal

func (m *ExcelNotebookSpec) Unmarshal(dAtA []byte) error

func (*ExcelNotebookSpec) XXX_DiscardUnknown

func (m *ExcelNotebookSpec) XXX_DiscardUnknown()

func (*ExcelNotebookSpec) XXX_Marshal

func (m *ExcelNotebookSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ExcelNotebookSpec) XXX_Merge

func (m *ExcelNotebookSpec) XXX_Merge(src proto.Message)

func (*ExcelNotebookSpec) XXX_Size

func (m *ExcelNotebookSpec) XXX_Size() int

func (*ExcelNotebookSpec) XXX_Unmarshal

func (m *ExcelNotebookSpec) XXX_Unmarshal(b []byte) error

type ExcelSheetArea

// ExcelSheetArea specifies the bounds of the data within an excel sheet.
// When EntireSheet is false, the From/To column and row fields delimit
// the rectangular area to read.
// NOTE(review): unlike sibling specs, the fields below carry no
// +kubebuilder:validation:Optional markers — confirm this is intentional.
type ExcelSheetArea struct {
	// Indicates if the excel reader should read the entire sheet; if false, it will read only within the bounds
	// specified by the `To` and `From` fields of the ExcelSheetArea
	// +kubebuilder:default:=false
	EntireSheet *bool `json:"entireSheet,omitempty" protobuf:"varint,1,opt,name=entireSheet"`
	// If reading part of the excel sheet, start with the column in this position
	FromColumn *int32 `json:"fromColumn,omitempty" protobuf:"varint,2,opt,name=fromColumn"`
	// If reading part of the excel sheet, end with the column in this position
	ToColumn *int32 `json:"toColumn,omitempty" protobuf:"varint,3,opt,name=toColumn"`
	// If reading part of the excel sheet, start with the row in this position
	FromRow *int32 `json:"fromRow,omitempty" protobuf:"varint,4,opt,name=fromRow"`
	// If reading part of the excel sheet, end with the row in this position
	ToRow *int32 `json:"toRow,omitempty" protobuf:"varint,5,opt,name=toRow"`
}

ExcelSheetArea specifies the bounds of the data within an excel sheet

func (*ExcelSheetArea) DeepCopy

func (in *ExcelSheetArea) DeepCopy() *ExcelSheetArea

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExcelSheetArea.

func (*ExcelSheetArea) DeepCopyInto

func (in *ExcelSheetArea) DeepCopyInto(out *ExcelSheetArea)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ExcelSheetArea) Descriptor

func (*ExcelSheetArea) Descriptor() ([]byte, []int)

func (*ExcelSheetArea) Marshal

func (m *ExcelSheetArea) Marshal() (dAtA []byte, err error)

func (*ExcelSheetArea) MarshalTo

func (m *ExcelSheetArea) MarshalTo(dAtA []byte) (int, error)

func (*ExcelSheetArea) MarshalToSizedBuffer

func (m *ExcelSheetArea) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*ExcelSheetArea) ProtoMessage

func (*ExcelSheetArea) ProtoMessage()

func (*ExcelSheetArea) Reset

func (m *ExcelSheetArea) Reset()

func (*ExcelSheetArea) Size

func (m *ExcelSheetArea) Size() (n int)

func (*ExcelSheetArea) String

func (this *ExcelSheetArea) String() string

func (*ExcelSheetArea) Unmarshal

func (m *ExcelSheetArea) Unmarshal(dAtA []byte) error

func (*ExcelSheetArea) XXX_DiscardUnknown

func (m *ExcelSheetArea) XXX_DiscardUnknown()

func (*ExcelSheetArea) XXX_Marshal

func (m *ExcelSheetArea) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ExcelSheetArea) XXX_Merge

func (m *ExcelSheetArea) XXX_Merge(src proto.Message)

func (*ExcelSheetArea) XXX_Size

func (m *ExcelSheetArea) XXX_Size() int

func (*ExcelSheetArea) XXX_Unmarshal

func (m *ExcelSheetArea) XXX_Unmarshal(b []byte) error

type FeatureGroup added in v0.5.282

// FeatureGroup is the Kubernetes custom resource representing a group of
// features that belongs to an Entity (see FeatureGroupSpec.EntityName).
type FeatureGroup struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	// Spec holds the desired state of the FeatureGroup.
	Spec              FeatureGroupSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	// Status holds the observed state of the FeatureGroup.
	//+optional
	Status FeatureGroupStatus `json:"status" protobuf:"bytes,3,opt,name=status"`
}

+kubebuilder:object:root=true +kubebuilder:storageversion +kubebuilder:resource:path=featuregroups,singular=featuregroup,shortName="fg",categories={data,modela} +kubebuilder:subresource:status +kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status",description="" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.version" +kubebuilder:printcolumn:name="Entity",type="string",JSONPath=".spec.entityName" +kubebuilder:printcolumn:name="Schedule",type="string",JSONPath=".spec.schedule",description="" +kubebuilder:printcolumn:name="Next Ingest",type="date",JSONPath=".status.ingestSchedule.nextRunAt",description="" +kubebuilder:printcolumn:name="Next Sync",type="date",JSONPath=".status.syncSchedule.nextRunAt",description="" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" FeatureGroup represents a group of features

func ParseFeatureGroupYaml added in v0.5.282

func ParseFeatureGroupYaml(content []byte) (*FeatureGroup, error)

func (*FeatureGroup) AddConfiditions added in v0.5.282

func (fg *FeatureGroup) AddConfiditions()

func (*FeatureGroup) AddFinalizer added in v0.5.282

func (fg *FeatureGroup) AddFinalizer()

func (*FeatureGroup) CreateOrUpdateCond added in v0.5.282

func (fg *FeatureGroup) CreateOrUpdateCond(cond metav1.Condition)

Merge or update condition

func (*FeatureGroup) DeepCopy added in v0.5.282

func (in *FeatureGroup) DeepCopy() *FeatureGroup

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureGroup.

func (*FeatureGroup) DeepCopyInto added in v0.5.282

func (in *FeatureGroup) DeepCopyInto(out *FeatureGroup)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeatureGroup) DeepCopyObject added in v0.5.282

func (in *FeatureGroup) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*FeatureGroup) Default added in v0.5.282

func (fg *FeatureGroup) Default()

No defaults in this current release

func (*FeatureGroup) Descriptor added in v0.5.282

func (*FeatureGroup) Descriptor() ([]byte, []int)

func (FeatureGroup) ErrorAlert added in v0.5.309

func (fh FeatureGroup) ErrorAlert(notification catalog.NotificationSpec, err error) *infra.Alert

func (FeatureGroup) GetCond added in v0.5.282

func (fg FeatureGroup) GetCond(t string) metav1.Condition

func (FeatureGroup) GetCondIdx added in v0.5.282

func (fg FeatureGroup) GetCondIdx(t string) int

func (FeatureGroup) GetObservedGeneration added in v0.6.252

func (featuregroup FeatureGroup) GetObservedGeneration() int64

func (FeatureGroup) GetStatus added in v0.6.252

func (featuregroup FeatureGroup) GetStatus() proto.Message

func (*FeatureGroup) HasFinalizer added in v0.5.282

func (fg *FeatureGroup) HasFinalizer() bool

func (FeatureGroup) IsDeleted added in v0.5.315

func (fg FeatureGroup) IsDeleted() bool

func (*FeatureGroup) IsGitObj added in v0.5.282

func (fg *FeatureGroup) IsGitObj() bool

func (*FeatureGroup) IsIngesting added in v0.5.373

func (fg *FeatureGroup) IsIngesting() bool

func (FeatureGroup) IsReady added in v0.5.282

func (fg FeatureGroup) IsReady() bool

func (*FeatureGroup) IsSynced added in v0.5.315

func (fg *FeatureGroup) IsSynced() bool

func (*FeatureGroup) IsSynching added in v0.5.373

func (fg *FeatureGroup) IsSynching() bool

func (FeatureGroup) Key added in v0.5.282

func (fg FeatureGroup) Key() string

func (*FeatureGroup) LabelWithCommit added in v0.5.282

func (fg *FeatureGroup) LabelWithCommit(commit string, uname string, branch string)

func (*FeatureGroup) MarkIngestFailed added in v0.5.327

func (fg *FeatureGroup) MarkIngestFailed(msg string)

func (*FeatureGroup) MarkIngested added in v0.5.327

func (fg *FeatureGroup) MarkIngested()

func (*FeatureGroup) MarkIngesting added in v0.5.327

func (fg *FeatureGroup) MarkIngesting()

////////////////////////////////////////////// Ingest //////////////////////////////////////////////

func (*FeatureGroup) MarkReady added in v0.5.282

func (fg *FeatureGroup) MarkReady()

func (*FeatureGroup) MarkSyncFailed added in v0.5.308

func (fg *FeatureGroup) MarkSyncFailed(msg string)

func (*FeatureGroup) MarkSynced added in v0.5.306

func (fg *FeatureGroup) MarkSynced()

func (*FeatureGroup) MarkSyncing added in v0.5.315

func (fg *FeatureGroup) MarkSyncing()

///////////////////////////////////////////// Sync ////////////////////////////////////////////

func (*FeatureGroup) Marshal added in v0.5.282

func (m *FeatureGroup) Marshal() (dAtA []byte, err error)

func (*FeatureGroup) MarshalTo added in v0.5.282

func (m *FeatureGroup) MarshalTo(dAtA []byte) (int, error)

func (*FeatureGroup) MarshalToSizedBuffer added in v0.5.282

func (m *FeatureGroup) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeatureGroup) PrefixLiveURI added in v0.5.472

func (fg *FeatureGroup) PrefixLiveURI(path string) string

func (*FeatureGroup) ProtoMessage added in v0.5.282

func (*FeatureGroup) ProtoMessage()

func (*FeatureGroup) RemoveFinalizer added in v0.5.282

func (fg *FeatureGroup) RemoveFinalizer()

func (*FeatureGroup) RepEntry added in v0.5.282

func (fg *FeatureGroup) RepEntry() (string, error)

func (*FeatureGroup) RepPath added in v0.5.282

func (fg *FeatureGroup) RepPath(root string) (string, error)

Return the on disk rep location

func (*FeatureGroup) Reset added in v0.5.282

func (m *FeatureGroup) Reset()

func (*FeatureGroup) SetChanged added in v0.5.282

func (fg *FeatureGroup) SetChanged()

func (*FeatureGroup) SetObservedGeneration added in v0.6.252

func (featuregroup *FeatureGroup) SetObservedGeneration(generation int64)

func (*FeatureGroup) SetStatus added in v0.6.252

func (featuregroup *FeatureGroup) SetStatus(status interface{})

func (*FeatureGroup) SetUpdatedAt added in v0.6.252

func (featuregroup *FeatureGroup) SetUpdatedAt(time *metav1.Time)

func (*FeatureGroup) SetupWebhookWithManager added in v0.5.282

func (fg *FeatureGroup) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*FeatureGroup) Size added in v0.5.282

func (m *FeatureGroup) Size() (n int)

func (*FeatureGroup) String added in v0.5.282

func (this *FeatureGroup) String() string

func (FeatureGroup) TenantName added in v0.5.386

func (fg FeatureGroup) TenantName() string

func (*FeatureGroup) Unmarshal added in v0.5.282

func (m *FeatureGroup) Unmarshal(dAtA []byte) error

func (FeatureGroup) ValidateCreate added in v0.5.282

func (fg FeatureGroup) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (FeatureGroup) ValidateDelete added in v0.5.282

func (fg FeatureGroup) ValidateDelete() error

func (FeatureGroup) ValidateUpdate added in v0.5.282

func (fg FeatureGroup) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*FeatureGroup) XXX_DiscardUnknown added in v0.5.282

func (m *FeatureGroup) XXX_DiscardUnknown()

func (*FeatureGroup) XXX_Marshal added in v0.5.282

func (m *FeatureGroup) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeatureGroup) XXX_Merge added in v0.5.282

func (m *FeatureGroup) XXX_Merge(src proto.Message)

func (*FeatureGroup) XXX_Size added in v0.5.282

func (m *FeatureGroup) XXX_Size() int

func (*FeatureGroup) XXX_Unmarshal added in v0.5.282

func (m *FeatureGroup) XXX_Unmarshal(b []byte) error

type FeatureGroupConditionType added in v0.5.282

// FeatureGroupConditionType is the type of a status condition reported on a
// FeatureGroup (stored in the FeatureGroup's status conditions).
type FeatureGroupConditionType string

FeatureGroupConditionType

type FeatureGroupList added in v0.5.282

// FeatureGroupList contains a list of FeatureGroup resources; it is the
// result type returned by list operations on feature groups.
type FeatureGroupList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	// Items is the list of FeatureGroup resources.
	Items           []FeatureGroup `json:"items" protobuf:"bytes,2,rep,name=items"`
}

+kubebuilder:object:root=true FeatureGroupList contains a list of FeatureGroup objects

func (*FeatureGroupList) DeepCopy added in v0.5.282

func (in *FeatureGroupList) DeepCopy() *FeatureGroupList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureGroupList.

func (*FeatureGroupList) DeepCopyInto added in v0.5.282

func (in *FeatureGroupList) DeepCopyInto(out *FeatureGroupList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeatureGroupList) DeepCopyObject added in v0.5.282

func (in *FeatureGroupList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*FeatureGroupList) Descriptor added in v0.5.282

func (*FeatureGroupList) Descriptor() ([]byte, []int)

func (*FeatureGroupList) Marshal added in v0.5.282

func (m *FeatureGroupList) Marshal() (dAtA []byte, err error)

func (*FeatureGroupList) MarshalTo added in v0.5.282

func (m *FeatureGroupList) MarshalTo(dAtA []byte) (int, error)

func (*FeatureGroupList) MarshalToSizedBuffer added in v0.5.282

func (m *FeatureGroupList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeatureGroupList) ProtoMessage added in v0.5.282

func (*FeatureGroupList) ProtoMessage()

func (*FeatureGroupList) Reset added in v0.5.282

func (m *FeatureGroupList) Reset()

func (*FeatureGroupList) Size added in v0.5.282

func (m *FeatureGroupList) Size() (n int)

func (*FeatureGroupList) String added in v0.5.282

func (this *FeatureGroupList) String() string

func (*FeatureGroupList) Unmarshal added in v0.5.282

func (m *FeatureGroupList) Unmarshal(dAtA []byte) error

func (*FeatureGroupList) XXX_DiscardUnknown added in v0.5.282

func (m *FeatureGroupList) XXX_DiscardUnknown()

func (*FeatureGroupList) XXX_Marshal added in v0.5.282

func (m *FeatureGroupList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeatureGroupList) XXX_Merge added in v0.5.282

func (m *FeatureGroupList) XXX_Merge(src proto.Message)

func (*FeatureGroupList) XXX_Size added in v0.5.282

func (m *FeatureGroupList) XXX_Size() int

func (*FeatureGroupList) XXX_Unmarshal added in v0.5.282

func (m *FeatureGroupList) XXX_Unmarshal(b []byte) error

type FeatureGroupPhase added in v0.5.306

// FeatureGroupPhase is the high-level lifecycle phase of a FeatureGroup.
type FeatureGroupPhase string

// Valid FeatureGroupPhase values.
const (
	FeatureGroupPhaseSyncing   FeatureGroupPhase = "Syncing"
	FeatureGroupPhaseIngesting FeatureGroupPhase = "Ingesting"
	FeatureGroupPhaseFailed    FeatureGroupPhase = "Failed"
	FeatureGroupPhaseReady     FeatureGroupPhase = "Ready"
)

type FeatureGroupSpec added in v0.5.282

// FeatureGroupSpec contains the desired state of a FeatureGroup.
type FeatureGroupSpec struct {
	// The reference to the tenant which the object exists under
	// +kubebuilder:validation:Optional
	TenantRef *v1.ObjectReference `json:"tenantRef,omitempty" protobuf:"bytes,1,opt,name=tenantRef"`
	// Owner is the owner of the feature group.
	// +kubebuilder:default:="no-one"
	// +kubebuilder:validation:Pattern="[a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*"
	// +kubebuilder:validation:Optional
	Owner *string `json:"owner,omitempty" protobuf:"bytes,2,opt,name=owner"`
	// Description of the feature group.
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	// +kubebuilder:validation:MaxLength=512
	Description string `json:"description,omitempty" protobuf:"bytes,3,opt,name=description"`
	// The name of the Virtual Bucket where artifacts generated by Datasets related to the
	// Feature Group will be stored. If empty, it will default to the default Virtual Bucket of the Tenant
	// +kubebuilder:validation:Optional
	ArtifactBucketName *string `json:"artifactBucketName,omitempty" protobuf:"bytes,4,opt,name=artifactBucketName"`
	// How this group is ingested
	// +kubebuilder:default:="batch"
	// +kubebuilder:validation:Optional
	IngestType *catalog.FeatureStoreIngestType `json:"ingestType,omitempty" protobuf:"bytes,5,opt,name=ingestType"`
	// EntityName references the Entity this group belongs to.
	// A feature group must be part of an entity.
	EntityName string `json:"entityName,omitempty" protobuf:"bytes,6,opt,name=entityName"`
	// Tags for this feature group.
	// +kubebuilder:validation:Optional
	Tags []string `json:"tags,omitempty" protobuf:"bytes,7,rep,name=tags"`
	// Schedule for ingesting the data of the feature group.
	// On virtual features (e.g. where the data already resides in a table)
	// the ingest will just perform a feature profile and run the feature group unit tests.
	// +kubebuilder:validation:Optional
	IngestSchedule catalog.RunSchedule `json:"ingestSchedule,omitempty" protobuf:"bytes,8,opt,name=ingestSchedule"`
	// Schedule for syncing the feature group.
	// NOTE(review): the original comment here was a copy of IngestSchedule's;
	// confirm the intended sync semantics.
	// +kubebuilder:validation:Optional
	SyncSchedule catalog.RunSchedule `json:"syncSchedule,omitempty" protobuf:"bytes,9,opt,name=syncSchedule"`
	// In cases where the feature group data is stored as a flat file, the flat file
	// format defines how to read the file.
	// +kubebuilder:validation:Optional
	FlatFile *FlatFileFormatSpec `json:"flatfile,omitempty" protobuf:"bytes,10,opt,name=flatfile"`
	// The schema for this feature group.
	// NOTE(review): the original comment described "the name of the data source",
	// but the field holds a full Schema — confirm which is intended.
	// +kubebuilder:validation:Optional
	Schema Schema `json:"schema,omitempty" protobuf:"bytes,11,opt,name=schema"`
	// Unit tests to run on data from this feature group upon ingest.
	// +kubebuilder:validation:Optional
	Tests catalog.TestSuite `json:"tests,omitempty" protobuf:"bytes,12,opt,name=tests"`
	// Specify the data for this feature group.
	// This can be a table, a view or a file on S3.
	// +kubebuilder:validation:Optional
	Location catalog.DataLocation `json:"location,omitempty" protobuf:"bytes,13,opt,name=location"`
	// The time column (presumably its name, given the string type). May be
	// unset if the feature group does not have a time column.
	// +kubebuilder:validation:Optional
	TimeColumn *string `json:"timeColumn,omitempty" protobuf:"bytes,14,opt,name=timeColumn"`
	// The time column format
	// +kubebuilder:validation:Optional
	TimeColumnFormat *string `json:"timeColumnFormat,omitempty" protobuf:"bytes,15,opt,name=timeColumnFormat"`
	// The feature group primary key. This is usually the key that is used to join the feature groups
	// to other feature groups in the entity
	// NOTE(review): no +kubebuilder:validation:Optional marker here, unlike the
	// surrounding optional fields — confirm whether this is intentional.
	KeyColumn *string `json:"keyColumn,omitempty" protobuf:"bytes,16,opt,name=keyColumn"`
	// Materialization settings for the feature group.
	// +kubebuilder:validation:Optional
	Materialization MaterializationSpec `json:"materialization,omitempty" protobuf:"bytes,17,opt,name=materialization"`
	// Resources used for the ingest and the sync
	//+kubebuilder:validation:Optional
	Resources catalog.ResourceSpec `json:"resources,omitempty" protobuf:"bytes,18,opt,name=resources"`
	// The reference to the Lab under which Jobs created by the Feature Group will be executed. If empty, it
	// will default to the default Lab of the Tenant of the Feature Group.
	// +kubebuilder:validation:Optional
	LabRef *v1.ObjectReference `json:"labRef,omitempty" protobuf:"bytes,19,opt,name=labRef"`
}

FeatureGroupSpec contains the desired state of a FeatureGroup

func (*FeatureGroupSpec) DeepCopy added in v0.5.282

func (in *FeatureGroupSpec) DeepCopy() *FeatureGroupSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureGroupSpec.

func (*FeatureGroupSpec) DeepCopyInto added in v0.5.282

func (in *FeatureGroupSpec) DeepCopyInto(out *FeatureGroupSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeatureGroupSpec) Descriptor added in v0.5.282

func (*FeatureGroupSpec) Descriptor() ([]byte, []int)

func (*FeatureGroupSpec) Marshal added in v0.5.282

func (m *FeatureGroupSpec) Marshal() (dAtA []byte, err error)

func (*FeatureGroupSpec) MarshalTo added in v0.5.282

func (m *FeatureGroupSpec) MarshalTo(dAtA []byte) (int, error)

func (*FeatureGroupSpec) MarshalToSizedBuffer added in v0.5.282

func (m *FeatureGroupSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeatureGroupSpec) ProtoMessage added in v0.5.282

func (*FeatureGroupSpec) ProtoMessage()

func (*FeatureGroupSpec) Reset added in v0.5.282

func (m *FeatureGroupSpec) Reset()

func (*FeatureGroupSpec) Size added in v0.5.282

func (m *FeatureGroupSpec) Size() (n int)

func (*FeatureGroupSpec) String added in v0.5.282

func (this *FeatureGroupSpec) String() string

func (*FeatureGroupSpec) Unmarshal added in v0.5.282

func (m *FeatureGroupSpec) Unmarshal(dAtA []byte) error

func (*FeatureGroupSpec) XXX_DiscardUnknown added in v0.5.282

func (m *FeatureGroupSpec) XXX_DiscardUnknown()

func (*FeatureGroupSpec) XXX_Marshal added in v0.5.282

func (m *FeatureGroupSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeatureGroupSpec) XXX_Merge added in v0.5.282

func (m *FeatureGroupSpec) XXX_Merge(src proto.Message)

func (*FeatureGroupSpec) XXX_Size added in v0.5.282

func (m *FeatureGroupSpec) XXX_Size() int

func (*FeatureGroupSpec) XXX_Unmarshal added in v0.5.282

func (m *FeatureGroupSpec) XXX_Unmarshal(b []byte) error

type FeatureGroupStatus added in v0.5.282

// FeatureGroupStatus defines the observed state of a FeatureGroup.
type FeatureGroupStatus struct {
	// The current phase of the FeatureGroup lifecycle
	Phase FeatureGroupPhase `json:"phase,omitempty" protobuf:"bytes,2,opt,name=phase"`
	// ObservedGeneration is the last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,3,opt,name=observedGeneration"`
	// Last time the object was updated
	//+kubebuilder:validation:Optional
	UpdatedAt *metav1.Time `json:"updatedAt,omitempty" protobuf:"bytes,4,opt,name=updatedAt"`
	// The current number of rows in the feature group.
	//+kubebuilder:validation:Optional
	Rows int32 `json:"rows,omitempty" protobuf:"varint,5,opt,name=rows"`
	// The ingest schedule status. During ingest we test and profile the current feature group content.
	//+kubebuilder:validation:Optional
	IngestSchedule catalog.RunScheduleStatus `json:"ingestSchedule,omitempty" protobuf:"bytes,7,opt,name=ingestSchedule"`
	// The status of the sync schedule
	//+kubebuilder:validation:Optional
	SyncSchedule catalog.RunScheduleStatus `json:"syncSchedule,omitempty" protobuf:"bytes,8,opt,name=syncSchedule"`
	// Holds the last online table location. Set to null while the sync process is running.
	//+kubebuilder:validation:Optional
	OnlineTable catalog.DataLocation `json:"onlineTable,omitempty" protobuf:"bytes,9,opt,name=onlineTable"`
	// The last time an online table was created
	//+kubebuilder:validation:Optional
	OnlineTableCreated *metav1.Time `json:"onlineTableCreated,omitempty" protobuf:"bytes,10,opt,name=onlineTableCreated"`
	// The name of the dataset created by the last ingest run
	//+kubebuilder:validation:Optional
	IngestDatasetName string `json:"ingestDatasetName,omitempty" protobuf:"bytes,11,opt,name=ingestDatasetName"`
	// In the case of failure, the FeatureGroup resource controller will set this field with a failure reason
	//+kubebuilder:validation:Optional
	FailureReason *catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,12,opt,name=failureReason"`
	// In the case of failure, the FeatureGroup resource controller will set this field with a failure message
	//+kubebuilder:validation:Optional
	FailureMessage *string `json:"failureMessage,omitempty" protobuf:"bytes,13,opt,name=failureMessage"`
	// The current conditions of the FeatureGroup
	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []metav1.Condition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,14,rep,name=conditions"`
}

FeatureGroupStatus defines the observed state of a FeatureGroup

func (*FeatureGroupStatus) DeepCopy added in v0.5.282

func (in *FeatureGroupStatus) DeepCopy() *FeatureGroupStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureGroupStatus.

func (*FeatureGroupStatus) DeepCopyInto added in v0.5.282

func (in *FeatureGroupStatus) DeepCopyInto(out *FeatureGroupStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeatureGroupStatus) Descriptor added in v0.5.282

func (*FeatureGroupStatus) Descriptor() ([]byte, []int)

func (*FeatureGroupStatus) Marshal added in v0.5.282

func (m *FeatureGroupStatus) Marshal() (dAtA []byte, err error)

func (*FeatureGroupStatus) MarshalTo added in v0.5.282

func (m *FeatureGroupStatus) MarshalTo(dAtA []byte) (int, error)

func (*FeatureGroupStatus) MarshalToSizedBuffer added in v0.5.282

func (m *FeatureGroupStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeatureGroupStatus) ProtoMessage added in v0.5.282

func (*FeatureGroupStatus) ProtoMessage()

func (*FeatureGroupStatus) Reset added in v0.5.282

func (m *FeatureGroupStatus) Reset()

func (*FeatureGroupStatus) Size added in v0.5.282

func (m *FeatureGroupStatus) Size() (n int)

func (*FeatureGroupStatus) String added in v0.5.282

func (this *FeatureGroupStatus) String() string

func (*FeatureGroupStatus) Unmarshal added in v0.5.282

func (m *FeatureGroupStatus) Unmarshal(dAtA []byte) error

func (*FeatureGroupStatus) XXX_DiscardUnknown added in v0.5.282

func (m *FeatureGroupStatus) XXX_DiscardUnknown()

func (*FeatureGroupStatus) XXX_Marshal added in v0.5.282

func (m *FeatureGroupStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeatureGroupStatus) XXX_Merge added in v0.5.282

func (m *FeatureGroupStatus) XXX_Merge(src proto.Message)

func (*FeatureGroupStatus) XXX_Size added in v0.5.282

func (m *FeatureGroupStatus) XXX_Size() int

func (*FeatureGroupStatus) XXX_Unmarshal added in v0.5.282

func (m *FeatureGroupStatus) XXX_Unmarshal(b []byte) error

type FeatureHistogram

// FeatureHistogram represents a single feature in the feature store, holding
// the histograms of a set of columns used for unit testing and drift detection.
type FeatureHistogram struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	// Spec holds the desired state of the FeatureHistogram
	Spec              FeatureHistogramSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	// Status holds the observed state of the FeatureHistogram
	//+optional
	Status FeatureHistogramStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}

+kubebuilder:object:root=true +kubebuilder:resource:path=featurehistograms,shortName=fh,singular=featurehistogram,categories={data,modela} +kubebuilder:subresource:status +kubebuilder:printcolumn:name="Status",type="string",JSONPath=".status.phase" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Description",type="string",JSONPath=".spec.description" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.versionName" +kubebuilder:printcolumn:name="Live",type="boolean",JSONPath=".spec.live" +kubebuilder:printcolumn:name="Start",type="date",JSONPath=".spec.start" +kubebuilder:printcolumn:name="End",type="date",JSONPath=".spec.end" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" FeatureHistogram represents a single feature in the feature store.

func ParseFeatureHistogramYaml

func ParseFeatureHistogramYaml(content []byte) (*FeatureHistogram, error)

func (*FeatureHistogram) AddConditions added in v0.4.514

func (fh *FeatureHistogram) AddConditions()

func (*FeatureHistogram) AddFinalizer

func (fh *FeatureHistogram) AddFinalizer()

func (*FeatureHistogram) CreateOrUpdateCond

func (fh *FeatureHistogram) CreateOrUpdateCond(cond metav1.Condition)

Merge or update a condition.

func (*FeatureHistogram) DeepCopy

func (in *FeatureHistogram) DeepCopy() *FeatureHistogram

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureHistogram.

func (*FeatureHistogram) DeepCopyInto

func (in *FeatureHistogram) DeepCopyInto(out *FeatureHistogram)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeatureHistogram) DeepCopyObject

func (in *FeatureHistogram) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*FeatureHistogram) Default

func (fh *FeatureHistogram) Default()

No defaults in this current release

func (*FeatureHistogram) DefaultDriftThreshold added in v0.5.59

func (fh *FeatureHistogram) DefaultDriftThreshold(metric catalog.Metric) float64

Used during drift unit test generation

func (*FeatureHistogram) Descriptor

func (*FeatureHistogram) Descriptor() ([]byte, []int)

func (FeatureHistogram) DriftAlert added in v0.4.976

func (fh FeatureHistogram) DriftAlert(tenantRef *v1.ObjectReference, notifierName *string, columns []string) *infra.Alert

func (*FeatureHistogram) Drifted added in v0.5.74

func (fh *FeatureHistogram) Drifted() bool

func (FeatureHistogram) ErrorAlert added in v0.4.976

func (fh FeatureHistogram) ErrorAlert(notification catalog.NotificationSpec, err error) *infra.Alert

func (*FeatureHistogram) Expired added in v0.5.74

func (fh *FeatureHistogram) Expired() bool

Return true if the feature histogram is expired

func (*FeatureHistogram) GetCond

func (fh *FeatureHistogram) GetCond(t string) metav1.Condition

func (FeatureHistogram) GetCondIdx

func (fh FeatureHistogram) GetCondIdx(t string) int

func (FeatureHistogram) GetObservedGeneration added in v0.6.252

func (featurehistogram FeatureHistogram) GetObservedGeneration() int64

func (FeatureHistogram) GetStatus added in v0.6.252

func (featurehistogram FeatureHistogram) GetStatus() proto.Message

func (*FeatureHistogram) HasFinalizer

func (fh *FeatureHistogram) HasFinalizer() bool

func (FeatureHistogram) IsArchived added in v0.4.1023

func (fh FeatureHistogram) IsArchived() bool

func (*FeatureHistogram) IsGitObj

func (fh *FeatureHistogram) IsGitObj() bool

func (FeatureHistogram) IsReady

func (fh FeatureHistogram) IsReady() bool

func (FeatureHistogram) Key

func (fh FeatureHistogram) Key() string

func (*FeatureHistogram) LabelWithCommit

func (fh *FeatureHistogram) LabelWithCommit(commit string, uname string, branch string)

func (FeatureHistogram) Live added in v0.5.74

func (fh FeatureHistogram) Live() bool

func (*FeatureHistogram) MarkDrift added in v0.5.74

func (fh *FeatureHistogram) MarkDrift()

func (*FeatureHistogram) MarkExpired added in v0.5.74

func (fh *FeatureHistogram) MarkExpired()

Mark Expired

func (*FeatureHistogram) MarkFailed added in v0.4.975

func (fh *FeatureHistogram) MarkFailed(msg string)

func (*FeatureHistogram) MarkGenTest added in v0.5.74

func (fh *FeatureHistogram) MarkGenTest()

MarkGenTest

func (*FeatureHistogram) MarkLive added in v0.5.74

func (fh *FeatureHistogram) MarkLive()

MarkLive

func (*FeatureHistogram) MarkReady

func (fh *FeatureHistogram) MarkReady()

func (*FeatureHistogram) MarkReadyToTest added in v0.5.74

func (fh *FeatureHistogram) MarkReadyToTest()

MarkReadyToTest

func (*FeatureHistogram) MarkUnitTestFailed added in v0.5.44

func (fh *FeatureHistogram) MarkUnitTestFailed(msg string, stop bool)

func (*FeatureHistogram) MarkUnitTested added in v0.5.44

func (fh *FeatureHistogram) MarkUnitTested()

func (*FeatureHistogram) MarkUnitTesting added in v0.5.44

func (fh *FeatureHistogram) MarkUnitTesting()

MarkUnitTesting

func (*FeatureHistogram) Marshal

func (m *FeatureHistogram) Marshal() (dAtA []byte, err error)

func (*FeatureHistogram) MarshalTo

func (m *FeatureHistogram) MarshalTo(dAtA []byte) (int, error)

func (*FeatureHistogram) MarshalToSizedBuffer

func (m *FeatureHistogram) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeatureHistogram) ProtoMessage

func (*FeatureHistogram) ProtoMessage()

func (*FeatureHistogram) RemoveFinalizer

func (fh *FeatureHistogram) RemoveFinalizer()

func (*FeatureHistogram) RepEntry

func (fh *FeatureHistogram) RepEntry() (string, error)

func (*FeatureHistogram) RepPath

func (fh *FeatureHistogram) RepPath(root string) (string, error)

Return the on disk rep location

func (*FeatureHistogram) Reset

func (m *FeatureHistogram) Reset()

func (*FeatureHistogram) SetChanged

func (fh *FeatureHistogram) SetChanged()

func (*FeatureHistogram) SetObservedGeneration added in v0.6.252

func (featurehistogram *FeatureHistogram) SetObservedGeneration(generation int64)

func (*FeatureHistogram) SetStatus added in v0.6.252

func (featurehistogram *FeatureHistogram) SetStatus(status interface{})

func (*FeatureHistogram) SetUpdatedAt added in v0.6.252

func (featurehistogram *FeatureHistogram) SetUpdatedAt(time *metav1.Time)

func (*FeatureHistogram) SetupWebhookWithManager

func (fh *FeatureHistogram) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*FeatureHistogram) ShouldDetectDriftForColumn added in v0.5.59

func (fh *FeatureHistogram) ShouldDetectDriftForColumn(column string) bool

Return true if we should detect drift for column

func (*FeatureHistogram) ShouldExpire added in v0.5.77

func (fh *FeatureHistogram) ShouldExpire(maxPredictions int32) bool

check if we need to expire the feature histogram

func (*FeatureHistogram) ShouldGenerateUnitTest added in v0.5.57

func (fh *FeatureHistogram) ShouldGenerateUnitTest() bool

Check if we need to generate unit test for this feature histogram

func (*FeatureHistogram) ShouldUnitTest added in v0.5.45

func (fh *FeatureHistogram) ShouldUnitTest() bool

Check if we should compute drift: we should compute drift if we have no drift parameters and we pass the histogram.

func (*FeatureHistogram) Size

func (m *FeatureHistogram) Size() (n int)

func (*FeatureHistogram) String

func (this *FeatureHistogram) String() string

func (FeatureHistogram) UnitTested added in v0.5.44

func (fh FeatureHistogram) UnitTested() bool

func (*FeatureHistogram) Unmarshal

func (m *FeatureHistogram) Unmarshal(dAtA []byte) error

func (FeatureHistogram) ValidateCreate

func (fh FeatureHistogram) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (FeatureHistogram) ValidateDelete

func (fh FeatureHistogram) ValidateDelete() error

func (FeatureHistogram) ValidateUpdate

func (fh FeatureHistogram) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*FeatureHistogram) XXX_DiscardUnknown

func (m *FeatureHistogram) XXX_DiscardUnknown()

func (*FeatureHistogram) XXX_Marshal

func (m *FeatureHistogram) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeatureHistogram) XXX_Merge

func (m *FeatureHistogram) XXX_Merge(src proto.Message)

func (*FeatureHistogram) XXX_Size

func (m *FeatureHistogram) XXX_Size() int

func (*FeatureHistogram) XXX_Unmarshal

func (m *FeatureHistogram) XXX_Unmarshal(b []byte) error

type FeatureHistogramConditionType

type FeatureHistogramConditionType string

FeatureHistogramConditionType is the type of a condition recorded on a FeatureHistogram

type FeatureHistogramList

// FeatureHistogramList contains a list of FeatureHistogram objects.
type FeatureHistogramList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	// Items is the list of FeatureHistograms
	Items           []FeatureHistogram `json:"items" protobuf:"bytes,2,rep,name=items"`
}

+kubebuilder:object:root=true FeatureHistogramList contains a list of FeatureHistogram objects

func (*FeatureHistogramList) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureHistogramList.

func (*FeatureHistogramList) DeepCopyInto

func (in *FeatureHistogramList) DeepCopyInto(out *FeatureHistogramList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeatureHistogramList) DeepCopyObject

func (in *FeatureHistogramList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*FeatureHistogramList) Descriptor

func (*FeatureHistogramList) Descriptor() ([]byte, []int)

func (*FeatureHistogramList) Marshal

func (m *FeatureHistogramList) Marshal() (dAtA []byte, err error)

func (*FeatureHistogramList) MarshalTo

func (m *FeatureHistogramList) MarshalTo(dAtA []byte) (int, error)

func (*FeatureHistogramList) MarshalToSizedBuffer

func (m *FeatureHistogramList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeatureHistogramList) ProtoMessage

func (*FeatureHistogramList) ProtoMessage()

func (*FeatureHistogramList) Reset

func (m *FeatureHistogramList) Reset()

func (*FeatureHistogramList) Size

func (m *FeatureHistogramList) Size() (n int)

func (*FeatureHistogramList) String

func (this *FeatureHistogramList) String() string

func (*FeatureHistogramList) Unmarshal

func (m *FeatureHistogramList) Unmarshal(dAtA []byte) error

func (*FeatureHistogramList) XXX_DiscardUnknown

func (m *FeatureHistogramList) XXX_DiscardUnknown()

func (*FeatureHistogramList) XXX_Marshal

func (m *FeatureHistogramList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeatureHistogramList) XXX_Merge

func (m *FeatureHistogramList) XXX_Merge(src proto.Message)

func (*FeatureHistogramList) XXX_Size

func (m *FeatureHistogramList) XXX_Size() int

func (*FeatureHistogramList) XXX_Unmarshal

func (m *FeatureHistogramList) XXX_Unmarshal(b []byte) error

type FeatureHistogramPhase added in v0.4.975

// FeatureHistogramPhase is the current phase in the lifecycle of a FeatureHistogram.
type FeatureHistogramPhase string
// The set of phases a FeatureHistogram progresses through.
const (
	FeatureHistogramPhasePending     FeatureHistogramPhase = "Pending"        // when generating
	FeatureHistogramPhaseLive        FeatureHistogramPhase = "Live"           // when the feature histogram is live, i.e. gets updated by predictions
	FeatureHistogramPhaseExpired     FeatureHistogramPhase = "Expired"        // when the feature histogram is expired and no longer updated by predictions
	FeatureHistogramPhaseGenTest     FeatureHistogramPhase = "GeneratingTest" // when the unit tests are being generated
	FeatureHistogramPhaseReadyToTest FeatureHistogramPhase = "ReadyToTest"    // when the unit tests were generated or the histogram has tests
	FeatureHistogramPhaseUnitTesting FeatureHistogramPhase = "UnitTesting"    // when running the unit tests
	FeatureHistogramPhaseDrift       FeatureHistogramPhase = "Drift"          // when one or more columns drifted
	FeatureHistogramPhaseReady       FeatureHistogramPhase = "Ready"          // when ready and no drift was detected
	FeatureHistogramPhaseFailed      FeatureHistogramPhase = "Failed"         // when the process failed
)

type FeatureHistogramSpec

// FeatureHistogramSpec contains the desired state of a FeatureHistogram.
type FeatureHistogramSpec struct {
	// The feature owner
	// +kubebuilder:validation:Optional
	// +kubebuilder:default:="no-one"
	Owner *string `json:"owner,omitempty" protobuf:"bytes,1,opt,name=owner"`
	// VersionName references the name of a Data Product Version that describes the version of the resource.
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	VersionName string `json:"versionName,omitempty" protobuf:"bytes,2,opt,name=versionName"`
	// Description is a user-provided description of the feature histogram
	// +kubebuilder:validation:Optional
	// +kubebuilder:default:=""
	// +kubebuilder:validation:MaxLength=512
	Description *string `json:"description,omitempty" protobuf:"bytes,3,opt,name=description"`
	// The list of columns for which to generate the histograms.
	// +kubebuilder:validation:Optional
	Columns []string `json:"columns,omitempty" protobuf:"bytes,5,rep,name=columns"`
	// A reference to the dataset or predictor that contains the column with this histogram
	// +kubebuilder:validation:Optional
	SourceRef *v1.ObjectReference `json:"sourceRef,omitempty" protobuf:"bytes,6,opt,name=sourceRef"`
	// If true, this is a training dataset feature histogram. If false, the histogram was generated during serving.
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Training *bool `json:"training,omitempty" protobuf:"varint,7,opt,name=training"`
	// If true, this is an active feature histogram. This feature histogram is being live updated by the predictorlet
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Live *bool `json:"live,omitempty" protobuf:"varint,9,opt,name=live"`
	// The start time of this feature histogram. For a training dataset histogram this is set to the creation
	// time of the dataset
	// +kubebuilder:validation:Optional
	Start *metav1.Time `json:"start,omitempty" protobuf:"bytes,10,opt,name=start"`
	// The end time of the feature histogram. If reached, the predictor will start a new feature histogram
	// +kubebuilder:validation:Optional
	End *metav1.Time `json:"end,omitempty" protobuf:"bytes,11,opt,name=end"`
	// The baseline histogram to compare against for data drift calculation
	// +kubebuilder:validation:Optional
	BaseRef v1.ObjectReference `json:"baseRef,omitempty" protobuf:"bytes,12,opt,name=baseRef"`
	// Define drift thresholds. This is usually assigned from the predictor.
	// +kubebuilder:validation:Optional
	DriftThresholds []DriftThreshold `json:"driftThresholds" protobuf:"bytes,13,rep,name=driftThresholds"`
	// The interval, in seconds, at which the in-memory histograms are synced to etcd.
	// Default is one minute.
	// +kubebuilder:default:=60
	// +kubebuilder:validation:Optional
	SyncIntervalSec *int32 `json:"syncIntervalSec,omitempty" protobuf:"varint,14,opt,name=syncIntervalSec"`
	// Test suite for this histogram.
	// +kubebuilder:validation:Optional
	UnitTests catalog.TestSuite `json:"unitTests,omitempty" protobuf:"bytes,15,opt,name=unitTests"`
	// If true, generate the unit tests
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	GenUnitTests *bool `json:"genUnitTests,omitempty" protobuf:"varint,16,opt,name=genUnitTests"`
	// The feature filter used for this unit test.
	// +kubebuilder:validation:Optional
	FeatureFilter catalog.FeatureFilterType `json:"featureFilter,omitempty" protobuf:"bytes,17,opt,name=featureFilter"`
	// Set the reference data type for this unit test
	// +kubebuilder:validation:Optional
	ReferenceType catalog.ReferenceDataType `json:"referenceType,omitempty" protobuf:"bytes,18,opt,name=referenceType"`
}

FeatureHistogramSpec contains the desired state of a FeatureHistogram

func (*FeatureHistogramSpec) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureHistogramSpec.

func (*FeatureHistogramSpec) DeepCopyInto

func (in *FeatureHistogramSpec) DeepCopyInto(out *FeatureHistogramSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeatureHistogramSpec) Descriptor

func (*FeatureHistogramSpec) Descriptor() ([]byte, []int)

func (*FeatureHistogramSpec) Marshal

func (m *FeatureHistogramSpec) Marshal() (dAtA []byte, err error)

func (*FeatureHistogramSpec) MarshalTo

func (m *FeatureHistogramSpec) MarshalTo(dAtA []byte) (int, error)

func (*FeatureHistogramSpec) MarshalToSizedBuffer

func (m *FeatureHistogramSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeatureHistogramSpec) ProtoMessage

func (*FeatureHistogramSpec) ProtoMessage()

func (*FeatureHistogramSpec) Reset

func (m *FeatureHistogramSpec) Reset()

func (*FeatureHistogramSpec) Size

func (m *FeatureHistogramSpec) Size() (n int)

func (*FeatureHistogramSpec) String

func (this *FeatureHistogramSpec) String() string

func (*FeatureHistogramSpec) Unmarshal

func (m *FeatureHistogramSpec) Unmarshal(dAtA []byte) error

func (*FeatureHistogramSpec) XXX_DiscardUnknown

func (m *FeatureHistogramSpec) XXX_DiscardUnknown()

func (*FeatureHistogramSpec) XXX_Marshal

func (m *FeatureHistogramSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeatureHistogramSpec) XXX_Merge

func (m *FeatureHistogramSpec) XXX_Merge(src proto.Message)

func (*FeatureHistogramSpec) XXX_Size

func (m *FeatureHistogramSpec) XXX_Size() int

func (*FeatureHistogramSpec) XXX_Unmarshal

func (m *FeatureHistogramSpec) XXX_Unmarshal(b []byte) error

type FeatureHistogramStatus

// FeatureHistogramStatus defines the observed state of a FeatureHistogram.
type FeatureHistogramStatus struct {
	// ObservedGeneration is the last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,1,opt,name=observedGeneration"`
	// The histogram values; a mapping from column name to its histogram
	// +kubebuilder:validation:Optional
	Columns []ColumnHistogram `json:"columns,omitempty" protobuf:"bytes,2,rep,name=columns"`
	// Last time the object was updated
	//+kubebuilder:validation:Optional
	UpdatedAt *metav1.Time `json:"updatedAt,omitempty" protobuf:"bytes,3,opt,name=updatedAt"`
	// The log file specification that determines the location of all logs produced by the object
	// +kubebuilder:validation:Optional
	Logs catalog.Logs `json:"logs,omitempty" protobuf:"bytes,5,opt,name=logs"`
	// The phase of the feature histogram
	// +kubebuilder:validation:Optional
	Phase FeatureHistogramPhase `json:"phase,omitempty" protobuf:"bytes,6,opt,name=phase"`
	// In the case of failure, the FeatureHistogram resource controller will set this field with a failure reason
	//+kubebuilder:validation:Optional
	FailureReason catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,7,opt,name=failureReason"`
	// In the case of failure, the FeatureHistogram resource controller will set this field with a failure message
	//+kubebuilder:validation:Optional
	FailureMessage string `json:"failureMessage,omitempty" protobuf:"bytes,8,opt,name=failureMessage"`
	// The result of the unit test suite for this histogram.
	//+kubebuilder:validation:Optional
	UnitTestsResult catalog.TestSuiteResult `json:"unitTestsResult,omitempty" protobuf:"bytes,9,opt,name=unitTestsResult"`
	// Total number of predictions recorded by this feature histogram
	//+kubebuilder:validation:Optional
	Total int32 `json:"total,omitempty" protobuf:"varint,10,opt,name=total"`
	// The number of prediction errors recorded by this feature histogram
	//+kubebuilder:validation:Optional
	Errors int32 `json:"errors,omitempty" protobuf:"varint,11,opt,name=errors"`
	// The current conditions of the FeatureHistogram
	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []metav1.Condition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,12,rep,name=conditions"`
}

FeatureHistogramStatus defines the observed state of FeatureHistogram

func (*FeatureHistogramStatus) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureHistogramStatus.

func (*FeatureHistogramStatus) DeepCopyInto

func (in *FeatureHistogramStatus) DeepCopyInto(out *FeatureHistogramStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeatureHistogramStatus) Descriptor

func (*FeatureHistogramStatus) Descriptor() ([]byte, []int)

func (*FeatureHistogramStatus) Marshal

func (m *FeatureHistogramStatus) Marshal() (dAtA []byte, err error)

func (*FeatureHistogramStatus) MarshalTo

func (m *FeatureHistogramStatus) MarshalTo(dAtA []byte) (int, error)

func (*FeatureHistogramStatus) MarshalToSizedBuffer

func (m *FeatureHistogramStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeatureHistogramStatus) ProtoMessage

func (*FeatureHistogramStatus) ProtoMessage()

func (*FeatureHistogramStatus) Reset

func (m *FeatureHistogramStatus) Reset()

func (*FeatureHistogramStatus) Size

func (m *FeatureHistogramStatus) Size() (n int)

func (*FeatureHistogramStatus) String

func (this *FeatureHistogramStatus) String() string

func (*FeatureHistogramStatus) Unmarshal

func (m *FeatureHistogramStatus) Unmarshal(dAtA []byte) error

func (*FeatureHistogramStatus) XXX_DiscardUnknown

func (m *FeatureHistogramStatus) XXX_DiscardUnknown()

func (*FeatureHistogramStatus) XXX_Marshal

func (m *FeatureHistogramStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeatureHistogramStatus) XXX_Merge

func (m *FeatureHistogramStatus) XXX_Merge(src proto.Message)

func (*FeatureHistogramStatus) XXX_Size

func (m *FeatureHistogramStatus) XXX_Size() int

func (*FeatureHistogramStatus) XXX_Unmarshal

func (m *FeatureHistogramStatus) XXX_Unmarshal(b []byte) error

type FeatureStatistics added in v0.6.252

// FeatureStatistics contains statistical parameters for a single feature from a dataset.
type FeatureStatistics struct {
	// The name of the feature
	// +kubebuilder:validation:Optional
	Name string `json:"name,omitempty" protobuf:"bytes,1,opt,name=name"`
	// The data type of the feature
	// +kubebuilder:validation:Optional
	DataType catalog.DataType `json:"datatype,omitempty" protobuf:"bytes,2,opt,name=datatype"`
	// Amount of rows which contain a value for the feature
	// +kubebuilder:validation:Optional
	Count float64 `json:"count,omitempty" protobuf:"bytes,3,opt,name=count"`
	// Amount of unique values present in the feature
	// +kubebuilder:validation:Optional
	Distinct int32 `json:"distinct,omitempty" protobuf:"varint,4,opt,name=distinct"`
	// Amount of missing values present in the feature
	// +kubebuilder:validation:Optional
	Missing int32 `json:"missing,omitempty" protobuf:"varint,5,opt,name=missing"`
	// Percentage of missing values in the feature
	// +kubebuilder:validation:Optional
	PercentMissing float64 `json:"percentMissing,omitempty" protobuf:"bytes,6,opt,name=percentMissing"`
	// The mean of all values in the feature
	// +kubebuilder:validation:Optional
	Mean float64 `json:"mean,omitempty" protobuf:"bytes,7,opt,name=mean"`
	// The standard deviation of the feature values
	// +kubebuilder:validation:Optional
	StdDev float64 `json:"stddev,omitempty" protobuf:"bytes,8,opt,name=stddev"`
	// The variability of the feature values
	// +kubebuilder:validation:Optional
	Variance float64 `json:"variance,omitempty" protobuf:"bytes,9,opt,name=variance"`
	// The minimum value of all values in the feature
	// +kubebuilder:validation:Optional
	Min float64 `json:"min,omitempty" protobuf:"bytes,10,opt,name=min"`
	// The maximum value of all values in the feature
	// +kubebuilder:validation:Optional
	Max float64 `json:"max,omitempty" protobuf:"bytes,11,opt,name=max"`
	// The computed kurtosis, which measures the peakness of the distribution of values in the feature
	// +kubebuilder:validation:Optional
	Kurtosis float64 `json:"kurtosis,omitempty" protobuf:"bytes,12,opt,name=kurtosis"`
	// The computed skewness, which measures the asymmetry of the distribution of values in the feature
	// +kubebuilder:validation:Optional
	Skewness float64 `json:"skewness,omitempty" protobuf:"bytes,13,opt,name=skewness"`
	// Sum contains the sum of all values in the column
	// +kubebuilder:validation:Optional
	Sum int64 `json:"sum,omitempty" protobuf:"bytes,14,opt,name=sum"`
	// Mad contains the mean absolute deviation of the feature
	// +kubebuilder:validation:Optional
	Mad float64 `json:"mad,omitempty" protobuf:"bytes,15,opt,name=mad"`
	// The 25% point of all the values of the feature in order
	// +kubebuilder:validation:Optional
	P25 float64 `json:"p25,omitempty" protobuf:"bytes,16,opt,name=p25"`
	// The 50% point of all the values of the feature in order, also known as the median
	// +kubebuilder:validation:Optional
	P50 float64 `json:"p50,omitempty" protobuf:"bytes,17,opt,name=p50"`
	// The 75% point of all the values of the feature in order
	// +kubebuilder:validation:Optional
	P75 float64 `json:"p75,omitempty" protobuf:"bytes,18,opt,name=p75"`
	// The interquartile range of the feature values
	// +kubebuilder:validation:Optional
	IQR float64 `json:"iqr,omitempty" protobuf:"bytes,19,opt,name=iqr"`
	// The mode value of the feature, also known as the most frequent value
	// +kubebuilder:validation:Optional
	Mode string `json:"mode,omitempty" protobuf:"bytes,20,opt,name=mode"`
	// The number of zero values in the feature
	// +kubebuilder:validation:Optional
	Zeros float64 `json:"zeros,omitempty" protobuf:"bytes,21,opt,name=zeros"`
	// The number of invalid values in the feature
	// +kubebuilder:validation:Optional
	Invalid int32 `json:"invalid,omitempty" protobuf:"varint,22,opt,name=invalid"`
	// The computed importance of the feature
	// +kubebuilder:validation:Optional
	Importance float64 `json:"importance,omitempty" protobuf:"bytes,23,opt,name=importance"`
	// Indicates if the feature is the target attribute for a Study
	// +kubebuilder:validation:Optional
	Target bool `json:"target,omitempty" protobuf:"varint,24,opt,name=target"`
	// Indicates if the column should be ignored
	// +kubebuilder:validation:Optional
	Ignore bool `json:"ignore,omitempty" protobuf:"varint,25,opt,name=ignore"`
	// Indicates if the feature may contain null values
	// +kubebuilder:validation:Optional
	Nullable bool `json:"nullable,omitempty" protobuf:"varint,26,opt,name=nullable"`
	// Indicates if the feature has high cardinality and should use the high cardinality encoder during feature engineering
	// +kubebuilder:validation:Optional
	HighCardinality bool `json:"highCardinality,omitempty" protobuf:"varint,27,opt,name=highCardinality"`
	// Indicates if the feature has high correlation with another feature, and that it should be dropped
	// +kubebuilder:validation:Optional
	HighCorrWithOtherFeatures bool `json:"highCorrWithOtherFeatures,omitempty" protobuf:"varint,28,opt,name=highCorrWithOtherFeatures"`
	// Indicate that the feature has low correlation with the target feature, and that it should be dropped
	// +kubebuilder:validation:Optional
	LowCorrWithTarget bool `json:"lowCorrWithTarget,omitempty" protobuf:"varint,29,opt,name=lowCorrWithTarget"`
	// Indicates if the feature has a high percentage of missing values, and that it should be dropped
	// +kubebuilder:validation:Optional
	HighMissingPct bool `json:"highMissingPct,omitempty" protobuf:"varint,30,opt,name=highMissingPct"`
	// Marks that the feature is skewed and would require a power transform
	// +kubebuilder:validation:Optional
	Skewed bool `json:"skewed,omitempty" protobuf:"varint,31,opt,name=skewed"`
	// Indicates if the feature is constant
	// +kubebuilder:validation:Optional
	Constant bool `json:"constant,omitempty" protobuf:"varint,33,opt,name=constant"`
	// Indicates if the feature is a duplicate of another feature
	// +kubebuilder:validation:Optional
	Duplicate bool `json:"duplicate,omitempty" protobuf:"varint,34,opt,name=duplicate"`
	// Indicates if the feature is reserved and must be included in model training
	// +kubebuilder:validation:Optional
	Reserved bool `json:"reserved,omitempty" protobuf:"varint,35,opt,name=reserved"`
	// The ratio between non-null and null values in the feature
	// +kubebuilder:validation:Optional
	Completeness float64 `json:"completeness,omitempty" protobuf:"bytes,37,opt,name=completeness"`
	// The ratio between unique values and non-unique values in the feature
	// NOTE(review): the field name suggests a count, while the comment describes a ratio — verify against the profiler.
	// +kubebuilder:validation:Optional
	DistinctValueCount float64 `json:"distinctValueCount,omitempty" protobuf:"bytes,38,opt,name=distinctValueCount"`
	// The ratio of the most frequent value to the total number of values in the feature
	// +kubebuilder:validation:Optional
	MostFreqValuesRatio float64 `json:"mostFreqValuesRatio,omitempty" protobuf:"bytes,39,opt,name=mostFreqValuesRatio"`
	// Histogram data representing the distribution of the values in the column
	// +kubebuilder:validation:Optional
	Histogram catalog.HistogramData `json:"histogram,omitempty" protobuf:"bytes,41,opt,name=histogram"`
	// The correlation to the target feature of the dataset
	// +kubebuilder:validation:Optional
	CorrToTarget float64 `json:"corrToTarget,omitempty" protobuf:"bytes,42,opt,name=corrToTarget"`
	// The index of the column in the dataset
	Index int32 `json:"index,omitempty" protobuf:"bytes,43,opt,name=index"`
	// Statistics relevant to outliers
	Outliers OutlierStatistics `json:"outliers,omitempty" protobuf:"bytes,44,opt,name=outliers"`
	// Distinct values, in the case the feature is categorical
	Values []string `json:"values,omitempty" protobuf:"bytes,45,opt,name=values"`
}

FeatureStatistics contains statistical parameters for a single feature from a dataset

func (FeatureStatistics) BigBoolTest added in v0.6.252

func (feature FeatureStatistics) BigBoolTest(thresholds []DriftThreshold, rowCount int32) *catalog.DataTestCase

func (FeatureStatistics) BigCatTest added in v0.6.252

func (feature FeatureStatistics) BigCatTest(thresholds []DriftThreshold, rowCount int32) *catalog.DataTestCase

func (FeatureStatistics) BigNumericTest added in v0.6.252

func (feature FeatureStatistics) BigNumericTest(thresholds []DriftThreshold, rowCount int32) *catalog.DataTestCase

func (*FeatureStatistics) DeepCopy added in v0.6.252

func (in *FeatureStatistics) DeepCopy() *FeatureStatistics

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureStatistics.

func (*FeatureStatistics) DeepCopyInto added in v0.6.252

func (in *FeatureStatistics) DeepCopyInto(out *FeatureStatistics)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeatureStatistics) Descriptor added in v0.6.252

func (*FeatureStatistics) Descriptor() ([]byte, []int)

func (FeatureStatistics) GenDriftTestCase added in v0.6.252

func (feature FeatureStatistics) GenDriftTestCase(thresholds []DriftThreshold, rowCount int32) *catalog.DataTestCase

func (*FeatureStatistics) Marshal added in v0.6.252

func (m *FeatureStatistics) Marshal() (dAtA []byte, err error)

func (*FeatureStatistics) MarshalTo added in v0.6.252

func (m *FeatureStatistics) MarshalTo(dAtA []byte) (int, error)

func (*FeatureStatistics) MarshalToSizedBuffer added in v0.6.252

func (m *FeatureStatistics) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeatureStatistics) ProtoMessage added in v0.6.252

func (*FeatureStatistics) ProtoMessage()

func (*FeatureStatistics) Reset added in v0.6.252

func (m *FeatureStatistics) Reset()

func (*FeatureStatistics) Size added in v0.6.252

func (m *FeatureStatistics) Size() (n int)

func (FeatureStatistics) SmallBoolTest added in v0.6.252

func (feature FeatureStatistics) SmallBoolTest(thresholds []DriftThreshold, rowCount int32) *catalog.DataTestCase

func (FeatureStatistics) SmallCatTest added in v0.6.252

func (feature FeatureStatistics) SmallCatTest(thresholds []DriftThreshold, rowCount int32) *catalog.DataTestCase

func (FeatureStatistics) SmallNumericTest added in v0.6.252

func (feature FeatureStatistics) SmallNumericTest(thresholds []DriftThreshold, rowCount int32) *catalog.DataTestCase

func (*FeatureStatistics) String added in v0.6.252

func (this *FeatureStatistics) String() string

func (*FeatureStatistics) Unmarshal added in v0.6.252

func (m *FeatureStatistics) Unmarshal(dAtA []byte) error

func (*FeatureStatistics) XXX_DiscardUnknown added in v0.6.252

func (m *FeatureStatistics) XXX_DiscardUnknown()

func (*FeatureStatistics) XXX_Marshal added in v0.6.252

func (m *FeatureStatistics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeatureStatistics) XXX_Merge added in v0.6.252

func (m *FeatureStatistics) XXX_Merge(src proto.Message)

func (*FeatureStatistics) XXX_Size added in v0.6.252

func (m *FeatureStatistics) XXX_Size() int

func (*FeatureStatistics) XXX_Unmarshal added in v0.6.252

func (m *FeatureStatistics) XXX_Unmarshal(b []byte) error

type FlatFileFormatSpec added in v0.4.861

// FlatFileFormatSpec defines the format for incoming flat-files to be parsed.
type FlatFileFormatSpec struct {
	// The file type of incoming data which uses the DataSource (by default, a CSV file)
	// +kubebuilder:default:="csv"
	// +kubebuilder:validation:Optional
	FileType *FlatFileType `json:"fileType,omitempty" protobuf:"bytes,1,opt,name=fileType"`
	// The file format for CSV files, if applicable
	// +kubebuilder:validation:Optional
	Csv CsvFileSpec `json:"csv,omitempty" protobuf:"bytes,2,opt,name=csv"`
	// The file format for Excel files, if applicable
	// +kubebuilder:validation:Optional
	Excel ExcelNotebookSpec `json:"excel,omitempty" protobuf:"bytes,3,opt,name=excel"`
	// The file format for Parquet files, if applicable
	// +kubebuilder:validation:Optional
	Parquet ParquetFileSpec `json:"parquet,omitempty" protobuf:"bytes,4,opt,name=parquet"`
}

FlatFileFormatSpec defines the format for incoming flat-files to be parsed

func (*FlatFileFormatSpec) DeepCopy added in v0.4.861

func (in *FlatFileFormatSpec) DeepCopy() *FlatFileFormatSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FlatFileFormatSpec.

func (*FlatFileFormatSpec) DeepCopyInto added in v0.4.861

func (in *FlatFileFormatSpec) DeepCopyInto(out *FlatFileFormatSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FlatFileFormatSpec) Descriptor added in v0.4.861

func (*FlatFileFormatSpec) Descriptor() ([]byte, []int)

func (*FlatFileFormatSpec) Marshal added in v0.4.861

func (m *FlatFileFormatSpec) Marshal() (dAtA []byte, err error)

func (*FlatFileFormatSpec) MarshalTo added in v0.4.861

func (m *FlatFileFormatSpec) MarshalTo(dAtA []byte) (int, error)

func (*FlatFileFormatSpec) MarshalToSizedBuffer added in v0.4.861

func (m *FlatFileFormatSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FlatFileFormatSpec) ProtoMessage added in v0.4.861

func (*FlatFileFormatSpec) ProtoMessage()

func (*FlatFileFormatSpec) Reset added in v0.4.861

func (m *FlatFileFormatSpec) Reset()

func (*FlatFileFormatSpec) Size added in v0.4.861

func (m *FlatFileFormatSpec) Size() (n int)

func (*FlatFileFormatSpec) String added in v0.4.861

func (this *FlatFileFormatSpec) String() string

func (*FlatFileFormatSpec) Unmarshal added in v0.4.861

func (m *FlatFileFormatSpec) Unmarshal(dAtA []byte) error

func (*FlatFileFormatSpec) XXX_DiscardUnknown added in v0.4.861

func (m *FlatFileFormatSpec) XXX_DiscardUnknown()

func (*FlatFileFormatSpec) XXX_Marshal added in v0.4.861

func (m *FlatFileFormatSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FlatFileFormatSpec) XXX_Merge added in v0.4.861

func (m *FlatFileFormatSpec) XXX_Merge(src proto.Message)

func (*FlatFileFormatSpec) XXX_Size added in v0.4.861

func (m *FlatFileFormatSpec) XXX_Size() int

func (*FlatFileFormatSpec) XXX_Unmarshal added in v0.4.861

func (m *FlatFileFormatSpec) XXX_Unmarshal(b []byte) error

type FlatFileType

type FlatFileType string

FlatFileType represents the type of the file +kubebuilder:validation:Enum="csv";"tsv";"excel";"fwf";"hdf";"html";"json";"pickle";"sas";"stata";"feather";"parquet";

// The set of flat-file formats a DataSource can parse; values mirror the
// +kubebuilder:validation:Enum list declared on FlatFileType.
const (
	FlatFileTypeCsv     FlatFileType = "csv"
	FlatFileTypeTable   FlatFileType = "tsv"
	FlatFileTypeExcel   FlatFileType = "excel"
	FlatFileTypeFwf     FlatFileType = "fwf"
	FlatFileTypeHdf5    FlatFileType = "hdf"
	FlatFileTypeHtml    FlatFileType = "html"
	FlatFileTypeJson    FlatFileType = "json"
	FlatFileTypePickle  FlatFileType = "pickle"
	FlatFileTypeSas     FlatFileType = "sas"
	FlatFileTypeStata   FlatFileType = "stata"
	FlatFileTypeFeather FlatFileType = "feather"
	FlatFileTypeParquet FlatFileType = "parquet"
)

type GaugeSpec

// GaugeSpec specifies the data source for a gauge: a single cell
// (dataset, column, row) or a constant scalar value.
type GaugeSpec struct {
	// DatasetName is the name of the dataset
	// +kubebuilder:validation:Optional
	DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"`
	// Column is the name of the column
	// +kubebuilder:validation:Optional
	Column *string `json:"column,omitempty" protobuf:"bytes,2,opt,name=column"`
	// Row is the row number
	// +kubebuilder:validation:Optional
	Row *int32 `json:"row,omitempty" protobuf:"varint,3,opt,name=row"`
	// Scalar is a constant value, used instead of a dataset cell
	// +kubebuilder:validation:Optional
	Scalar *string `json:"scalar,omitempty" protobuf:"bytes,4,opt,name=scalar"`
}

func (*GaugeSpec) DeepCopy

func (in *GaugeSpec) DeepCopy() *GaugeSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GaugeSpec.

func (*GaugeSpec) DeepCopyInto

func (in *GaugeSpec) DeepCopyInto(out *GaugeSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*GaugeSpec) Descriptor

func (*GaugeSpec) Descriptor() ([]byte, []int)

func (*GaugeSpec) Marshal

func (m *GaugeSpec) Marshal() (dAtA []byte, err error)

func (*GaugeSpec) MarshalTo

func (m *GaugeSpec) MarshalTo(dAtA []byte) (int, error)

func (*GaugeSpec) MarshalToSizedBuffer

func (m *GaugeSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*GaugeSpec) ProtoMessage

func (*GaugeSpec) ProtoMessage()

func (*GaugeSpec) Reset

func (m *GaugeSpec) Reset()

func (*GaugeSpec) Size

func (m *GaugeSpec) Size() (n int)

func (*GaugeSpec) String

func (this *GaugeSpec) String() string

func (*GaugeSpec) Unmarshal

func (m *GaugeSpec) Unmarshal(dAtA []byte) error

func (*GaugeSpec) XXX_DiscardUnknown

func (m *GaugeSpec) XXX_DiscardUnknown()

func (*GaugeSpec) XXX_Marshal

func (m *GaugeSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*GaugeSpec) XXX_Merge

func (m *GaugeSpec) XXX_Merge(src proto.Message)

func (*GaugeSpec) XXX_Size

func (m *GaugeSpec) XXX_Size() int

func (*GaugeSpec) XXX_Unmarshal

func (m *GaugeSpec) XXX_Unmarshal(b []byte) error

type GitLocation

// GitLocation specifies the Git location where Modela will track resources as YAML.
type GitLocation struct {
	// The Git ConnectionName resource which exists in the same tenant as the parent DataProduct
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	GitConnectionName *string `json:"gitConnectionName,omitempty" protobuf:"bytes,1,opt,name=gitConnectionName"`
	// The URL to the destination Git repository
	// +kubebuilder:default:=""
	// +kubebuilder:validation:MaxLength=256
	URL *string `json:"url,omitempty" protobuf:"bytes,2,opt,name=url"`
	// The branch inside the Git repository
	// +kubebuilder:default:=""
	// +kubebuilder:validation:MaxLength=256
	Branch *string `json:"branch,omitempty" protobuf:"bytes,3,opt,name=branch"`
	// Indicates if the repository is private (defaults to true)
	// +kubebuilder:default:=true
	Private *bool `json:"private,omitempty" protobuf:"varint,4,opt,name=private"`
}

GitLocation specifies the Git location where Modela will track resources as YAML

func (*GitLocation) DeepCopy

func (in *GitLocation) DeepCopy() *GitLocation

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GitLocation.

func (*GitLocation) DeepCopyInto

func (in *GitLocation) DeepCopyInto(out *GitLocation)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*GitLocation) Descriptor

func (*GitLocation) Descriptor() ([]byte, []int)

func (*GitLocation) Marshal

func (m *GitLocation) Marshal() (dAtA []byte, err error)

func (*GitLocation) MarshalTo

func (m *GitLocation) MarshalTo(dAtA []byte) (int, error)

func (*GitLocation) MarshalToSizedBuffer

func (m *GitLocation) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*GitLocation) ProtoMessage

func (*GitLocation) ProtoMessage()

func (*GitLocation) Reset

func (m *GitLocation) Reset()

func (*GitLocation) Size

func (m *GitLocation) Size() (n int)

func (*GitLocation) String

func (this *GitLocation) String() string

func (*GitLocation) Unmarshal

func (m *GitLocation) Unmarshal(dAtA []byte) error

func (*GitLocation) XXX_DiscardUnknown

func (m *GitLocation) XXX_DiscardUnknown()

func (*GitLocation) XXX_Marshal

func (m *GitLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*GitLocation) XXX_Merge

func (m *GitLocation) XXX_Merge(src proto.Message)

func (*GitLocation) XXX_Size

func (m *GitLocation) XXX_Size() int

func (*GitLocation) XXX_Unmarshal

func (m *GitLocation) XXX_Unmarshal(b []byte) error

type GroupBySpec added in v0.5.174

// GroupBySpec defines the configuration to split the dataset by groups.
type GroupBySpec struct {
	// Enables the group-by; if not enabled, the dataset is treated as defined in the data source.
	// +kubebuilder:validation:Optional
	Enabled *bool `json:"enabled,omitempty" protobuf:"varint,1,opt,name=enabled"`
	// For group forecasting, this is the key of the group.
	// If not specified, this will be the key from the data source.
	// +kubebuilder:validation:Optional
	GroupBy []string `json:"groupBy,omitempty" protobuf:"bytes,2,rep,name=groupBy"`
	// The time series frequency; if not specified, the frequency will be the base frequency from the data source.
	// +kubebuilder:default:="day"
	// +kubebuilder:validation:Optional
	Frequency *catalog.Frequency `json:"frequency,omitempty" protobuf:"bytes,3,opt,name=frequency"`
	// The interval to forecast at this level. If not specified, the interval will be the base interval
	// of the data source.
	// +kubebuilder:default:=1
	// +kubebuilder:validation:Optional
	Interval *int32 `json:"interval,omitempty" protobuf:"varint,4,opt,name=interval"`
	// The aggregation function, defining how to aggregate values within a group.
	// By default this is the aggregation function from the data source.
	// +kubebuilder:default:="sum"
	// +kubebuilder:validation:Optional
	Aggregate catalog.Aggregate `json:"aggregate,omitempty" protobuf:"bytes,5,opt,name=aggregate"`
}

GroupBySpec defines the configuration to split the dataset by groups

func (*GroupBySpec) DeepCopy added in v0.5.174

func (in *GroupBySpec) DeepCopy() *GroupBySpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GroupBySpec.

func (*GroupBySpec) DeepCopyInto added in v0.5.174

func (in *GroupBySpec) DeepCopyInto(out *GroupBySpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*GroupBySpec) Descriptor added in v0.5.174

func (*GroupBySpec) Descriptor() ([]byte, []int)

func (*GroupBySpec) Marshal added in v0.5.174

func (m *GroupBySpec) Marshal() (dAtA []byte, err error)

func (*GroupBySpec) MarshalTo added in v0.5.174

func (m *GroupBySpec) MarshalTo(dAtA []byte) (int, error)

func (*GroupBySpec) MarshalToSizedBuffer added in v0.5.174

func (m *GroupBySpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*GroupBySpec) ProtoMessage added in v0.5.174

func (*GroupBySpec) ProtoMessage()

func (*GroupBySpec) Reset added in v0.5.174

func (m *GroupBySpec) Reset()

func (*GroupBySpec) Size added in v0.5.174

func (m *GroupBySpec) Size() (n int)

func (*GroupBySpec) String added in v0.5.174

func (this *GroupBySpec) String() string

func (*GroupBySpec) Unmarshal added in v0.5.174

func (m *GroupBySpec) Unmarshal(dAtA []byte) error

func (*GroupBySpec) XXX_DiscardUnknown added in v0.5.174

func (m *GroupBySpec) XXX_DiscardUnknown()

func (*GroupBySpec) XXX_Marshal added in v0.5.174

func (m *GroupBySpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*GroupBySpec) XXX_Merge added in v0.5.174

func (m *GroupBySpec) XXX_Merge(src proto.Message)

func (*GroupBySpec) XXX_Size added in v0.5.174

func (m *GroupBySpec) XXX_Size() int

func (*GroupBySpec) XXX_Unmarshal added in v0.5.174

func (m *GroupBySpec) XXX_Unmarshal(b []byte) error

type GroupDatasetLocationStatus added in v0.6.252

// GroupDatasetLocationStatus records the storage locations of the folders and
// files produced for a dataset group.
type GroupDatasetLocationStatus struct {
	// The root folder of all the groups
	GroupsRoot *string `json:"groupsRoot,omitempty" protobuf:"bytes,1,opt,name=groupsRoot"`
	// The root of the group folders and files
	// +kubebuilder:validation:Optional
	GroupRoot *string `json:"groupRoot,omitempty" protobuf:"bytes,2,opt,name=groupRoot"`
	// The folder of group data
	// +kubebuilder:validation:Optional
	GroupDataFolder *string `json:"groupDataFolder,omitempty" protobuf:"bytes,3,opt,name=groupDataFolder"`
	// The group data file location.
	// NOTE(review): Go name says "Index" while the JSON/protobuf tag is "groupDataFile" — confirm which is intended.
	// +kubebuilder:validation:Optional
	GroupDataIndex *string `json:"groupDataFile,omitempty" protobuf:"bytes,4,opt,name=groupDataFile"`
	// The folder containing the group profile
	// +kubebuilder:validation:Optional
	GroupProfileFolder *string `json:"groupProfileFolder,omitempty" protobuf:"bytes,5,opt,name=groupProfileFolder"`
	// The group report file location.
	// NOTE(review): Go name says "Index" while the JSON/protobuf tag is "groupReportFile" — confirm which is intended.
	// +kubebuilder:validation:Optional
	GroupReportIndex *string `json:"groupReportFile,omitempty" protobuf:"bytes,6,opt,name=groupReportFile"`
	// The file containing the group features
	// +kubebuilder:validation:Optional
	GroupFeaturesFile *string `json:"groupFeaturesFile,omitempty" protobuf:"bytes,7,opt,name=groupFeaturesFile"`
}

func (*GroupDatasetLocationStatus) DeepCopy added in v0.6.252

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GroupDatasetLocationStatus.

func (*GroupDatasetLocationStatus) DeepCopyInto added in v0.6.252

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*GroupDatasetLocationStatus) Descriptor added in v0.6.252

func (*GroupDatasetLocationStatus) Descriptor() ([]byte, []int)

func (*GroupDatasetLocationStatus) Marshal added in v0.6.252

func (m *GroupDatasetLocationStatus) Marshal() (dAtA []byte, err error)

func (*GroupDatasetLocationStatus) MarshalTo added in v0.6.252

func (m *GroupDatasetLocationStatus) MarshalTo(dAtA []byte) (int, error)

func (*GroupDatasetLocationStatus) MarshalToSizedBuffer added in v0.6.252

func (m *GroupDatasetLocationStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*GroupDatasetLocationStatus) ProtoMessage added in v0.6.252

func (*GroupDatasetLocationStatus) ProtoMessage()

func (*GroupDatasetLocationStatus) Reset added in v0.6.252

func (m *GroupDatasetLocationStatus) Reset()

func (*GroupDatasetLocationStatus) Size added in v0.6.252

func (m *GroupDatasetLocationStatus) Size() (n int)

func (*GroupDatasetLocationStatus) String added in v0.6.252

func (this *GroupDatasetLocationStatus) String() string

func (*GroupDatasetLocationStatus) Unmarshal added in v0.6.252

func (m *GroupDatasetLocationStatus) Unmarshal(dAtA []byte) error

func (*GroupDatasetLocationStatus) XXX_DiscardUnknown added in v0.6.252

func (m *GroupDatasetLocationStatus) XXX_DiscardUnknown()

func (*GroupDatasetLocationStatus) XXX_Marshal added in v0.6.252

func (m *GroupDatasetLocationStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*GroupDatasetLocationStatus) XXX_Merge added in v0.6.252

func (m *GroupDatasetLocationStatus) XXX_Merge(src proto.Message)

func (*GroupDatasetLocationStatus) XXX_Size added in v0.6.252

func (m *GroupDatasetLocationStatus) XXX_Size() int

func (*GroupDatasetLocationStatus) XXX_Unmarshal added in v0.6.252

func (m *GroupDatasetLocationStatus) XXX_Unmarshal(b []byte) error

type HistogramSpec

// HistogramSpec specifies the data source and binning for a histogram.
type HistogramSpec struct {
	// DatasetName is the name of the dataset
	// +kubebuilder:validation:Optional
	DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"`
	// The name of the X column
	// +kubebuilder:validation:Optional
	X *string `json:"x,omitempty" protobuf:"bytes,2,opt,name=x"`
	// The number of bins for the histogram
	// +kubebuilder:validation:Optional
	Bins *int32 `json:"bins,omitempty" protobuf:"varint,3,opt,name=bins"`
}

func (*HistogramSpec) DeepCopy

func (in *HistogramSpec) DeepCopy() *HistogramSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HistogramSpec.

func (*HistogramSpec) DeepCopyInto

func (in *HistogramSpec) DeepCopyInto(out *HistogramSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*HistogramSpec) Descriptor

func (*HistogramSpec) Descriptor() ([]byte, []int)

func (*HistogramSpec) Marshal

func (m *HistogramSpec) Marshal() (dAtA []byte, err error)

func (*HistogramSpec) MarshalTo

func (m *HistogramSpec) MarshalTo(dAtA []byte) (int, error)

func (*HistogramSpec) MarshalToSizedBuffer

func (m *HistogramSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*HistogramSpec) ProtoMessage

func (*HistogramSpec) ProtoMessage()

func (*HistogramSpec) Reset

func (m *HistogramSpec) Reset()

func (*HistogramSpec) Size

func (m *HistogramSpec) Size() (n int)

func (*HistogramSpec) String

func (this *HistogramSpec) String() string

func (*HistogramSpec) Unmarshal

func (m *HistogramSpec) Unmarshal(dAtA []byte) error

func (*HistogramSpec) XXX_DiscardUnknown

func (m *HistogramSpec) XXX_DiscardUnknown()

func (*HistogramSpec) XXX_Marshal

func (m *HistogramSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*HistogramSpec) XXX_Merge

func (m *HistogramSpec) XXX_Merge(src proto.Message)

func (*HistogramSpec) XXX_Size

func (m *HistogramSpec) XXX_Size() int

func (*HistogramSpec) XXX_Unmarshal

func (m *HistogramSpec) XXX_Unmarshal(b []byte) error

type ImageLocation

// ImageLocation specifies the destination for all model images produced under a DataProduct.
type ImageLocation struct {
	// The canonical name of the image repository. If not set, it will default to docker/{dataproduct_name}
	// +kubebuilder:default:=""
	// +kubebuilder:validation:MaxLength=256
	Name *string `json:"name,omitempty" protobuf:"bytes,1,opt,name=name"`
	// The image repository ConnectionName resource which exists in the same tenant as the parent DataProduct. If the field
	// is not set, Modela will ignore the image location and not push images
	// +kubebuilder:default:=""
	RegistryConnectionName *string `json:"registryConnectionName,omitempty" protobuf:"bytes,2,opt,name=registryConnectionName"`
}

ImageLocation specifies the destination for all model images produced under a DataProduct

func (*ImageLocation) DeepCopy

func (in *ImageLocation) DeepCopy() *ImageLocation

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ImageLocation.

func (*ImageLocation) DeepCopyInto

func (in *ImageLocation) DeepCopyInto(out *ImageLocation)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ImageLocation) Descriptor

func (*ImageLocation) Descriptor() ([]byte, []int)

func (*ImageLocation) Marshal

func (m *ImageLocation) Marshal() (dAtA []byte, err error)

func (*ImageLocation) MarshalTo

func (m *ImageLocation) MarshalTo(dAtA []byte) (int, error)

func (*ImageLocation) MarshalToSizedBuffer

func (m *ImageLocation) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*ImageLocation) ProtoMessage

func (*ImageLocation) ProtoMessage()

func (*ImageLocation) Reset

func (m *ImageLocation) Reset()

func (*ImageLocation) Size

func (m *ImageLocation) Size() (n int)

func (*ImageLocation) String

func (this *ImageLocation) String() string

func (*ImageLocation) Unmarshal

func (m *ImageLocation) Unmarshal(dAtA []byte) error

func (*ImageLocation) XXX_DiscardUnknown

func (m *ImageLocation) XXX_DiscardUnknown()

func (*ImageLocation) XXX_Marshal

func (m *ImageLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ImageLocation) XXX_Merge

func (m *ImageLocation) XXX_Merge(src proto.Message)

func (*ImageLocation) XXX_Size

func (m *ImageLocation) XXX_Size() int

func (*ImageLocation) XXX_Unmarshal

func (m *ImageLocation) XXX_Unmarshal(b []byte) error

type KPI

// KPI specifies a key performance indicator for a DataProduct. Currently not implemented.
type KPI struct {
	// The name of the KPI
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Name *string `json:"name,omitempty" protobuf:"bytes,1,opt,name=name"`
	// The value of the KPI
	Value *float64 `json:"value,omitempty" protobuf:"bytes,2,opt,name=value"`
}

KPI specifies a key performance indicator for a DataProduct. Currently not implemented.

func (*KPI) DeepCopy

func (in *KPI) DeepCopy() *KPI

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KPI.

func (*KPI) DeepCopyInto

func (in *KPI) DeepCopyInto(out *KPI)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*KPI) Descriptor

func (*KPI) Descriptor() ([]byte, []int)

func (*KPI) Marshal

func (m *KPI) Marshal() (dAtA []byte, err error)

func (*KPI) MarshalTo

func (m *KPI) MarshalTo(dAtA []byte) (int, error)

func (*KPI) MarshalToSizedBuffer

func (m *KPI) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*KPI) ProtoMessage

func (*KPI) ProtoMessage()

func (*KPI) Reset

func (m *KPI) Reset()

func (*KPI) Size

func (m *KPI) Size() (n int)

func (*KPI) String

func (this *KPI) String() string

func (*KPI) Unmarshal

func (m *KPI) Unmarshal(dAtA []byte) error

func (*KPI) XXX_DiscardUnknown

func (m *KPI) XXX_DiscardUnknown()

func (*KPI) XXX_Marshal

func (m *KPI) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*KPI) XXX_Merge

func (m *KPI) XXX_Merge(src proto.Message)

func (*KPI) XXX_Size

func (m *KPI) XXX_Size() int

func (*KPI) XXX_Unmarshal

func (m *KPI) XXX_Unmarshal(b []byte) error

type LabelingRule added in v0.4.821

type LabelingRule struct {
	// Column is the name of the column the rule is evaluated against.
	//+kubebuilder:validation:Optional
	Column string `json:"column,omitempty" protobuf:"bytes,1,opt,name=column"`
	// Operator is the operator applied to the column value.
	//+kubebuilder:validation:Optional
	Operator catalog.Op `json:"operator,omitempty" protobuf:"bytes,2,opt,name=operator"`
	// Value is the literal operand compared against the column value.
	//+kubebuilder:validation:Optional
	Value string `json:"value,omitempty" protobuf:"bytes,3,opt,name=value"`
}

LabelingRule defines a single column expression (column, operator, value) used for labeling.

func (*LabelingRule) DeepCopy added in v0.4.821

func (in *LabelingRule) DeepCopy() *LabelingRule

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LabelingRule.

func (*LabelingRule) DeepCopyInto added in v0.4.821

func (in *LabelingRule) DeepCopyInto(out *LabelingRule)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*LabelingRule) Descriptor added in v0.4.821

func (*LabelingRule) Descriptor() ([]byte, []int)

func (*LabelingRule) Marshal added in v0.4.821

func (m *LabelingRule) Marshal() (dAtA []byte, err error)

func (*LabelingRule) MarshalTo added in v0.4.821

func (m *LabelingRule) MarshalTo(dAtA []byte) (int, error)

func (*LabelingRule) MarshalToSizedBuffer added in v0.4.821

func (m *LabelingRule) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*LabelingRule) ProtoMessage added in v0.4.821

func (*LabelingRule) ProtoMessage()

func (*LabelingRule) Reset added in v0.4.821

func (m *LabelingRule) Reset()

func (*LabelingRule) Size added in v0.4.821

func (m *LabelingRule) Size() (n int)

func (*LabelingRule) String added in v0.4.821

func (this *LabelingRule) String() string

func (*LabelingRule) Unmarshal added in v0.4.821

func (m *LabelingRule) Unmarshal(dAtA []byte) error

func (*LabelingRule) XXX_DiscardUnknown added in v0.4.821

func (m *LabelingRule) XXX_DiscardUnknown()

func (*LabelingRule) XXX_Marshal added in v0.4.821

func (m *LabelingRule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*LabelingRule) XXX_Merge added in v0.4.821

func (m *LabelingRule) XXX_Merge(src proto.Message)

func (*LabelingRule) XXX_Size added in v0.4.821

func (m *LabelingRule) XXX_Size() int

func (*LabelingRule) XXX_Unmarshal added in v0.4.821

func (m *LabelingRule) XXX_Unmarshal(b []byte) error

type LabelingSpec added in v0.4.821

type LabelingSpec struct {
	// Enabled indicates if labeling is enabled.
	Enabled *bool `json:"enabled,omitempty" protobuf:"varint,1,opt,name=enabled"`
	// ResultColumn is the name of the column that will hold the labeling result.
	ResultColumn string `json:"resultColumn,omitempty" protobuf:"bytes,2,opt,name=resultColumn"`
	// Positive is the list of positive labeling rules.
	Positive []LabelingRule `json:"positive,omitempty" protobuf:"bytes,3,rep,name=positive"`
	// Negative is the list of negative labeling rules.
	Negative []LabelingRule `json:"negative,omitempty" protobuf:"bytes,4,rep,name=negative"`
}

func (*LabelingSpec) DeepCopy added in v0.4.821

func (in *LabelingSpec) DeepCopy() *LabelingSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LabelingSpec.

func (*LabelingSpec) DeepCopyInto added in v0.4.821

func (in *LabelingSpec) DeepCopyInto(out *LabelingSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*LabelingSpec) Descriptor added in v0.4.821

func (*LabelingSpec) Descriptor() ([]byte, []int)

func (*LabelingSpec) Marshal added in v0.4.821

func (m *LabelingSpec) Marshal() (dAtA []byte, err error)

func (*LabelingSpec) MarshalTo added in v0.4.821

func (m *LabelingSpec) MarshalTo(dAtA []byte) (int, error)

func (*LabelingSpec) MarshalToSizedBuffer added in v0.4.821

func (m *LabelingSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*LabelingSpec) ProtoMessage added in v0.4.821

func (*LabelingSpec) ProtoMessage()

func (*LabelingSpec) Reset added in v0.4.821

func (m *LabelingSpec) Reset()

func (*LabelingSpec) Size added in v0.4.821

func (m *LabelingSpec) Size() (n int)

func (*LabelingSpec) String added in v0.4.821

func (this *LabelingSpec) String() string

func (*LabelingSpec) Unmarshal added in v0.4.821

func (m *LabelingSpec) Unmarshal(dAtA []byte) error

func (*LabelingSpec) XXX_DiscardUnknown added in v0.4.821

func (m *LabelingSpec) XXX_DiscardUnknown()

func (*LabelingSpec) XXX_Marshal added in v0.4.821

func (m *LabelingSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*LabelingSpec) XXX_Merge added in v0.4.821

func (m *LabelingSpec) XXX_Merge(src proto.Message)

func (*LabelingSpec) XXX_Size added in v0.4.821

func (m *LabelingSpec) XXX_Size() int

func (*LabelingSpec) XXX_Unmarshal added in v0.4.821

func (m *LabelingSpec) XXX_Unmarshal(b []byte) error

type LineChartSpec

type LineChartSpec struct {
	// DatasetName is the name of the dataset to plot.
	DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"`
	// X is the name of the column used for the X axis.
	// +kubebuilder:validation:Optional
	X *string `json:"x,omitempty" protobuf:"bytes,2,opt,name=x"`
	// Y is the name of the column used for the Y axis.
	// +kubebuilder:validation:Optional
	Y *string `json:"y,omitempty" protobuf:"bytes,3,opt,name=y"`
	// Legend indicates whether to show the chart legend.
	// NOTE(review): the original comment read "Show borther" — presumably "show legend"; confirm.
	// +kubebuilder:validation:Optional
	Legend *bool `json:"legend,omitempty" protobuf:"varint,4,opt,name=legend"`
}

func (*LineChartSpec) DeepCopy

func (in *LineChartSpec) DeepCopy() *LineChartSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LineChartSpec.

func (*LineChartSpec) DeepCopyInto

func (in *LineChartSpec) DeepCopyInto(out *LineChartSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*LineChartSpec) Descriptor

func (*LineChartSpec) Descriptor() ([]byte, []int)

func (*LineChartSpec) Marshal

func (m *LineChartSpec) Marshal() (dAtA []byte, err error)

func (*LineChartSpec) MarshalTo

func (m *LineChartSpec) MarshalTo(dAtA []byte) (int, error)

func (*LineChartSpec) MarshalToSizedBuffer

func (m *LineChartSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*LineChartSpec) ProtoMessage

func (*LineChartSpec) ProtoMessage()

func (*LineChartSpec) Reset

func (m *LineChartSpec) Reset()

func (*LineChartSpec) Size

func (m *LineChartSpec) Size() (n int)

func (*LineChartSpec) String

func (this *LineChartSpec) String() string

func (*LineChartSpec) Unmarshal

func (m *LineChartSpec) Unmarshal(dAtA []byte) error

func (*LineChartSpec) XXX_DiscardUnknown

func (m *LineChartSpec) XXX_DiscardUnknown()

func (*LineChartSpec) XXX_Marshal

func (m *LineChartSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*LineChartSpec) XXX_Merge

func (m *LineChartSpec) XXX_Merge(src proto.Message)

func (*LineChartSpec) XXX_Size

func (m *LineChartSpec) XXX_Size() int

func (*LineChartSpec) XXX_Unmarshal

func (m *LineChartSpec) XXX_Unmarshal(b []byte) error

type MaterializationSpec

type MaterializationSpec struct {
	// If true, update the online store.
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Online *bool `json:"online,omitempty" protobuf:"varint,1,opt,name=online"`
	// If true, update the offline store.
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Offline *bool `json:"offline,omitempty" protobuf:"varint,2,opt,name=offline"`
	// StartDate marks the start of materialization.
	// NOTE(review): undocumented in the original — confirm exact semantics with the feature-store controller.
	// +kubebuilder:validation:Optional
	StartDate *metav1.Time `json:"startDate,omitempty" protobuf:"bytes,3,opt,name=startDate"`
	// OfflineTTL is the time-to-live for offline store records
	// (presumably 0 disables expiry; units are not shown here — confirm).
	// +kubebuilder:validation:Optional
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:default:=0
	OfflineTTL *int32 `json:"offlineTTL,omitempty" protobuf:"varint,4,opt,name=offlineTTL"`
	// OnlineTTL is the time-to-live for online store records
	// (presumably 0 disables expiry; units are not shown here — confirm).
	// +kubebuilder:validation:Optional
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:default:=0
	OnlineTTL *int32 `json:"onlineTTL,omitempty" protobuf:"varint,5,opt,name=onlineTTL"`
	// Backfill is the number of days to store information from the past in the feature store.
	// +kubebuilder:validation:Optional
	// +kubebuilder:default:=21
	// +kubebuilder:validation:Minimum=0
	Backfill *int32 `json:"backfill,omitempty" protobuf:"varint,6,opt,name=backfill"`
	// Schedule sets the sync schedule between the offline store and the online store.
	//+kubebuilder:validation:Optional
	Schedule catalog.RunSchedule `json:"schedule,omitempty" protobuf:"bytes,7,opt,name=schedule"`
}

func (*MaterializationSpec) DeepCopy

func (in *MaterializationSpec) DeepCopy() *MaterializationSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MaterializationSpec.

func (*MaterializationSpec) DeepCopyInto

func (in *MaterializationSpec) DeepCopyInto(out *MaterializationSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*MaterializationSpec) Descriptor

func (*MaterializationSpec) Descriptor() ([]byte, []int)

func (*MaterializationSpec) Marshal

func (m *MaterializationSpec) Marshal() (dAtA []byte, err error)

func (*MaterializationSpec) MarshalTo

func (m *MaterializationSpec) MarshalTo(dAtA []byte) (int, error)

func (*MaterializationSpec) MarshalToSizedBuffer

func (m *MaterializationSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*MaterializationSpec) ProtoMessage

func (*MaterializationSpec) ProtoMessage()

func (*MaterializationSpec) Reset

func (m *MaterializationSpec) Reset()

func (*MaterializationSpec) Size

func (m *MaterializationSpec) Size() (n int)

func (*MaterializationSpec) String

func (this *MaterializationSpec) String() string

func (*MaterializationSpec) Unmarshal

func (m *MaterializationSpec) Unmarshal(dAtA []byte) error

func (*MaterializationSpec) XXX_DiscardUnknown

func (m *MaterializationSpec) XXX_DiscardUnknown()

func (*MaterializationSpec) XXX_Marshal

func (m *MaterializationSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*MaterializationSpec) XXX_Merge

func (m *MaterializationSpec) XXX_Merge(src proto.Message)

func (*MaterializationSpec) XXX_Size

func (m *MaterializationSpec) XXX_Size() int

func (*MaterializationSpec) XXX_Unmarshal

func (m *MaterializationSpec) XXX_Unmarshal(b []byte) error

type MetricSpec

type MetricSpec struct {
	// DatasetName is the name of the dataset the metric is read from.
	// +kubebuilder:validation:Optional
	DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"`
	// Column is the name of the column holding the metric value.
	// +kubebuilder:validation:Optional
	Column *string `json:"column,omitempty" protobuf:"bytes,2,opt,name=column"`
	// Row is the row number of the metric value.
	// +kubebuilder:validation:Optional
	Row *int32 `json:"row,omitempty" protobuf:"varint,3,opt,name=row"`
	// Scalar is a constant value — presumably used instead of a dataset
	// cell reference; confirm (the original comment was garbled).
	// +kubebuilder:validation:Optional
	Scalar *string `json:"scalar,omitempty" protobuf:"bytes,4,opt,name=scalar"`
}

func (*MetricSpec) DeepCopy

func (in *MetricSpec) DeepCopy() *MetricSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetricSpec.

func (*MetricSpec) DeepCopyInto

func (in *MetricSpec) DeepCopyInto(out *MetricSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*MetricSpec) Descriptor

func (*MetricSpec) Descriptor() ([]byte, []int)

func (*MetricSpec) Marshal

func (m *MetricSpec) Marshal() (dAtA []byte, err error)

func (*MetricSpec) MarshalTo

func (m *MetricSpec) MarshalTo(dAtA []byte) (int, error)

func (*MetricSpec) MarshalToSizedBuffer

func (m *MetricSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*MetricSpec) ProtoMessage

func (*MetricSpec) ProtoMessage()

func (*MetricSpec) Reset

func (m *MetricSpec) Reset()

func (*MetricSpec) Size

func (m *MetricSpec) Size() (n int)

func (*MetricSpec) String

func (this *MetricSpec) String() string

func (*MetricSpec) Unmarshal

func (m *MetricSpec) Unmarshal(dAtA []byte) error

func (*MetricSpec) XXX_DiscardUnknown

func (m *MetricSpec) XXX_DiscardUnknown()

func (*MetricSpec) XXX_Marshal

func (m *MetricSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*MetricSpec) XXX_Merge

func (m *MetricSpec) XXX_Merge(src proto.Message)

func (*MetricSpec) XXX_Size

func (m *MetricSpec) XXX_Size() int

func (*MetricSpec) XXX_Unmarshal

func (m *MetricSpec) XXX_Unmarshal(b []byte) error

type OutlierStatistics added in v0.6.252

type OutlierStatistics struct {
	// Lower is the number of outliers below baseline.
	Lower int32 `json:"lower,omitempty" protobuf:"varint,1,opt,name=lower"`
	// Upper is the number of outliers above baseline.
	Upper int32 `json:"upper,omitempty" protobuf:"varint,2,opt,name=upper"`
	// Percent is the percentage of rows detected as outliers.
	// NOTE(review): wire type "bytes" is unusual for a float32 (normally fixed32) — verify against the .proto definition.
	Percent float32 `json:"percent,omitempty" protobuf:"bytes,3,opt,name=percent"`
}

func (*OutlierStatistics) DeepCopy added in v0.6.252

func (in *OutlierStatistics) DeepCopy() *OutlierStatistics

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OutlierStatistics.

func (*OutlierStatistics) DeepCopyInto added in v0.6.252

func (in *OutlierStatistics) DeepCopyInto(out *OutlierStatistics)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*OutlierStatistics) Descriptor added in v0.6.252

func (*OutlierStatistics) Descriptor() ([]byte, []int)

func (*OutlierStatistics) Marshal added in v0.6.252

func (m *OutlierStatistics) Marshal() (dAtA []byte, err error)

func (*OutlierStatistics) MarshalTo added in v0.6.252

func (m *OutlierStatistics) MarshalTo(dAtA []byte) (int, error)

func (*OutlierStatistics) MarshalToSizedBuffer added in v0.6.252

func (m *OutlierStatistics) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*OutlierStatistics) ProtoMessage added in v0.6.252

func (*OutlierStatistics) ProtoMessage()

func (*OutlierStatistics) Reset added in v0.6.252

func (m *OutlierStatistics) Reset()

func (*OutlierStatistics) Size added in v0.6.252

func (m *OutlierStatistics) Size() (n int)

func (*OutlierStatistics) String added in v0.6.252

func (this *OutlierStatistics) String() string

func (*OutlierStatistics) Unmarshal added in v0.6.252

func (m *OutlierStatistics) Unmarshal(dAtA []byte) error

func (*OutlierStatistics) XXX_DiscardUnknown added in v0.6.252

func (m *OutlierStatistics) XXX_DiscardUnknown()

func (*OutlierStatistics) XXX_Marshal added in v0.6.252

func (m *OutlierStatistics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*OutlierStatistics) XXX_Merge added in v0.6.252

func (m *OutlierStatistics) XXX_Merge(src proto.Message)

func (*OutlierStatistics) XXX_Size added in v0.6.252

func (m *OutlierStatistics) XXX_Size() int

func (*OutlierStatistics) XXX_Unmarshal added in v0.6.252

func (m *OutlierStatistics) XXX_Unmarshal(b []byte) error

type PageSpec

type PageSpec struct {
	// Rows is the list of row specifications that make up the page.
	Rows []RowSpec `json:"rows,omitempty" protobuf:"bytes,1,rep,name=rows"`
}

func (*PageSpec) DeepCopy

func (in *PageSpec) DeepCopy() *PageSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PageSpec.

func (*PageSpec) DeepCopyInto

func (in *PageSpec) DeepCopyInto(out *PageSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*PageSpec) Descriptor

func (*PageSpec) Descriptor() ([]byte, []int)

func (*PageSpec) Marshal

func (m *PageSpec) Marshal() (dAtA []byte, err error)

func (*PageSpec) MarshalTo

func (m *PageSpec) MarshalTo(dAtA []byte) (int, error)

func (*PageSpec) MarshalToSizedBuffer

func (m *PageSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*PageSpec) ProtoMessage

func (*PageSpec) ProtoMessage()

func (*PageSpec) Reset

func (m *PageSpec) Reset()

func (*PageSpec) Size

func (m *PageSpec) Size() (n int)

func (*PageSpec) String

func (this *PageSpec) String() string

func (*PageSpec) Unmarshal

func (m *PageSpec) Unmarshal(dAtA []byte) error

func (*PageSpec) XXX_DiscardUnknown

func (m *PageSpec) XXX_DiscardUnknown()

func (*PageSpec) XXX_Marshal

func (m *PageSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*PageSpec) XXX_Merge

func (m *PageSpec) XXX_Merge(src proto.Message)

func (*PageSpec) XXX_Size

func (m *PageSpec) XXX_Size() int

func (*PageSpec) XXX_Unmarshal

func (m *PageSpec) XXX_Unmarshal(b []byte) error

type ParquetFileSpec added in v0.4.861

type ParquetFileSpec struct {
	// Engine is the name of the parquet engine to use; defaults to "auto".
	// +kubebuilder:default:="auto"
	// +kubebuilder:validation:Optional
	Engine *string `json:"engine,omitempty" protobuf:"bytes,1,opt,name=engine"`
}

ParquetFileSpec specifies the format of a parquet file.

func (*ParquetFileSpec) DeepCopy added in v0.4.861

func (in *ParquetFileSpec) DeepCopy() *ParquetFileSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParquetFileSpec.

func (*ParquetFileSpec) DeepCopyInto added in v0.4.861

func (in *ParquetFileSpec) DeepCopyInto(out *ParquetFileSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ParquetFileSpec) Descriptor added in v0.4.861

func (*ParquetFileSpec) Descriptor() ([]byte, []int)

func (*ParquetFileSpec) Marshal added in v0.4.861

func (m *ParquetFileSpec) Marshal() (dAtA []byte, err error)

func (*ParquetFileSpec) MarshalTo added in v0.4.861

func (m *ParquetFileSpec) MarshalTo(dAtA []byte) (int, error)

func (*ParquetFileSpec) MarshalToSizedBuffer added in v0.4.861

func (m *ParquetFileSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*ParquetFileSpec) ProtoMessage added in v0.4.861

func (*ParquetFileSpec) ProtoMessage()

func (*ParquetFileSpec) Reset added in v0.4.861

func (m *ParquetFileSpec) Reset()

func (*ParquetFileSpec) Size added in v0.4.861

func (m *ParquetFileSpec) Size() (n int)

func (*ParquetFileSpec) String added in v0.4.861

func (this *ParquetFileSpec) String() string

func (*ParquetFileSpec) Unmarshal added in v0.4.861

func (m *ParquetFileSpec) Unmarshal(dAtA []byte) error

func (*ParquetFileSpec) XXX_DiscardUnknown added in v0.4.861

func (m *ParquetFileSpec) XXX_DiscardUnknown()

func (*ParquetFileSpec) XXX_Marshal added in v0.4.861

func (m *ParquetFileSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ParquetFileSpec) XXX_Merge added in v0.4.861

func (m *ParquetFileSpec) XXX_Merge(src proto.Message)

func (*ParquetFileSpec) XXX_Size added in v0.4.861

func (m *ParquetFileSpec) XXX_Size() int

func (*ParquetFileSpec) XXX_Unmarshal added in v0.4.861

func (m *ParquetFileSpec) XXX_Unmarshal(b []byte) error

type QuoteChar

// QuoteChar specifies the quoting style used when parsing flat files.
// +kubebuilder:validation:Enum="double-quote";"single-quote";"auto"
type QuoteChar string

const (
	// SingleQuote quotes fields with single quotes.
	SingleQuote QuoteChar = "single-quote"
	// DoubleQuote quotes fields with double quotes.
	DoubleQuote QuoteChar = "double-quote"
	// AutoQuote selects automatic quote detection.
	// Fixed: was mistakenly assigned "double-quote", which duplicated
	// DoubleQuote and made the declared enum value "auto" unreachable.
	AutoQuote QuoteChar = "auto"
)

type Recipe

type Recipe struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	// Spec defines the desired state of the Recipe.
	Spec              RecipeSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	// Status reflects the observed state of the Recipe.
	//+optional
	Status RecipeStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}

+kubebuilder:object:root=true +kubebuilder:resource:path=recipes,shortName=rc,singular=recipe,categories={data,modela,all} +kubebuilder:storageversion +kubebuilder:subresource:status +kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.versionName" +kubebuilder:printcolumn:name="Description",type="string",JSONPath=".spec.description" +kubebuilder:printcolumn:name="Last Run",type="date",JSONPath=".status.lastRun.at",description="" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" Recipe represents a single batch of data

func (*Recipe) AddFinalizer

func (recipe *Recipe) AddFinalizer()

func (*Recipe) CreateOrUpdateCond

func (recipe *Recipe) CreateOrUpdateCond(cond metav1.Condition)

Merge or update condition

func (*Recipe) DeepCopy

func (in *Recipe) DeepCopy() *Recipe

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Recipe.

func (*Recipe) DeepCopyInto

func (in *Recipe) DeepCopyInto(out *Recipe)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*Recipe) DeepCopyObject

func (in *Recipe) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*Recipe) Default

func (recipe *Recipe) Default()

func (Recipe) Deleted

func (recipe Recipe) Deleted() bool

func (*Recipe) Descriptor

func (*Recipe) Descriptor() ([]byte, []int)

func (Recipe) GetCond

func (recipe Recipe) GetCond(t string) metav1.Condition

func (*Recipe) GetCondIdx

func (recipe *Recipe) GetCondIdx(t string) int

func (Recipe) GetObservedGeneration added in v0.6.252

func (recipe Recipe) GetObservedGeneration() int64

func (Recipe) GetStatus added in v0.6.252

func (recipe Recipe) GetStatus() proto.Message

func (Recipe) HasFinalizer

func (recipe Recipe) HasFinalizer() bool

func (Recipe) IsInCond

func (recipe Recipe) IsInCond(ct string) bool

func (Recipe) IsReady

func (recipe Recipe) IsReady() bool

func (*Recipe) MarkReady

func (recipe *Recipe) MarkReady()

func (*Recipe) Marshal

func (m *Recipe) Marshal() (dAtA []byte, err error)

func (*Recipe) MarshalTo

func (m *Recipe) MarshalTo(dAtA []byte) (int, error)

func (*Recipe) MarshalToSizedBuffer

func (m *Recipe) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*Recipe) Populate

func (recipe *Recipe) Populate(name string)

func (Recipe) PrintConditions

func (recipe Recipe) PrintConditions()

func (*Recipe) ProtoMessage

func (*Recipe) ProtoMessage()

func (*Recipe) RemoveFinalizer

func (recipe *Recipe) RemoveFinalizer()

func (*Recipe) Reset

func (m *Recipe) Reset()

func (*Recipe) SetObservedGeneration added in v0.6.252

func (recipe *Recipe) SetObservedGeneration(generation int64)

func (*Recipe) SetStatus added in v0.6.252

func (recipe *Recipe) SetStatus(status interface{})

func (*Recipe) SetUpdatedAt added in v0.6.252

func (recipe *Recipe) SetUpdatedAt(time *metav1.Time)

func (*Recipe) SetupWebhookWithManager

func (recipe *Recipe) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*Recipe) Size

func (m *Recipe) Size() (n int)

func (*Recipe) String

func (this *Recipe) String() string

func (*Recipe) Unmarshal

func (m *Recipe) Unmarshal(dAtA []byte) error

func (*Recipe) UpdateRunStatus added in v0.4.612

func (recipe *Recipe) UpdateRunStatus(run RecipeRun)

func (Recipe) ValidateCreate

func (recipe Recipe) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (Recipe) ValidateDelete

func (recipe Recipe) ValidateDelete() error

func (Recipe) ValidateUpdate

func (recipe Recipe) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*Recipe) XXX_DiscardUnknown

func (m *Recipe) XXX_DiscardUnknown()

func (*Recipe) XXX_Marshal

func (m *Recipe) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*Recipe) XXX_Merge

func (m *Recipe) XXX_Merge(src proto.Message)

func (*Recipe) XXX_Size

func (m *Recipe) XXX_Size() int

func (*Recipe) XXX_Unmarshal

func (m *Recipe) XXX_Unmarshal(b []byte) error

type RecipeConditionType

type RecipeConditionType string

RecipeConditionType is the condition on the Recipe

type RecipeInputSpec

type RecipeInputSpec struct {
	// DatasetName is the name of the input dataset.
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"`
	// Location is the folder where the actual data resides, if not using a dataset.
	// NOTE(review): the marker below is malformed (trailing period) and will likely be ignored by controller-gen — confirm intent.
	// +required.
	Location *catalog.DataLocation `json:"location,omitempty" protobuf:"bytes,2,opt,name=location"`
	// Format is the dataset file format; defaults to csv.
	// +kubebuilder:default:=csv
	Format *FlatFileType `json:"format,omitempty" protobuf:"bytes,3,opt,name=format"`
}

RecipeInputSpec specifies the input for a recipe.

func (*RecipeInputSpec) DeepCopy

func (in *RecipeInputSpec) DeepCopy() *RecipeInputSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeInputSpec.

func (*RecipeInputSpec) DeepCopyInto

func (in *RecipeInputSpec) DeepCopyInto(out *RecipeInputSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecipeInputSpec) Descriptor

func (*RecipeInputSpec) Descriptor() ([]byte, []int)

func (*RecipeInputSpec) Marshal

func (m *RecipeInputSpec) Marshal() (dAtA []byte, err error)

func (*RecipeInputSpec) MarshalTo

func (m *RecipeInputSpec) MarshalTo(dAtA []byte) (int, error)

func (*RecipeInputSpec) MarshalToSizedBuffer

func (m *RecipeInputSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecipeInputSpec) ProtoMessage

func (*RecipeInputSpec) ProtoMessage()

func (*RecipeInputSpec) Reset

func (m *RecipeInputSpec) Reset()

func (*RecipeInputSpec) Size

func (m *RecipeInputSpec) Size() (n int)

func (*RecipeInputSpec) String

func (this *RecipeInputSpec) String() string

func (*RecipeInputSpec) Unmarshal

func (m *RecipeInputSpec) Unmarshal(dAtA []byte) error

func (*RecipeInputSpec) XXX_DiscardUnknown

func (m *RecipeInputSpec) XXX_DiscardUnknown()

func (*RecipeInputSpec) XXX_Marshal

func (m *RecipeInputSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecipeInputSpec) XXX_Merge

func (m *RecipeInputSpec) XXX_Merge(src proto.Message)

func (*RecipeInputSpec) XXX_Size

func (m *RecipeInputSpec) XXX_Size() int

func (*RecipeInputSpec) XXX_Unmarshal

func (m *RecipeInputSpec) XXX_Unmarshal(b []byte) error

type RecipeList

type RecipeList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	// Items is the list of Recipes.
	Items           []Recipe `json:"items" protobuf:"bytes,2,rep,name=items"`
}

RecipeList contains a list of Recipes +kubebuilder:object:root=true

func (*RecipeList) DeepCopy

func (in *RecipeList) DeepCopy() *RecipeList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeList.

func (*RecipeList) DeepCopyInto

func (in *RecipeList) DeepCopyInto(out *RecipeList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecipeList) DeepCopyObject

func (in *RecipeList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*RecipeList) Descriptor

func (*RecipeList) Descriptor() ([]byte, []int)

func (*RecipeList) Marshal

func (m *RecipeList) Marshal() (dAtA []byte, err error)

func (*RecipeList) MarshalTo

func (m *RecipeList) MarshalTo(dAtA []byte) (int, error)

func (*RecipeList) MarshalToSizedBuffer

func (m *RecipeList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecipeList) ProtoMessage

func (*RecipeList) ProtoMessage()

func (*RecipeList) Reset

func (m *RecipeList) Reset()

func (*RecipeList) Size

func (m *RecipeList) Size() (n int)

func (*RecipeList) String

func (this *RecipeList) String() string

func (*RecipeList) Unmarshal

func (m *RecipeList) Unmarshal(dAtA []byte) error

func (*RecipeList) XXX_DiscardUnknown

func (m *RecipeList) XXX_DiscardUnknown()

func (*RecipeList) XXX_Marshal

func (m *RecipeList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecipeList) XXX_Merge

func (m *RecipeList) XXX_Merge(src proto.Message)

func (*RecipeList) XXX_Size

func (m *RecipeList) XXX_Size() int

func (*RecipeList) XXX_Unmarshal

func (m *RecipeList) XXX_Unmarshal(b []byte) error

type RecipeOutputSpec

type RecipeOutputSpec struct {
	// CreateDataset, if true, creates a new dataset when the recipe is done.
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	CreateDataset *bool `json:"createDataset,omitempty" protobuf:"varint,1,opt,name=createDataset"`
	// DatasetName is the name of the dataset output of the recipe.
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,2,opt,name=datasetName"`
	// Location is the data location folder where the actual data resides.
	// NOTE(review): marked Required yet the json tag carries omitempty and the field is a pointer — confirm which is intended.
	// +kubebuilder:validation:Required
	// +required
	Location *catalog.DataLocation `json:"location,omitempty" protobuf:"bytes,3,opt,name=location"`
}

RecipeOutputSpec specifies the output of a recipe.

func (*RecipeOutputSpec) DeepCopy

func (in *RecipeOutputSpec) DeepCopy() *RecipeOutputSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeOutputSpec.

func (*RecipeOutputSpec) DeepCopyInto

func (in *RecipeOutputSpec) DeepCopyInto(out *RecipeOutputSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecipeOutputSpec) Descriptor

func (*RecipeOutputSpec) Descriptor() ([]byte, []int)

func (*RecipeOutputSpec) Marshal

func (m *RecipeOutputSpec) Marshal() (dAtA []byte, err error)

func (*RecipeOutputSpec) MarshalTo

func (m *RecipeOutputSpec) MarshalTo(dAtA []byte) (int, error)

func (*RecipeOutputSpec) MarshalToSizedBuffer

func (m *RecipeOutputSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecipeOutputSpec) ProtoMessage

func (*RecipeOutputSpec) ProtoMessage()

func (*RecipeOutputSpec) Reset

func (m *RecipeOutputSpec) Reset()

func (*RecipeOutputSpec) Size

func (m *RecipeOutputSpec) Size() (n int)

func (*RecipeOutputSpec) String

func (this *RecipeOutputSpec) String() string

func (*RecipeOutputSpec) Unmarshal

func (m *RecipeOutputSpec) Unmarshal(dAtA []byte) error

func (*RecipeOutputSpec) XXX_DiscardUnknown

func (m *RecipeOutputSpec) XXX_DiscardUnknown()

func (*RecipeOutputSpec) XXX_Marshal

func (m *RecipeOutputSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecipeOutputSpec) XXX_Merge

func (m *RecipeOutputSpec) XXX_Merge(src proto.Message)

func (*RecipeOutputSpec) XXX_Size

func (m *RecipeOutputSpec) XXX_Size() int

func (*RecipeOutputSpec) XXX_Unmarshal

func (m *RecipeOutputSpec) XXX_Unmarshal(b []byte) error

type RecipePartSpec

type RecipePartSpec struct {
	// RecipeName is the name of the recipe to run.
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	RecipeName *string `json:"recipeName,omitempty" protobuf:"bytes,1,opt,name=recipeName"`
	// Dependents is the list of recipes that need to run after this recipe.
	Dependents []string `json:"dependents,omitempty" protobuf:"bytes,2,rep,name=dependents"`
}

func (*RecipePartSpec) DeepCopy

func (in *RecipePartSpec) DeepCopy() *RecipePartSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipePartSpec.

func (*RecipePartSpec) DeepCopyInto

func (in *RecipePartSpec) DeepCopyInto(out *RecipePartSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecipePartSpec) Descriptor

func (*RecipePartSpec) Descriptor() ([]byte, []int)

func (*RecipePartSpec) Marshal

func (m *RecipePartSpec) Marshal() (dAtA []byte, err error)

func (*RecipePartSpec) MarshalTo

func (m *RecipePartSpec) MarshalTo(dAtA []byte) (int, error)

func (*RecipePartSpec) MarshalToSizedBuffer

func (m *RecipePartSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecipePartSpec) ProtoMessage

func (*RecipePartSpec) ProtoMessage()

func (*RecipePartSpec) Reset

func (m *RecipePartSpec) Reset()

func (*RecipePartSpec) Size

func (m *RecipePartSpec) Size() (n int)

func (*RecipePartSpec) String

func (this *RecipePartSpec) String() string

func (*RecipePartSpec) Unmarshal

func (m *RecipePartSpec) Unmarshal(dAtA []byte) error

func (*RecipePartSpec) XXX_DiscardUnknown

func (m *RecipePartSpec) XXX_DiscardUnknown()

func (*RecipePartSpec) XXX_Marshal

func (m *RecipePartSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecipePartSpec) XXX_Merge

func (m *RecipePartSpec) XXX_Merge(src proto.Message)

func (*RecipePartSpec) XXX_Size

func (m *RecipePartSpec) XXX_Size() int

func (*RecipePartSpec) XXX_Unmarshal

func (m *RecipePartSpec) XXX_Unmarshal(b []byte) error

type RecipeRun

// RecipeRun represents one execution of a Recipe. Execution is performed by
// creating a Kubernetes Job (see the kubebuilder markers in the surrounding
// documentation for the registered printer columns and subresources).
type RecipeRun struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	// Spec is the desired state of the run.
	Spec              RecipeRunSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	// Status is the observed state of the run.
	//+optional
	Status RecipeRunStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}

+kubebuilder:subresource:status +kubebuilder:resource:path=reciperuns,shortName=rcr,singular=reciperun,categories={data,modela,all} +kubebuilder:object:root=true +kubebuilder:storageversion +kubebuilder:printcolumn:name="Status",type="string",JSONPath=".status.phase" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.versionName" +kubebuilder:printcolumn:name="Recipe",type="string",JSONPath=".spec.recipeName" +kubebuilder:printcolumn:name="StartedAt",type="date",JSONPath=".status.startTime",priority=1 +kubebuilder:printcolumn:name="CompletedAt",type="date",JSONPath=".status.completionTime",priority=1 RecipeRun represents one execution of a Recipe. Execution is performed by creating a Kubernetes Job.

func (*RecipeRun) AddFinalizer

func (reciperun *RecipeRun) AddFinalizer()

func (RecipeRun) CompletionAlert added in v0.4.601

func (reciperun RecipeRun) CompletionAlert(notification catalog.NotificationSpec) *infra.Alert

Generate a recipe run completion alert

func (*RecipeRun) CreateOrUpdateCond

func (reciperun *RecipeRun) CreateOrUpdateCond(cond metav1.Condition)

Merge or update condition

func (*RecipeRun) DeepCopy

func (in *RecipeRun) DeepCopy() *RecipeRun

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeRun.

func (*RecipeRun) DeepCopyInto

func (in *RecipeRun) DeepCopyInto(out *RecipeRun)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecipeRun) DeepCopyObject

func (in *RecipeRun) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*RecipeRun) Default

func (reciperun *RecipeRun) Default()

func (*RecipeRun) Deleted

func (reciperun *RecipeRun) Deleted() bool

func (*RecipeRun) Descriptor

func (*RecipeRun) Descriptor() ([]byte, []int)

func (RecipeRun) ErrorAlert added in v0.4.601

func (reciperun RecipeRun) ErrorAlert(notification catalog.NotificationSpec, err error) *infra.Alert

func (RecipeRun) GetCond

func (reciperun RecipeRun) GetCond(t string) metav1.Condition

func (RecipeRun) GetCondIdx

func (reciperun RecipeRun) GetCondIdx(t string) int

func (RecipeRun) GetObservedGeneration added in v0.6.252

func (reciperun RecipeRun) GetObservedGeneration() int64

func (RecipeRun) GetStatus added in v0.6.252

func (reciperun RecipeRun) GetStatus() proto.Message

func (RecipeRun) HasFinalizer

func (reciperun RecipeRun) HasFinalizer() bool

func (*RecipeRun) IsFailed

func (reciperun *RecipeRun) IsFailed() bool

func (RecipeRun) IsInCond

func (reciperun RecipeRun) IsInCond(ct string) bool

func (RecipeRun) IsReady

func (reciperun RecipeRun) IsReady() bool

func (*RecipeRun) IsRunning

func (reciperun *RecipeRun) IsRunning() bool

func (RecipeRun) ManifestURI added in v0.5.472

func (reciperun RecipeRun) ManifestURI() string

func (*RecipeRun) MarkCompleted

func (reciperun *RecipeRun) MarkCompleted()

func (*RecipeRun) MarkFailed

func (reciperun *RecipeRun) MarkFailed(error string)

func (*RecipeRun) MarkRunning

func (reciperun *RecipeRun) MarkRunning()

func (*RecipeRun) Marshal

func (m *RecipeRun) Marshal() (dAtA []byte, err error)

func (*RecipeRun) MarshalTo

func (m *RecipeRun) MarshalTo(dAtA []byte) (int, error)

func (*RecipeRun) MarshalToSizedBuffer

func (m *RecipeRun) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (RecipeRun) PrintConditions

func (reciperun RecipeRun) PrintConditions()

func (*RecipeRun) ProtoMessage

func (*RecipeRun) ProtoMessage()

func (*RecipeRun) RemoveFinalizer

func (reciperun *RecipeRun) RemoveFinalizer()

func (RecipeRun) ReportName

func (reciperun RecipeRun) ReportName() string

func (*RecipeRun) Reset

func (m *RecipeRun) Reset()

func (RecipeRun) RootURI added in v0.5.472

func (reciperun RecipeRun) RootURI() string

func (RecipeRun) RunStatus added in v0.4.614

func (reciperun RecipeRun) RunStatus() *catalog.LastRunStatus

Return the state of the run as RunStatus

func (*RecipeRun) SetObservedGeneration added in v0.6.252

func (reciperun *RecipeRun) SetObservedGeneration(generation int64)

func (*RecipeRun) SetStatus added in v0.6.252

func (reciperun *RecipeRun) SetStatus(status interface{})

func (*RecipeRun) SetUpdatedAt added in v0.6.252

func (reciperun *RecipeRun) SetUpdatedAt(time *metav1.Time)

func (*RecipeRun) SetupWebhookWithManager

func (reciperun *RecipeRun) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*RecipeRun) Size

func (m *RecipeRun) Size() (n int)

func (RecipeRun) StatusString

func (reciperun RecipeRun) StatusString() string

func (*RecipeRun) String

func (this *RecipeRun) String() string

func (*RecipeRun) Unmarshal

func (m *RecipeRun) Unmarshal(dAtA []byte) error

func (*RecipeRun) ValidateCreate

func (reciperun *RecipeRun) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*RecipeRun) ValidateDelete

func (reciperun *RecipeRun) ValidateDelete() error

func (*RecipeRun) ValidateUpdate

func (reciperun *RecipeRun) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*RecipeRun) XXX_DiscardUnknown

func (m *RecipeRun) XXX_DiscardUnknown()

func (*RecipeRun) XXX_Marshal

func (m *RecipeRun) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecipeRun) XXX_Merge

func (m *RecipeRun) XXX_Merge(src proto.Message)

func (*RecipeRun) XXX_Size

func (m *RecipeRun) XXX_Size() int

func (*RecipeRun) XXX_Unmarshal

func (m *RecipeRun) XXX_Unmarshal(b []byte) error

type RecipeRunList

// RecipeRunList contains a list of RecipeRun objects.
type RecipeRunList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	// Items is the list of RecipeRun objects.
	Items           []RecipeRun `json:"items" protobuf:"bytes,2,rep,name=items"`
}

+kubebuilder:object:root=true RecipeRunList contains a list of RecipeRuns

func (*RecipeRunList) DeepCopy

func (in *RecipeRunList) DeepCopy() *RecipeRunList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeRunList.

func (*RecipeRunList) DeepCopyInto

func (in *RecipeRunList) DeepCopyInto(out *RecipeRunList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecipeRunList) DeepCopyObject

func (in *RecipeRunList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*RecipeRunList) Descriptor

func (*RecipeRunList) Descriptor() ([]byte, []int)

func (*RecipeRunList) Marshal

func (m *RecipeRunList) Marshal() (dAtA []byte, err error)

func (*RecipeRunList) MarshalTo

func (m *RecipeRunList) MarshalTo(dAtA []byte) (int, error)

func (*RecipeRunList) MarshalToSizedBuffer

func (m *RecipeRunList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecipeRunList) ProtoMessage

func (*RecipeRunList) ProtoMessage()

func (*RecipeRunList) Reset

func (m *RecipeRunList) Reset()

func (*RecipeRunList) Size

func (m *RecipeRunList) Size() (n int)

func (*RecipeRunList) String

func (this *RecipeRunList) String() string

func (*RecipeRunList) Unmarshal

func (m *RecipeRunList) Unmarshal(dAtA []byte) error

func (*RecipeRunList) XXX_DiscardUnknown

func (m *RecipeRunList) XXX_DiscardUnknown()

func (*RecipeRunList) XXX_Marshal

func (m *RecipeRunList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecipeRunList) XXX_Merge

func (m *RecipeRunList) XXX_Merge(src proto.Message)

func (*RecipeRunList) XXX_Size

func (m *RecipeRunList) XXX_Size() int

func (*RecipeRunList) XXX_Unmarshal

func (m *RecipeRunList) XXX_Unmarshal(b []byte) error

type RecipeRunPhase

// RecipeRunPhase is the lifecycle phase of a RecipeRun.
type RecipeRunPhase string

// Valid RecipeRun phases.
const (
	RecipeRunPhasePending     RecipeRunPhase = "Pending"
	RecipeRunPhaseAborted     RecipeRunPhase = "Aborted"
	RecipeRunPhaseRunning     RecipeRunPhase = "Running"
	RecipeRunPhaseUnitTesting RecipeRunPhase = "UnitTesting"
	// NOTE(review): the Go identifier says "Succeed" but the serialized
	// value is "Completed"; the value is authoritative on the wire.
	RecipeRunPhaseSucceed     RecipeRunPhase = "Completed"
	RecipeRunPhaseFailed      RecipeRunPhase = "Failed"
)

type RecipeRunSpec

// RecipeRunSpec defines the desired state of a RecipeRun.
type RecipeRunSpec struct {
	// VersionName is the data product version of the recipe run.
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	VersionName *string `json:"versionName,omitempty" protobuf:"bytes,1,opt,name=versionName"`
	// The name of the recipe that executes this run
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	RecipeName *string `json:"recipeName,omitempty" protobuf:"bytes,2,opt,name=recipeName"`
	// LabRef references the execution environment (lab) for this recipe run.
	// +kubebuilder:validation:Optional
	LabRef v1.ObjectReference `json:"labRef,omitempty" protobuf:"bytes,3,opt,name=labRef"`
	// The location of the data output.
	// +required.
	Location catalog.DataLocation `json:"location,omitempty" protobuf:"bytes,4,opt,name=location"`
	// Resources are the hardware resource requirements for a recipe run.
	// +kubebuilder:validation:Optional
	Resources catalog.ResourceSpec `json:"resources,omitempty" protobuf:"bytes,5,opt,name=resources"`
	// TTL is the time-to-live of the run; 0 means no expiry.
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Optional
	TTL *int32 `json:"ttl,omitempty" protobuf:"varint,6,opt,name=ttl"`
	// The model class for this run, if it was created by a model class.
	// NOTE(review): original comment said "data pipeline run"; looks like a
	// copy-paste from DataPipelineRunSpec — confirm intent.
	// +kubebuilder:validation:Optional
	ModelClassName *string `json:"modelClassName,omitempty" protobuf:"bytes,7,opt,name=modelClassName"`
	// The name of the model class run that created this run, if any.
	// +kubebuilder:validation:Optional
	ModelClassRunName *string `json:"modelClassRunName,omitempty" protobuf:"bytes,8,opt,name=modelClassRunName"`
}

RecipeRunSpec defines the desired state of a RecipeRun

func (*RecipeRunSpec) DeepCopy

func (in *RecipeRunSpec) DeepCopy() *RecipeRunSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeRunSpec.

func (*RecipeRunSpec) DeepCopyInto

func (in *RecipeRunSpec) DeepCopyInto(out *RecipeRunSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecipeRunSpec) Descriptor

func (*RecipeRunSpec) Descriptor() ([]byte, []int)

func (*RecipeRunSpec) Marshal

func (m *RecipeRunSpec) Marshal() (dAtA []byte, err error)

func (*RecipeRunSpec) MarshalTo

func (m *RecipeRunSpec) MarshalTo(dAtA []byte) (int, error)

func (*RecipeRunSpec) MarshalToSizedBuffer

func (m *RecipeRunSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecipeRunSpec) ProtoMessage

func (*RecipeRunSpec) ProtoMessage()

func (*RecipeRunSpec) Reset

func (m *RecipeRunSpec) Reset()

func (*RecipeRunSpec) Size

func (m *RecipeRunSpec) Size() (n int)

func (*RecipeRunSpec) String

func (this *RecipeRunSpec) String() string

func (*RecipeRunSpec) Unmarshal

func (m *RecipeRunSpec) Unmarshal(dAtA []byte) error

func (*RecipeRunSpec) XXX_DiscardUnknown

func (m *RecipeRunSpec) XXX_DiscardUnknown()

func (*RecipeRunSpec) XXX_Marshal

func (m *RecipeRunSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecipeRunSpec) XXX_Merge

func (m *RecipeRunSpec) XXX_Merge(src proto.Message)

func (*RecipeRunSpec) XXX_Size

func (m *RecipeRunSpec) XXX_Size() int

func (*RecipeRunSpec) XXX_Unmarshal

func (m *RecipeRunSpec) XXX_Unmarshal(b []byte) error

type RecipeRunStatus

// RecipeRunStatus defines the observed state of a RecipeRun.
type RecipeRunStatus struct {
	// CompletedAt represents the time at which the run ended.
	// +kubebuilder:validation:Optional
	CompletedAt *metav1.Time `json:"completedAt,omitempty" protobuf:"bytes,1,opt,name=completedAt"`
	// Phase is the current lifecycle phase of the run.
	// +kubebuilder:default:="Pending"
	// +kubebuilder:validation:Optional
	Phase RecipeRunPhase `json:"phase,omitempty" protobuf:"bytes,2,opt,name=phase"`
	// ObservedGeneration is the last generation that was acted on.
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,3,opt,name=observedGeneration"`
	// FailureReason holds the failure reason in case of terminal failure.
	// Borrowed from the cluster api controller.
	//+kubebuilder:validation:Optional
	FailureReason *catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,4,opt,name=failureReason"`
	// FailureMessage holds the failure message in case of terminal failure.
	//+kubebuilder:validation:Optional
	FailureMessage *string `json:"failureMessage,omitempty" protobuf:"bytes,5,opt,name=failureMessage"`
	// TriggeredBy records what triggered the run.
	//+kubebuilder:validation:Optional
	TriggeredBy catalog.TriggerType `json:"triggeredBy,omitempty" protobuf:"bytes,6,opt,name=triggeredBy"`
	// Logs holds the location of the log paths.
	// Fixed: the json tag previously read "logs,,omitempty" (stray comma
	// producing an empty tag option); the field name and omitempty
	// semantics are unchanged.
	//+kubebuilder:validation:Optional
	Logs catalog.Logs `json:"logs,omitempty" protobuf:"bytes,7,opt,name=logs"`
	// UpdatedAt is the last time the object was updated.
	//+kubebuilder:validation:Optional
	UpdatedAt *metav1.Time `json:"updatedAt,omitempty" protobuf:"bytes,9,opt,name=updatedAt"`
	// Conditions is the list of observed conditions of the run.
	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []metav1.Condition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,11,rep,name=conditions"`
}

RecipeRunStatus defines the observed state of a RecipeRun

func (*RecipeRunStatus) DeepCopy

func (in *RecipeRunStatus) DeepCopy() *RecipeRunStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeRunStatus.

func (*RecipeRunStatus) DeepCopyInto

func (in *RecipeRunStatus) DeepCopyInto(out *RecipeRunStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecipeRunStatus) Descriptor

func (*RecipeRunStatus) Descriptor() ([]byte, []int)

func (*RecipeRunStatus) Marshal

func (m *RecipeRunStatus) Marshal() (dAtA []byte, err error)

func (*RecipeRunStatus) MarshalTo

func (m *RecipeRunStatus) MarshalTo(dAtA []byte) (int, error)

func (*RecipeRunStatus) MarshalToSizedBuffer

func (m *RecipeRunStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecipeRunStatus) ProtoMessage

func (*RecipeRunStatus) ProtoMessage()

func (*RecipeRunStatus) Reset

func (m *RecipeRunStatus) Reset()

func (*RecipeRunStatus) Size

func (m *RecipeRunStatus) Size() (n int)

func (*RecipeRunStatus) String

func (this *RecipeRunStatus) String() string

func (*RecipeRunStatus) Unmarshal

func (m *RecipeRunStatus) Unmarshal(dAtA []byte) error

func (*RecipeRunStatus) XXX_DiscardUnknown

func (m *RecipeRunStatus) XXX_DiscardUnknown()

func (*RecipeRunStatus) XXX_Marshal

func (m *RecipeRunStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecipeRunStatus) XXX_Merge

func (m *RecipeRunStatus) XXX_Merge(src proto.Message)

func (*RecipeRunStatus) XXX_Size

func (m *RecipeRunStatus) XXX_Size() int

func (*RecipeRunStatus) XXX_Unmarshal

func (m *RecipeRunStatus) XXX_Unmarshal(b []byte) error

type RecipeSpec

// RecipeSpec defines the desired state of a Recipe: its inputs, the ordered
// transformation steps, the desired output, and execution settings.
type RecipeSpec struct {
	// Owner is the owner of the recipe
	// +kubebuilder:default:="no-one"
	// +kubebuilder:validation:Optional
	Owner *string `json:"owner,omitempty" protobuf:"bytes,1,opt,name=owner"`
	// VersionName is the data product version of the recipe
	// +kubebuilder:default:=""
	// +kubebuilder:validation:MaxLength=63
	// required.
	VersionName *string `json:"versionName,omitempty" protobuf:"bytes,2,opt,name=versionName"`
	// Description is the user provided description
	// +kubebuilder:validation:MaxLength=512
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Description *string `json:"description,omitempty" protobuf:"bytes,3,opt,name=description"`
	// Input is the input recipe spec
	Input RecipeInputSpec `json:"input,omitempty" protobuf:"bytes,4,opt,name=input"`
	// Steps are the ordered list of recipe steps
	Steps []RecipeStep `json:"steps,omitempty" protobuf:"bytes,5,rep,name=steps"`
	// Output is the desired output
	Output RecipeOutputSpec `json:"output,omitempty" protobuf:"bytes,6,opt,name=output"`
	// Sample specifies the sampling parameters used when viewing the recipe
	// +kubebuilder:validation:Optional
	Sample SampleSpec `json:"sample,omitempty" protobuf:"bytes,7,opt,name=sample"`
	// Resources defines the resource requirements to run the recipe
	// +kubebuilder:validation:Optional
	Resources catalog.ResourceSpec `json:"resources,omitempty" protobuf:"bytes,8,opt,name=resources"`
	// Timeout is the deadline (in seconds, presumably — confirm against the
	// job controller) set on jobs for this recipe.
	// +kubebuilder:default:=600
	// +kubebuilder:validation:Optional
	Timeout *int64 `json:"timeout,omitempty" protobuf:"varint,10,opt,name=timeout"`
	// TTL is the time-to-live of the recipe; 0 means no expiry.
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Optional
	TTL *int32 `json:"ttl,omitempty" protobuf:"varint,11,opt,name=ttl"`
	// Unit tests templates operating on the recipe run.
	// +kubebuilder:validation:Optional
	UnitTestsTemplate catalog.TestSuite `json:"unitTestsTemplate,omitempty" protobuf:"bytes,12,opt,name=unitTestsTemplate"`
}

RecipeSpec defines the desired state of a Recipe

func (*RecipeSpec) DeepCopy

func (in *RecipeSpec) DeepCopy() *RecipeSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeSpec.

func (*RecipeSpec) DeepCopyInto

func (in *RecipeSpec) DeepCopyInto(out *RecipeSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecipeSpec) Descriptor

func (*RecipeSpec) Descriptor() ([]byte, []int)

func (*RecipeSpec) Marshal

func (m *RecipeSpec) Marshal() (dAtA []byte, err error)

func (*RecipeSpec) MarshalTo

func (m *RecipeSpec) MarshalTo(dAtA []byte) (int, error)

func (*RecipeSpec) MarshalToSizedBuffer

func (m *RecipeSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecipeSpec) ProtoMessage

func (*RecipeSpec) ProtoMessage()

func (*RecipeSpec) Reset

func (m *RecipeSpec) Reset()

func (*RecipeSpec) Size

func (m *RecipeSpec) Size() (n int)

func (*RecipeSpec) String

func (this *RecipeSpec) String() string

func (*RecipeSpec) Unmarshal

func (m *RecipeSpec) Unmarshal(dAtA []byte) error

func (*RecipeSpec) XXX_DiscardUnknown

func (m *RecipeSpec) XXX_DiscardUnknown()

func (*RecipeSpec) XXX_Marshal

func (m *RecipeSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecipeSpec) XXX_Merge

func (m *RecipeSpec) XXX_Merge(src proto.Message)

func (*RecipeSpec) XXX_Size

func (m *RecipeSpec) XXX_Size() int

func (*RecipeSpec) XXX_Unmarshal

func (m *RecipeSpec) XXX_Unmarshal(b []byte) error

type RecipeStatus

// RecipeStatus defines the observed state of a Recipe.
type RecipeStatus struct {
	// ObservedGeneration is the last generation that was acted on.
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,1,opt,name=observedGeneration"`
	// LastRun is the status of the last run created for this recipe.
	// NOTE(review): original comment said "data pipeline run"; looks like a
	// copy-paste from DataPipelineStatus — confirm intent.
	//+kubebuilder:validation:Optional
	LastRun catalog.LastRunStatus `json:"lastRun,omitempty" protobuf:"bytes,2,opt,name=lastRun"`
	// UpdatedAt is the last time the object was updated.
	//+kubebuilder:validation:Optional
	UpdatedAt *metav1.Time `json:"updatedAt,omitempty" protobuf:"bytes,4,opt,name=updatedAt"`
	// Conditions is the list of observed conditions of the recipe.
	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []metav1.Condition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,7,rep,name=conditions"`
}

RecipeStatus defines the observed state of a Recipe

func (*RecipeStatus) DeepCopy

func (in *RecipeStatus) DeepCopy() *RecipeStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeStatus.

func (*RecipeStatus) DeepCopyInto

func (in *RecipeStatus) DeepCopyInto(out *RecipeStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecipeStatus) Descriptor

func (*RecipeStatus) Descriptor() ([]byte, []int)

func (*RecipeStatus) Marshal

func (m *RecipeStatus) Marshal() (dAtA []byte, err error)

func (*RecipeStatus) MarshalTo

func (m *RecipeStatus) MarshalTo(dAtA []byte) (int, error)

func (*RecipeStatus) MarshalToSizedBuffer

func (m *RecipeStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecipeStatus) ProtoMessage

func (*RecipeStatus) ProtoMessage()

func (*RecipeStatus) Reset

func (m *RecipeStatus) Reset()

func (*RecipeStatus) Size

func (m *RecipeStatus) Size() (n int)

func (*RecipeStatus) String

func (this *RecipeStatus) String() string

func (*RecipeStatus) Unmarshal

func (m *RecipeStatus) Unmarshal(dAtA []byte) error

func (*RecipeStatus) XXX_DiscardUnknown

func (m *RecipeStatus) XXX_DiscardUnknown()

func (*RecipeStatus) XXX_Marshal

func (m *RecipeStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecipeStatus) XXX_Merge

func (m *RecipeStatus) XXX_Merge(src proto.Message)

func (*RecipeStatus) XXX_Size

func (m *RecipeStatus) XXX_Size() int

func (*RecipeStatus) XXX_Unmarshal

func (m *RecipeStatus) XXX_Unmarshal(b []byte) error

type RecipeStep

// RecipeStep defines one step in the recipe: a named operation and the
// parameters it is invoked with.
type RecipeStep struct {
	// Op is the operation this step performs.
	Op         RecipeStepOperation `json:"op,omitempty" protobuf:"bytes,1,opt,name=op"`
	// Parameters are the arguments passed to the operation.
	Parameters []*RecipeStepParam  `json:"parameters,omitempty" protobuf:"bytes,2,rep,name=parameters"`
}

RecipeStep defines one step in the recipe

func (*RecipeStep) DeepCopy

func (in *RecipeStep) DeepCopy() *RecipeStep

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeStep.

func (*RecipeStep) DeepCopyInto

func (in *RecipeStep) DeepCopyInto(out *RecipeStep)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecipeStep) Descriptor

func (*RecipeStep) Descriptor() ([]byte, []int)

func (*RecipeStep) Marshal

func (m *RecipeStep) Marshal() (dAtA []byte, err error)

func (*RecipeStep) MarshalTo

func (m *RecipeStep) MarshalTo(dAtA []byte) (int, error)

func (*RecipeStep) MarshalToSizedBuffer

func (m *RecipeStep) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecipeStep) ProtoMessage

func (*RecipeStep) ProtoMessage()

func (*RecipeStep) Reset

func (m *RecipeStep) Reset()

func (*RecipeStep) Size

func (m *RecipeStep) Size() (n int)

func (*RecipeStep) String

func (this *RecipeStep) String() string

func (*RecipeStep) Unmarshal

func (m *RecipeStep) Unmarshal(dAtA []byte) error

func (*RecipeStep) XXX_DiscardUnknown

func (m *RecipeStep) XXX_DiscardUnknown()

func (*RecipeStep) XXX_Marshal

func (m *RecipeStep) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecipeStep) XXX_Merge

func (m *RecipeStep) XXX_Merge(src proto.Message)

func (*RecipeStep) XXX_Size

func (m *RecipeStep) XXX_Size() int

func (*RecipeStep) XXX_Unmarshal

func (m *RecipeStep) XXX_Unmarshal(b []byte) error

type RecipeStepOperation

// RecipeStepOperation is the operation name of one step in the recipe.
type RecipeStepOperation string

RecipeStepOperation is the operation name of one step in the recipe

const (

	// Aggregate function
	AnyStepOp                   RecipeStepOperation = "any"
	AnyIfStepOp                 RecipeStepOperation = "any-if"
	ApproximateMedianStepOp     RecipeStepOperation = "approximate-median"
	ApproximatePercentileStepOp RecipeStepOperation = "approximate-percentile"
	ApproximateQuartileStepOp   RecipeStepOperation = "approximate-quartile"
	AverageStepOp               RecipeStepOperation = "average"
	AverageIfStepOp             RecipeStepOperation = "average-if"
	CorrelStepOp                RecipeStepOperation = "correl"
	CountAStepOp                RecipeStepOperation = "counta"
	CountAIfStepOp              RecipeStepOperation = "counta-if"
	CountDistinctStepOp         RecipeStepOperation = "counta-distinct"
	CountDistinctIfStepOp       RecipeStepOperation = "counta-distinct-if"
	CountStepOp                 RecipeStepOperation = "count"
	CountIfStepOp               RecipeStepOperation = "count-if"
	CovarStepOp                 RecipeStepOperation = "covar"
	KthLargestStepOp            RecipeStepOperation = "kth-largest"
	KthLargestIfStepOp          RecipeStepOperation = "kth-largest-if"
	KthLargestUniqueStepOp      RecipeStepOperation = "kth-largest-unique"
	KthLargestUniqueIfStepOp    RecipeStepOperation = "kth-largest-unique-if"
	ListStepOp                  RecipeStepOperation = "list"
	ListIfStepOp                RecipeStepOperation = "list-if"
	MaxStepOp                   RecipeStepOperation = "max"
	MaxIfStepOp                 RecipeStepOperation = "max-if"
	MedianStepOp                RecipeStepOperation = "median"
	MinStepOp                   RecipeStepOperation = "min"
	MinIfStepOp                 RecipeStepOperation = "min-if"
	ModeStepOp                  RecipeStepOperation = "mode"
	ModeIfStepOp                RecipeStepOperation = "mode-if"
	PercentileStepOp            RecipeStepOperation = "percentile"
	QuartileStepOp              RecipeStepOperation = "quartile"
	StdDevStepOp                RecipeStepOperation = "stddev"
	StdDevIfStepOp              RecipeStepOperation = "stddev-if"
	SumStepOp                   RecipeStepOperation = "sum"
	SumIfStepOp                 RecipeStepOperation = "sum-if"
	UniqueStepOp                RecipeStepOperation = "unique"
	VarStepOp                   RecipeStepOperation = "var"
	VarIfStepOp                 RecipeStepOperation = "var-if"

	// Logical functions
	OrStepOp  RecipeStepOperation = "or"
	AndStepOp RecipeStepOperation = "and"
	NotStepOp RecipeStepOperation = "not"

	// Comparison
	ComparisonStepOp       RecipeStepOperation = "comparison"
	IsEvenStepOp           RecipeStepOperation = "is-even"
	IsOddStepOp            RecipeStepOperation = "is-odd"
	InStepOp               RecipeStepOperation = "in"
	MatchesStepOp          RecipeStepOperation = "matches"
	EqualStepOp            RecipeStepOperation = "equal"
	NotEqualStepOp         RecipeStepOperation = "not-equal"
	GreaterThanStepOp      RecipeStepOperation = "greater-than"
	GreaterThanEqualStepOp RecipeStepOperation = "greater-than-equal"
	LessThanStepOp         RecipeStepOperation = "less-than"
	LessThanEqualOp        RecipeStepOperation = "less-than-equal"

	// Math
	AddStepOp           RecipeStepOperation = "add"
	SubstractStepOp     RecipeStepOperation = "subtract"
	MultiplyStepOp      RecipeStepOperation = "multiply"
	DivideOp            RecipeStepOperation = "divide"
	ModStepOp           RecipeStepOperation = "mod"
	NegateStepOp        RecipeStepOperation = "negate"
	SignStepOp          RecipeStepOperation = "sign"
	LcmStepOp           RecipeStepOperation = "lcm"
	AbsoluteStepOp      RecipeStepOperation = "absolute"
	ExponentStepOp      RecipeStepOperation = "exponent"
	LogStepOp           RecipeStepOperation = "log"
	LnStepOp            RecipeStepOperation = "ln"
	PowerStepOp         RecipeStepOperation = "power"
	SquareRootStepOp    RecipeStepOperation = "sqr"
	CeilingStepOp       RecipeStepOperation = "ceiling"
	FloorStepOp         RecipeStepOperation = "floor"
	RoundStepOp         RecipeStepOperation = "round"
	TruncStepOp         RecipeStepOperation = "trunc"
	PIStepOp            RecipeStepOperation = "pi"
	RandomStepOp        RecipeStepOperation = "random"
	RandomBetweenStepOp RecipeStepOperation = "random-between"

	// Trig
	SinStepOp     RecipeStepOperation = "sin"
	CosStepOp     RecipeStepOperation = "cos"
	TanStepOp     RecipeStepOperation = "tan"
	ASinStepOp    RecipeStepOperation = "asin"
	ACosStepOp    RecipeStepOperation = "acos"
	ATanStepOp    RecipeStepOperation = "atan"
	SinHStepOp    RecipeStepOperation = "sinh"
	CosHStepOp    RecipeStepOperation = "cosh"
	TanHStepOp    RecipeStepOperation = "tanh"
	ASinHStepOp   RecipeStepOperation = "asinh"
	ACosHStepOp   RecipeStepOperation = "acosh"
	ATanHStepOp   RecipeStepOperation = "atanh"
	DegreesOp     RecipeStepOperation = "degrees"
	RadiansStepOp RecipeStepOperation = "radians"

	// Date and time functions
	DateStepOp                   RecipeStepOperation = "date"
	TimeStepOp                   RecipeStepOperation = "time"
	DateTimeStepOp               RecipeStepOperation = "date-time"
	DateAddStepOp                RecipeStepOperation = "date-add"
	DateDiffStepOp               RecipeStepOperation = "date-diff"
	DateFormatStepOp             RecipeStepOperation = "date-format"
	UnixTimeFormatStepOp         RecipeStepOperation = "unix-time-format"
	MonthStepOp                  RecipeStepOperation = "month"
	MonthNameStepOp              RecipeStepOperation = "month-name"
	YearStepOp                   RecipeStepOperation = "year"
	DayStepOp                    RecipeStepOperation = "day"
	WeekNumberStepOp             RecipeStepOperation = "week-number"
	WeekDayStepOp                RecipeStepOperation = "week-day"
	HourStepOp                   RecipeStepOperation = "hour"
	MinuteStepOp                 RecipeStepOperation = "minute"
	SecondStepOp                 RecipeStepOperation = "second"
	UnixTimeStepOp               RecipeStepOperation = "unix-time"
	NowStepOp                    RecipeStepOperation = "now"
	TodayStepOp                  RecipeStepOperation = "today"
	ParseDateStepOp              RecipeStepOperation = "parse-date"
	NetworkDaysStepOp            RecipeStepOperation = "network-days"
	NetworkDaysIntlStepOp        RecipeStepOperation = "network-days-intl"
	MinDateStepOp                RecipeStepOperation = "min-date"
	MaxDateStepOp                RecipeStepOperation = "max-date"
	ModeDateStepOp               RecipeStepOperation = "mode-date"
	WorkdayStepOp                RecipeStepOperation = "workday"
	WorkDayIntlStepOp            RecipeStepOperation = "workday-intl"
	ConvertFromUtcStepOp         RecipeStepOperation = "convert-from-utc"
	ConvertToUtcStepOp           RecipeStepOperation = "convert-to-utc"
	ConvertTimeZoneStepOp        RecipeStepOperation = "convert-time-zone"
	MinDateIfStepOp              RecipeStepOperation = "min-date-if"
	MaxDateIfStepOp              RecipeStepOperation = "max-date-if"
	ModeDateIfStepOp             RecipeStepOperation = "model-date-if"
	KthLargestDateStepOp         RecipeStepOperation = "kth-largest-date"
	KthLargestUniqueDateStepOp   RecipeStepOperation = "kth-largest-unique-date"
	KthLargestUniqueDateIfStepOp RecipeStepOperation = "kth-largest-unique-date-step"
	KthLargestDateIfStepOp       RecipeStepOperation = "kth-largest-date-if"
	WeekDayNameStepOp            RecipeStepOperation = "week-day-name"

	// String
	CharStepOp                   RecipeStepOperation = "char"
	UnicodeStepOp                RecipeStepOperation = "unicode"
	UpperStepOp                  RecipeStepOperation = "upper"
	LowerStepOp                  RecipeStepOperation = "lower"
	ProperStepOp                 RecipeStepOperation = "proper"
	TrimStepOp                   RecipeStepOperation = "trim"
	RemoveWhitespaceStepOp       RecipeStepOperation = "remove-white-spaces"
	RemoveSymbolsStepOp          RecipeStepOperation = "remove-symbols"
	LenStepOp                    RecipeStepOperation = "len"
	FindStepOp                   RecipeStepOperation = "find"
	RightFindStepOp              RecipeStepOperation = "right-find"
	SubstringStepOp              RecipeStepOperation = "substring"
	SubstitueStepOp              RecipeStepOperation = "substitute"
	LeftStepOp                   RecipeStepOperation = "left"
	RightStepOp                  RecipeStepOperation = "right"
	PadStepOp                    RecipeStepOperation = "pad"
	MergeStringStepOp            RecipeStepOperation = "merge-string"
	StartsWithStepOp             RecipeStepOperation = "starts-with"
	EndsWithStepOp               RecipeStepOperation = "ends-with"
	RepeatStepOp                 RecipeStepOperation = "repeat"
	ExactStepOp                  RecipeStepOperation = "exact"
	StringGreaterThanStepOp      RecipeStepOperation = "string-greater-than"
	StringGreaterThanEqualStepOp RecipeStepOperation = "string-greater-equal"
	StringLessThanStepOp         RecipeStepOperation = "string-less-than"
	StringLessThanEqualStepOp    RecipeStepOperation = "string-less-than-equal"
	DoubleMetaphoneStepOp        RecipeStepOperation = "double-metaphone"
	DoubleMetaphoneEqualsStepOp  RecipeStepOperation = "double-metaphone-equals"
	TransliterateStepOp          RecipeStepOperation = "transliterate"
	TrimQuotesStepOp             RecipeStepOperation = "trim-quotes"
	Base64EncodeStepOp           RecipeStepOperation = "base64-encode"
	Base64DecodeStepOp           RecipeStepOperation = "base64-decode"

	// Type functions
	IfMissingStepOp  RecipeStepOperation = "if-missing"
	IsMissingStepOp  RecipeStepOperation = "is-missing"
	ParseIntStepOp   RecipeStepOperation = "parse-int"
	ParseBoolStepOp  RecipeStepOperation = "parse-bool"
	ParseFloatStepOp RecipeStepOperation = "parse-float"

	// windows functions
	PrevStepOp                    RecipeStepOperation = "prev"
	NextStepOp                    RecipeStepOperation = "next"
	FillStepOp                    RecipeStepOperation = "fill"
	RankStepOp                    RecipeStepOperation = "rank"
	DenseRankStepOp               RecipeStepOperation = "dense-rank"
	RollingAvgStepOp              RecipeStepOperation = "rolling-avg"
	RollingModeStepOp             RecipeStepOperation = "rolling-mode"
	RollingMaxStepOp              RecipeStepOperation = "rolling-max"
	RollingMinStepOp              RecipeStepOperation = "rolling-min"
	RollingSumStepOp              RecipeStepOperation = "rolling-sum"
	RollingStdDevStepOp           RecipeStepOperation = "rolling-std-dev"
	RollingStdDevSampStepOp       RecipeStepOperation = "rolling-std-dev-samp"
	RollingVarianceStepOp         RecipeStepOperation = "rolling-variance"
	RollingVarianceSampStepOp     RecipeStepOperation = "rolling-variance-samp"
	RollingCountAStepOp           RecipeStepOperation = "rolling-counta"
	RollingKthLargestStepOp       RecipeStepOperation = "rolling-k-largest"
	RollingKthLargestUniqueStepOp RecipeStepOperation = "rolling-k-largest-unique"
	RollingListStepOp             RecipeStepOperation = "rolling-list"
	RowNumberStepOp               RecipeStepOperation = "row-number"
	SessionStepOp                 RecipeStepOperation = "session"

	// other functions
	IpToIntStepOp         RecipeStepOperation = "ip-to-int"
	IntToIpStepOp         RecipeStepOperation = "int-to-ip"
	UrlParamsStepOp       RecipeStepOperation = "url-params"
	COALESCEStepOp        RecipeStepOperation = "coalesce"
	SourceRowNumberStepOp RecipeStepOperation = "source-row-number"
	IfStepOp              RecipeStepOperation = "if"
	CaseStepOp            RecipeStepOperation = "case"
	RangeStepOp           RecipeStepOperation = "range"
	HostStepOp            RecipeStepOperation = "host"
	DomainStepOp          RecipeStepOperation = "domain"
	SubDomainStepOp       RecipeStepOperation = "subdomain"

	// Basic cleaning
	DeleteStep         RecipeStepOperation = "delete"
	DuplicateStep      RecipeStepOperation = "duplicate"
	MoveAfterStep      RecipeStepOperation = "move-after"
	MoveBeforeStep     RecipeStepOperation = "move-before"
	MoveToEndStep      RecipeStepOperation = "move-to-end"
	MoveToIndexStep    RecipeStepOperation = "move-to-index"
	MoveToStartStep    RecipeStepOperation = "move-to-start"
	RenameStep         RecipeStepOperation = "rename"
	ToBoolColumnStep   RecipeStepOperation = "to-boolean-column"
	ToDoubleColumnStep RecipeStepOperation = "to-double-column"
	ToNumberColumnStep RecipeStepOperation = "to-number-column"
	ToStringColumnStep RecipeStepOperation = "to-string-column"

	CapitalCaseStepOp              RecipeStepOperation = "capital-case"
	FormatDateStepOp               RecipeStepOperation = "format-date"
	AddDoubleQuotesStepOp          RecipeStepOperation = "add-double-quotes"
	AddPrefixStepOp                RecipeStepOperation = "add-prefix"
	AddSingleQuotesStepOp          RecipeStepOperation = "add-single-quotes"
	AddSuffixStepOp                RecipeStepOperation = "add-suffix"
	ExtractBetweenDelimitersStepOp RecipeStepOperation = "extract-between-delimiters"
	ExtractBetweenPositionsStepOp  RecipeStepOperation = "extract-between-position"
	ExtractPatternStepOp           RecipeStepOperation = "extract-pattern"
	RemoveCombinedStepOp           RecipeStepOperation = "remove-combined"
	ReplaceBetweenDelimitersStepOp RecipeStepOperation = "replace-between-delimiters"
	ReplaceBetweenPositionsStepOp  RecipeStepOperation = "replace-between-positions"
	ReplaceTextStepOp              RecipeStepOperation = "replace-text"

	FillWithAvgStepOp       RecipeStepOperation = "fill-with-average"
	FillWithCustomStepOp    RecipeStepOperation = "fill-with-custom"
	FillWithEmptyStepOp     RecipeStepOperation = "fill-with-empty"
	FillWithLastValidStepOp RecipeStepOperation = "fill-with-last-valid"
	FillWithMedianStepOp    RecipeStepOperation = "fill-with-median"
	FillWithModeStepOp      RecipeStepOperation = "fill-with-mode"
	FillWithMostFreqStepOp  RecipeStepOperation = "fill-with-most-freq"
	FillWithNullStepOp      RecipeStepOperation = "fill-with-null"
	FillWithSumStepOp       RecipeStepOperation = "fill-with-sum"
	RemoveDuplicatesStepOp  RecipeStepOperation = "remove-duplicates"
	RemoveMissingStepOp     RecipeStepOperation = "remove-missing"

	FlagColumnFromNullStepOp           RecipeStepOperation = "flag-column-from-null"
	FlagColumnFromPatternStepOp        RecipeStepOperation = "flag-column-from-pattern"
	MergeStepOp                        RecipeStepOperation = "merge"
	SplitColumnsBetweenDelimiterStepOp RecipeStepOperation = "split-columns-between-delimiter"
	SplitColumnsBetweenPositionsStepOp RecipeStepOperation = "split-columns-between-positions"
	SplitColumnsFromEndStepOp          RecipeStepOperation = "split-columns-from-end"
	SplitColumnsFromStartStepOp        RecipeStepOperation = "split-columns-from-start"
	SplitColumnMultipleDelimiterStepOp RecipeStepOperation = "split-column-multiple-delimiter"
	SplitColumnSingleDelimiterStepOp   RecipeStepOperation = "split-column-single-delimiter"
	SplitColumnWithIntervalsStepOp     RecipeStepOperation = "split-column-with-intervals"

	CatMappingStepOp   RecipeStepOperation = "category-mapping"
	GroupByStepOp      RecipeStepOperation = "group-by"
	JoinStepOp         RecipeStepOperation = "join"
	OneHotEncodeStepOp RecipeStepOperation = "one-hot-encode"
	PivotStepOp        RecipeStepOperation = "pivot"
	TokenizeStepOp     RecipeStepOperation = "tokenize"
	UnionStepOp        RecipeStepOperation = "union"
	UnpivotStepOp      RecipeStepOperation = "unpivot"
)

type RecipeStepParam

// RecipeStepParam is a single key-value parameter supplied to a recipe step.
type RecipeStepParam struct {
	// Name is the parameter name.
	Name  string `json:"name,omitempty" protobuf:"bytes,1,opt,name=name"`
	// Value is the parameter value, encoded as a string.
	Value string `json:"value,omitempty" protobuf:"bytes,2,opt,name=value"`
}

RecipeStepParam is a key-value parameter of the recipe

func (*RecipeStepParam) DeepCopy

func (in *RecipeStepParam) DeepCopy() *RecipeStepParam

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeStepParam.

func (*RecipeStepParam) DeepCopyInto

func (in *RecipeStepParam) DeepCopyInto(out *RecipeStepParam)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecipeStepParam) Descriptor

func (*RecipeStepParam) Descriptor() ([]byte, []int)

func (*RecipeStepParam) Marshal

func (m *RecipeStepParam) Marshal() (dAtA []byte, err error)

func (*RecipeStepParam) MarshalTo

func (m *RecipeStepParam) MarshalTo(dAtA []byte) (int, error)

func (*RecipeStepParam) MarshalToSizedBuffer

func (m *RecipeStepParam) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecipeStepParam) ProtoMessage

func (*RecipeStepParam) ProtoMessage()

func (*RecipeStepParam) Reset

func (m *RecipeStepParam) Reset()

func (*RecipeStepParam) Size

func (m *RecipeStepParam) Size() (n int)

func (*RecipeStepParam) String

func (this *RecipeStepParam) String() string

func (*RecipeStepParam) Unmarshal

func (m *RecipeStepParam) Unmarshal(dAtA []byte) error

func (*RecipeStepParam) XXX_DiscardUnknown

func (m *RecipeStepParam) XXX_DiscardUnknown()

func (*RecipeStepParam) XXX_Marshal

func (m *RecipeStepParam) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecipeStepParam) XXX_Merge

func (m *RecipeStepParam) XXX_Merge(src proto.Message)

func (*RecipeStepParam) XXX_Size

func (m *RecipeStepParam) XXX_Size() int

func (*RecipeStepParam) XXX_Unmarshal

func (m *RecipeStepParam) XXX_Unmarshal(b []byte) error

type RecommendationSchema

// RecommendationSchema names the columns that a recommendation ML task
// expects in the dataset: user IDs, item IDs, and ratings.
type RecommendationSchema struct {
	// The name of the column that specifies user IDs (i.e. the primary key)
	// +kubebuilder:default:="user_id"
	// +kubebuilder:validation:Optional
	UserIDColumn *string `json:"userIDColumn,omitempty" protobuf:"bytes,1,opt,name=userIDColumn"`
	// The name of the column that specifies item IDs
	// +kubebuilder:default:="item_id"
	// +kubebuilder:validation:Optional
	ItemIDColumn *string `json:"itemIDColumn,omitempty" protobuf:"bytes,2,opt,name=itemIDColumn"`
	// The name of the column that specifies ratings
	// +kubebuilder:default:="rating"
	// +kubebuilder:validation:Optional
	RatingColumn *string `json:"ratingColumn,omitempty" protobuf:"bytes,3,opt,name=ratingColumn"`
}

func (*RecommendationSchema) DeepCopy

func (in *RecommendationSchema) DeepCopy() *RecommendationSchema

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecommendationSchema.

func (*RecommendationSchema) DeepCopyInto

func (in *RecommendationSchema) DeepCopyInto(out *RecommendationSchema)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecommendationSchema) Descriptor

func (*RecommendationSchema) Descriptor() ([]byte, []int)

func (*RecommendationSchema) Marshal

func (m *RecommendationSchema) Marshal() (dAtA []byte, err error)

func (*RecommendationSchema) MarshalTo

func (m *RecommendationSchema) MarshalTo(dAtA []byte) (int, error)

func (*RecommendationSchema) MarshalToSizedBuffer

func (m *RecommendationSchema) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecommendationSchema) ProtoMessage

func (*RecommendationSchema) ProtoMessage()

func (*RecommendationSchema) Reset

func (m *RecommendationSchema) Reset()

func (*RecommendationSchema) Size

func (m *RecommendationSchema) Size() (n int)

func (*RecommendationSchema) String

func (this *RecommendationSchema) String() string

func (*RecommendationSchema) Unmarshal

func (m *RecommendationSchema) Unmarshal(dAtA []byte) error

func (*RecommendationSchema) XXX_DiscardUnknown

func (m *RecommendationSchema) XXX_DiscardUnknown()

func (*RecommendationSchema) XXX_Marshal

func (m *RecommendationSchema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecommendationSchema) XXX_Merge

func (m *RecommendationSchema) XXX_Merge(src proto.Message)

func (*RecommendationSchema) XXX_Size

func (m *RecommendationSchema) XXX_Size() int

func (*RecommendationSchema) XXX_Unmarshal

func (m *RecommendationSchema) XXX_Unmarshal(b []byte) error

type RelationshipSpec

// RelationshipSpec defines a named, foreign-key-style relationship between
// this DataSource and another DataSource object.
type RelationshipSpec struct {
	// The name of the relationship
	// NOTE(review): the protobuf tag says name=type while the JSON tag says
	// name — this looks like a copy-paste slip, but changing it would break
	// wire compatibility; confirm before touching.
	// +kubebuilder:validation:Required
	// +required
	Name string `json:"name,omitempty" protobuf:"bytes,1,opt,name=type"`
	// The name of the column that holds the foreign key.
	// NOTE(review): the JSON name is the plural "columns" although the field
	// holds a single column name — confirm before renaming (API compatibility).
	Column string `json:"columns,omitempty" protobuf:"bytes,2,opt,name=column"`
	// The relationship arity (e.g. one-to-many)
	Arity *catalog.RelationshipArity `json:"arity,omitempty" protobuf:"bytes,3,opt,name=arity"`
	// The name of the other DataSource object
	// +kubebuilder:validation:Required
	// +required
	RelatesTo string `json:"relatesTo,omitempty" protobuf:"bytes,4,opt,name=relatesTo"`
}

RelationshipSpec defines a relationship between two DataSource objects

func (*RelationshipSpec) DeepCopy

func (in *RelationshipSpec) DeepCopy() *RelationshipSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RelationshipSpec.

func (*RelationshipSpec) DeepCopyInto

func (in *RelationshipSpec) DeepCopyInto(out *RelationshipSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RelationshipSpec) Descriptor

func (*RelationshipSpec) Descriptor() ([]byte, []int)

func (*RelationshipSpec) Marshal

func (m *RelationshipSpec) Marshal() (dAtA []byte, err error)

func (*RelationshipSpec) MarshalTo

func (m *RelationshipSpec) MarshalTo(dAtA []byte) (int, error)

func (*RelationshipSpec) MarshalToSizedBuffer

func (m *RelationshipSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RelationshipSpec) ProtoMessage

func (*RelationshipSpec) ProtoMessage()

func (*RelationshipSpec) Reset

func (m *RelationshipSpec) Reset()

func (*RelationshipSpec) Size

func (m *RelationshipSpec) Size() (n int)

func (*RelationshipSpec) String

func (this *RelationshipSpec) String() string

func (*RelationshipSpec) Unmarshal

func (m *RelationshipSpec) Unmarshal(dAtA []byte) error

func (*RelationshipSpec) XXX_DiscardUnknown

func (m *RelationshipSpec) XXX_DiscardUnknown()

func (*RelationshipSpec) XXX_Marshal

func (m *RelationshipSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RelationshipSpec) XXX_Merge

func (m *RelationshipSpec) XXX_Merge(src proto.Message)

func (*RelationshipSpec) XXX_Size

func (m *RelationshipSpec) XXX_Size() int

func (*RelationshipSpec) XXX_Unmarshal

func (m *RelationshipSpec) XXX_Unmarshal(b []byte) error

type RowSpec

// RowSpec represents a single row of data as an ordered collection of
// column specifications.
type RowSpec struct {
	// Cols holds the cells of the row, one ColumnSpec per column.
	Cols []ColumnSpec `json:"cols,omitempty" protobuf:"bytes,1,rep,name=cols"`
}

func (*RowSpec) DeepCopy

func (in *RowSpec) DeepCopy() *RowSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RowSpec.

func (*RowSpec) DeepCopyInto

func (in *RowSpec) DeepCopyInto(out *RowSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RowSpec) Descriptor

func (*RowSpec) Descriptor() ([]byte, []int)

func (*RowSpec) Marshal

func (m *RowSpec) Marshal() (dAtA []byte, err error)

func (*RowSpec) MarshalTo

func (m *RowSpec) MarshalTo(dAtA []byte) (int, error)

func (*RowSpec) MarshalToSizedBuffer

func (m *RowSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RowSpec) ProtoMessage

func (*RowSpec) ProtoMessage()

func (*RowSpec) Reset

func (m *RowSpec) Reset()

func (*RowSpec) Size

func (m *RowSpec) Size() (n int)

func (*RowSpec) String

func (this *RowSpec) String() string

func (*RowSpec) Unmarshal

func (m *RowSpec) Unmarshal(dAtA []byte) error

func (*RowSpec) XXX_DiscardUnknown

func (m *RowSpec) XXX_DiscardUnknown()

func (*RowSpec) XXX_Marshal

func (m *RowSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RowSpec) XXX_Merge

func (m *RowSpec) XXX_Merge(src proto.Message)

func (*RowSpec) XXX_Size

func (m *RowSpec) XXX_Size() int

func (*RowSpec) XXX_Unmarshal

func (m *RowSpec) XXX_Unmarshal(b []byte) error

type SampleSpec

// SampleSpec specifies how the contents of a dataset should be sampled.
// NOTE(review): protobuf field numbering starts at 2 — field 1 is presumably
// reserved from a removed field; do not reuse it.
type SampleSpec struct {
	// The type of sampling (stratified sampling, by default). Modela will
	// sample rows until their contents exceed a maximum of 1Mb
	// +kubebuilder:default:="stratified"
	// +kubebuilder:validation:Optional
	Type catalog.SamplingType `json:"type,omitempty" protobuf:"bytes,2,opt,name=type"`
	// The number of rows to sample (by default, 500)
	// +kubebuilder:default:=500
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:validation:Maximum=1000
	// +kubebuilder:validation:Optional
	Rows *int32 `json:"rows,omitempty" protobuf:"varint,3,opt,name=rows"`
	// The percentage of rows to sample
	// +kubebuilder:default:=100
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:validation:Optional
	Percent *int32 `json:"percent,omitempty" protobuf:"varint,4,opt,name=percent"`
	// The filter formula, applicable if the sample type is a filter
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Filter *string `json:"filter,omitempty" protobuf:"bytes,5,opt,name=filter"`
}

SampleSpec specifies how the contents of a dataset should be sampled

func (*SampleSpec) DeepCopy

func (in *SampleSpec) DeepCopy() *SampleSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SampleSpec.

func (*SampleSpec) DeepCopyInto

func (in *SampleSpec) DeepCopyInto(out *SampleSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*SampleSpec) Descriptor

func (*SampleSpec) Descriptor() ([]byte, []int)

func (*SampleSpec) Marshal

func (m *SampleSpec) Marshal() (dAtA []byte, err error)

func (*SampleSpec) MarshalTo

func (m *SampleSpec) MarshalTo(dAtA []byte) (int, error)

func (*SampleSpec) MarshalToSizedBuffer

func (m *SampleSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*SampleSpec) ProtoMessage

func (*SampleSpec) ProtoMessage()

func (*SampleSpec) Reset

func (m *SampleSpec) Reset()

func (*SampleSpec) Size

func (m *SampleSpec) Size() (n int)

func (*SampleSpec) String

func (this *SampleSpec) String() string

func (*SampleSpec) Unmarshal

func (m *SampleSpec) Unmarshal(dAtA []byte) error

func (*SampleSpec) XXX_DiscardUnknown

func (m *SampleSpec) XXX_DiscardUnknown()

func (*SampleSpec) XXX_Marshal

func (m *SampleSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*SampleSpec) XXX_Merge

func (m *SampleSpec) XXX_Merge(src proto.Message)

func (*SampleSpec) XXX_Size

func (m *SampleSpec) XXX_Size() int

func (*SampleSpec) XXX_Unmarshal

func (m *SampleSpec) XXX_Unmarshal(b []byte) error

type ScatterPlotSpec

// ScatterPlotSpec describes a scatter plot over two columns of a dataset.
type ScatterPlotSpec struct {
	// DatasetName is the name of the dataset to plot
	// +kubebuilder:validation:Optional
	DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"`
	// The name of the X-axis column
	// +kubebuilder:validation:Optional
	X *string `json:"x,omitempty" protobuf:"bytes,2,opt,name=x"`
	// The name of the Y-axis column
	// +kubebuilder:validation:Optional
	Y *string `json:"y,omitempty" protobuf:"bytes,3,opt,name=y"`
}

func (*ScatterPlotSpec) DeepCopy

func (in *ScatterPlotSpec) DeepCopy() *ScatterPlotSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScatterPlotSpec.

func (*ScatterPlotSpec) DeepCopyInto

func (in *ScatterPlotSpec) DeepCopyInto(out *ScatterPlotSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ScatterPlotSpec) Descriptor

func (*ScatterPlotSpec) Descriptor() ([]byte, []int)

func (*ScatterPlotSpec) Marshal

func (m *ScatterPlotSpec) Marshal() (dAtA []byte, err error)

func (*ScatterPlotSpec) MarshalTo

func (m *ScatterPlotSpec) MarshalTo(dAtA []byte) (int, error)

func (*ScatterPlotSpec) MarshalToSizedBuffer

func (m *ScatterPlotSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*ScatterPlotSpec) ProtoMessage

func (*ScatterPlotSpec) ProtoMessage()

func (*ScatterPlotSpec) Reset

func (m *ScatterPlotSpec) Reset()

func (*ScatterPlotSpec) Size

func (m *ScatterPlotSpec) Size() (n int)

func (*ScatterPlotSpec) String

func (this *ScatterPlotSpec) String() string

func (*ScatterPlotSpec) Unmarshal

func (m *ScatterPlotSpec) Unmarshal(dAtA []byte) error

func (*ScatterPlotSpec) XXX_DiscardUnknown

func (m *ScatterPlotSpec) XXX_DiscardUnknown()

func (*ScatterPlotSpec) XXX_Marshal

func (m *ScatterPlotSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ScatterPlotSpec) XXX_Merge

func (m *ScatterPlotSpec) XXX_Merge(src proto.Message)

func (*ScatterPlotSpec) XXX_Size

func (m *ScatterPlotSpec) XXX_Size() int

func (*ScatterPlotSpec) XXX_Unmarshal

func (m *ScatterPlotSpec) XXX_Unmarshal(b []byte) error

type Schema

// Schema defines the column-level format and validation rules for data
// associated with a DataSource.
type Schema struct {
	// The time-series schema, which sets time-series specific parameters
	// +kubebuilder:validation:Optional
	TimeSeriesSchema TimeSeriesSchema `json:"timeSeriesSchema,omitempty" protobuf:"bytes,1,opt,name=timeSeriesSchema"`
	// The recommendation schema, which is used for the recommendation ML task
	// +kubebuilder:validation:Optional
	RecommendationSchema RecommendationSchema `json:"recommendationSchema,omitempty" protobuf:"bytes,2,opt,name=recommendationSchema"`
	// The collection of columns and their attributes
	// +kubebuilder:validation:Optional
	Columns []Column `json:"columns,omitempty" protobuf:"bytes,3,rep,name=columns"`
	// The key columns form the index of the file or table. The set of keys
	// will be used as an index for the in-memory representation (e.g. pandas)
	// +kubebuilder:validation:Optional
	Key []string `json:"key,omitempty" protobuf:"bytes,4,rep,name=key"`
}

Schema defines the column-level format and validation rules for data associated with a DataSource

func (*Schema) DeepCopy

func (in *Schema) DeepCopy() *Schema

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Schema.

func (*Schema) DeepCopyInto

func (in *Schema) DeepCopyInto(out *Schema)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*Schema) Descriptor

func (*Schema) Descriptor() ([]byte, []int)

func (*Schema) Marshal

func (m *Schema) Marshal() (dAtA []byte, err error)

func (*Schema) MarshalTo

func (m *Schema) MarshalTo(dAtA []byte) (int, error)

func (*Schema) MarshalToSizedBuffer

func (m *Schema) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*Schema) ProtoMessage

func (*Schema) ProtoMessage()

func (*Schema) Reset

func (m *Schema) Reset()

func (*Schema) Size

func (m *Schema) Size() (n int)

func (*Schema) String

func (this *Schema) String() string

func (*Schema) Unmarshal

func (m *Schema) Unmarshal(dAtA []byte) error

func (*Schema) XXX_DiscardUnknown

func (m *Schema) XXX_DiscardUnknown()

func (*Schema) XXX_Marshal

func (m *Schema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*Schema) XXX_Merge

func (m *Schema) XXX_Merge(src proto.Message)

func (*Schema) XXX_Size

func (m *Schema) XXX_Size() int

func (*Schema) XXX_Unmarshal

func (m *Schema) XXX_Unmarshal(b []byte) error

type SyntheticSpec added in v0.5.177

// SyntheticSpec configures the generation of synthetic data for a dataset.
type SyntheticSpec struct {
	// Indicates if the generation of synthetic data is enabled
	// +kubebuilder:validation:Optional
	Enabled *bool `json:"enabled,omitempty" protobuf:"varint,1,opt,name=enabled"`
	// The number of synthetic rows to generate (the original comment here
	// described correlations — a copy-paste error from another spec)
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Optional
	Rows *int32 `json:"rows,omitempty" protobuf:"varint,2,opt,name=rows"`
}

func (*SyntheticSpec) DeepCopy added in v0.5.177

func (in *SyntheticSpec) DeepCopy() *SyntheticSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SyntheticSpec.

func (*SyntheticSpec) DeepCopyInto added in v0.5.177

func (in *SyntheticSpec) DeepCopyInto(out *SyntheticSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*SyntheticSpec) Descriptor added in v0.5.177

func (*SyntheticSpec) Descriptor() ([]byte, []int)

func (*SyntheticSpec) Marshal added in v0.5.177

func (m *SyntheticSpec) Marshal() (dAtA []byte, err error)

func (*SyntheticSpec) MarshalTo added in v0.5.177

func (m *SyntheticSpec) MarshalTo(dAtA []byte) (int, error)

func (*SyntheticSpec) MarshalToSizedBuffer added in v0.5.177

func (m *SyntheticSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*SyntheticSpec) ProtoMessage added in v0.5.177

func (*SyntheticSpec) ProtoMessage()

func (*SyntheticSpec) Reset added in v0.5.177

func (m *SyntheticSpec) Reset()

func (*SyntheticSpec) Size added in v0.5.177

func (m *SyntheticSpec) Size() (n int)

func (*SyntheticSpec) String added in v0.5.177

func (this *SyntheticSpec) String() string

func (*SyntheticSpec) Unmarshal added in v0.5.177

func (m *SyntheticSpec) Unmarshal(dAtA []byte) error

func (*SyntheticSpec) XXX_DiscardUnknown added in v0.5.177

func (m *SyntheticSpec) XXX_DiscardUnknown()

func (*SyntheticSpec) XXX_Marshal added in v0.5.177

func (m *SyntheticSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*SyntheticSpec) XXX_Merge added in v0.5.177

func (m *SyntheticSpec) XXX_Merge(src proto.Message)

func (*SyntheticSpec) XXX_Size added in v0.5.177

func (m *SyntheticSpec) XXX_Size() int

func (*SyntheticSpec) XXX_Unmarshal added in v0.5.177

func (m *SyntheticSpec) XXX_Unmarshal(b []byte) error

type TableSpec

// TableSpec describes a tabular view over a dataset: which columns to show,
// how to filter and group them, and basic display options.
type TableSpec struct {
	// DatasetName is the name of the dataset to display
	// +kubebuilder:validation:Optional
	DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"`
	// List of table columns; if empty, all columns are used
	// +kubebuilder:validation:Optional
	Columns []string `json:"columns,omitempty" protobuf:"bytes,2,rep,name=columns"`
	// One or more filters to apply to the rows
	// +kubebuilder:validation:Optional
	Filters []string `json:"filters,omitempty" protobuf:"bytes,3,rep,name=filters"`
	// GroupBy columns
	// +kubebuilder:validation:Optional
	GroupBy []string `json:"groupby,omitempty" protobuf:"bytes,4,rep,name=groupby"`
	// The number of rows (presumably the number of rows to display — the
	// original comment said "row number"; confirm against callers)
	// +kubebuilder:validation:Optional
	Rows *int32 `json:"rows,omitempty" protobuf:"varint,5,opt,name=rows"`
	// Show the index column
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	ShowIndex *bool `json:"showIndex,omitempty" protobuf:"varint,6,opt,name=showIndex"`
	// Show a border around the table
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Border *bool `json:"border,omitempty" protobuf:"varint,7,opt,name=border"`
}

func (*TableSpec) DeepCopy

func (in *TableSpec) DeepCopy() *TableSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableSpec.

func (*TableSpec) DeepCopyInto

func (in *TableSpec) DeepCopyInto(out *TableSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*TableSpec) Descriptor

func (*TableSpec) Descriptor() ([]byte, []int)

func (*TableSpec) Marshal

func (m *TableSpec) Marshal() (dAtA []byte, err error)

func (*TableSpec) MarshalTo

func (m *TableSpec) MarshalTo(dAtA []byte) (int, error)

func (*TableSpec) MarshalToSizedBuffer

func (m *TableSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*TableSpec) ProtoMessage

func (*TableSpec) ProtoMessage()

func (*TableSpec) Reset

func (m *TableSpec) Reset()

func (*TableSpec) Size

func (m *TableSpec) Size() (n int)

func (*TableSpec) String

func (this *TableSpec) String() string

func (*TableSpec) Unmarshal

func (m *TableSpec) Unmarshal(dAtA []byte) error

func (*TableSpec) XXX_DiscardUnknown

func (m *TableSpec) XXX_DiscardUnknown()

func (*TableSpec) XXX_Marshal

func (m *TableSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*TableSpec) XXX_Merge

func (m *TableSpec) XXX_Merge(src proto.Message)

func (*TableSpec) XXX_Size

func (m *TableSpec) XXX_Size() int

func (*TableSpec) XXX_Unmarshal

func (m *TableSpec) XXX_Unmarshal(b []byte) error

type TimeSeriesSchema

// TimeSeriesSchema sets time-series specific parameters for a dataset:
// the series type, its frequency, and the forecast interval.
type TimeSeriesSchema struct {
	// The time series type
	// NOTE(review): the protobuf tag says "rep" although this is a scalar
	// optional pointer — likely should be "opt"; confirm before changing
	// (wire compatibility).
	// +kubebuilder:default:="series"
	// +kubebuilder:validation:Optional
	Type *catalog.TimeSeriesType `json:"type,omitempty" protobuf:"bytes,1,rep,name=type"`
	// The time series frequency
	// +kubebuilder:default:="day"
	// +kubebuilder:validation:Optional
	Freq *catalog.Frequency `json:"freq,omitempty" protobuf:"bytes,2,opt,name=freq"`
	// The interval to forecast at this level
	// +kubebuilder:default:=1
	// +kubebuilder:validation:Optional
	Interval *int32 `json:"interval,omitempty" protobuf:"varint,3,opt,name=interval"`
}

func (*TimeSeriesSchema) DeepCopy

func (in *TimeSeriesSchema) DeepCopy() *TimeSeriesSchema

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TimeSeriesSchema.

func (*TimeSeriesSchema) DeepCopyInto

func (in *TimeSeriesSchema) DeepCopyInto(out *TimeSeriesSchema)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*TimeSeriesSchema) Descriptor

func (*TimeSeriesSchema) Descriptor() ([]byte, []int)

func (*TimeSeriesSchema) Marshal

func (m *TimeSeriesSchema) Marshal() (dAtA []byte, err error)

func (*TimeSeriesSchema) MarshalTo

func (m *TimeSeriesSchema) MarshalTo(dAtA []byte) (int, error)

func (*TimeSeriesSchema) MarshalToSizedBuffer

func (m *TimeSeriesSchema) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*TimeSeriesSchema) ProtoMessage

func (*TimeSeriesSchema) ProtoMessage()

func (*TimeSeriesSchema) Reset

func (m *TimeSeriesSchema) Reset()

func (*TimeSeriesSchema) Size

func (m *TimeSeriesSchema) Size() (n int)

func (*TimeSeriesSchema) String

func (this *TimeSeriesSchema) String() string

func (*TimeSeriesSchema) Unmarshal

func (m *TimeSeriesSchema) Unmarshal(dAtA []byte) error

func (*TimeSeriesSchema) XXX_DiscardUnknown

func (m *TimeSeriesSchema) XXX_DiscardUnknown()

func (*TimeSeriesSchema) XXX_Marshal

func (m *TimeSeriesSchema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*TimeSeriesSchema) XXX_Merge

func (m *TimeSeriesSchema) XXX_Merge(src proto.Message)

func (*TimeSeriesSchema) XXX_Size

func (m *TimeSeriesSchema) XXX_Size() int

func (*TimeSeriesSchema) XXX_Unmarshal

func (m *TimeSeriesSchema) XXX_Unmarshal(b []byte) error

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL