plugins

package
v1.0.1
Published: Feb 21, 2024 License: Apache-2.0 Imports: 6 Imported by: 12

Documentation

Index

Constants

This section is empty.

Variables

var (
	SparkApplication_Type_name = map[int32]string{
		0: "PYTHON",
		1: "JAVA",
		2: "SCALA",
		3: "R",
	}
	SparkApplication_Type_value = map[string]int32{
		"PYTHON": 0,
		"JAVA":   1,
		"SCALA":  2,
		"R":      3,
	}
)

Enum value maps for SparkApplication_Type.
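
For illustration, a minimal sketch of translating between the enum's names and numbers with these maps; the import path is hypothetical and should be replaced with this module's actual path.

package main

import (
	"fmt"

	"example.com/kozmoidl/plugins" // hypothetical import path for this package
)

func main() {
	// Number -> name and name -> number lookups for SparkApplication_Type.
	fmt.Println(plugins.SparkApplication_Type_name[2])    // SCALA
	fmt.Println(plugins.SparkApplication_Type_value["R"]) // 3
}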

var File_kozmoidl_plugins_array_job_proto protoreflect.FileDescriptor
var File_kozmoidl_plugins_dask_proto protoreflect.FileDescriptor
var File_kozmoidl_plugins_mpi_proto protoreflect.FileDescriptor
var File_kozmoidl_plugins_presto_proto protoreflect.FileDescriptor
var File_kozmoidl_plugins_pytorch_proto protoreflect.FileDescriptor
var File_kozmoidl_plugins_qubole_proto protoreflect.FileDescriptor
var File_kozmoidl_plugins_ray_proto protoreflect.FileDescriptor
var File_kozmoidl_plugins_spark_proto protoreflect.FileDescriptor
var File_kozmoidl_plugins_tensorflow_proto protoreflect.FileDescriptor
var File_kozmoidl_plugins_waitable_proto protoreflect.FileDescriptor

Functions

This section is empty.

Types

type ArrayJob

type ArrayJob struct {

	// Defines the maximum number of instances to bring up concurrently at any given point. Note that this is an
	// optimistic restriction and that, due to network partitioning or other failures, the actual number of currently
	// running instances might be more. This has to be a positive number if assigned. Default value is size.
	Parallelism int64 `protobuf:"varint,1,opt,name=parallelism,proto3" json:"parallelism,omitempty"`
	// Defines the number of instances to launch at most. This number should match the size of the input if the job
	// requires processing of all input data. This has to be a positive number.
	// In the case this is not defined, the back-end will determine the size at run-time by reading the inputs.
	Size int64 `protobuf:"varint,2,opt,name=size,proto3" json:"size,omitempty"`
	// Types that are assignable to SuccessCriteria:
	//
	//	*ArrayJob_MinSuccesses
	//	*ArrayJob_MinSuccessRatio
	SuccessCriteria isArrayJob_SuccessCriteria `protobuf_oneof:"success_criteria"`
	// contains filtered or unexported fields
}

Describes a job that processes independent pieces of data concurrently by running multiple copies of the runnable component.
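
A minimal sketch of constructing an ArrayJob with a ratio-based success criterion; the import path and values are illustrative, not part of this package.

package main

import (
	"fmt"

	"example.com/kozmoidl/plugins" // hypothetical import path for this package
)

func main() {
	// Process up to 100 inputs, at most 10 at a time; succeed once 90% of subtasks complete.
	job := &plugins.ArrayJob{
		Size:            100,
		Parallelism:     10,
		SuccessCriteria: &plugins.ArrayJob_MinSuccessRatio{MinSuccessRatio: 0.9},
	}
	fmt.Println(job.GetSize(), job.GetParallelism(), job.GetMinSuccessRatio())
}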

func (*ArrayJob) Descriptor deprecated

func (*ArrayJob) Descriptor() ([]byte, []int)

Deprecated: Use ArrayJob.ProtoReflect.Descriptor instead.

func (*ArrayJob) GetMinSuccessRatio

func (x *ArrayJob) GetMinSuccessRatio() float32

func (*ArrayJob) GetMinSuccesses

func (x *ArrayJob) GetMinSuccesses() int64

func (*ArrayJob) GetParallelism

func (x *ArrayJob) GetParallelism() int64

func (*ArrayJob) GetSize

func (x *ArrayJob) GetSize() int64

func (*ArrayJob) GetSuccessCriteria

func (m *ArrayJob) GetSuccessCriteria() isArrayJob_SuccessCriteria

func (*ArrayJob) ProtoMessage

func (*ArrayJob) ProtoMessage()

func (*ArrayJob) ProtoReflect

func (x *ArrayJob) ProtoReflect() protoreflect.Message

func (*ArrayJob) Reset

func (x *ArrayJob) Reset()

func (*ArrayJob) String

func (x *ArrayJob) String() string

type ArrayJob_MinSuccessRatio

type ArrayJob_MinSuccessRatio struct {
	// If the array job size is not known beforehand, the min_success_ratio can instead be used to determine when an array
	// job can be marked successful.
	MinSuccessRatio float32 `protobuf:"fixed32,4,opt,name=min_success_ratio,json=minSuccessRatio,proto3,oneof"`
}

type ArrayJob_MinSuccesses

type ArrayJob_MinSuccesses struct {
	// The absolute minimum number of successful subtask completions. As soon as this criterion is met,
	// the array job will be marked as successful and outputs will be computed. This has to be a non-negative number if
	// assigned. Default value is size (if specified).
	MinSuccesses int64 `protobuf:"varint,3,opt,name=min_successes,json=minSuccesses,proto3,oneof"`
}

type DaskJob

type DaskJob struct {

	// Spec for the scheduler pod.
	Scheduler *DaskScheduler `protobuf:"bytes,1,opt,name=scheduler,proto3" json:"scheduler,omitempty"`
	// Spec of the default worker group.
	Workers *DaskWorkerGroup `protobuf:"bytes,2,opt,name=workers,proto3" json:"workers,omitempty"`
	// contains filtered or unexported fields
}

Custom proto for the Dask plugin.
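
A hedged sketch of a DaskJob with a scheduler pod and a default worker group; the import path and image are placeholders.

package main

import (
	"example.com/kozmoidl/plugins" // hypothetical import path for this package
)

func main() {
	// One scheduler pod and ten workers in the default group, all using the same image.
	job := &plugins.DaskJob{
		Scheduler: &plugins.DaskScheduler{Image: "ghcr.io/dask/dask:latest"},
		Workers: &plugins.DaskWorkerGroup{
			NumberOfWorkers: 10,
			Image:           "ghcr.io/dask/dask:latest",
		},
	}
	_ = job
}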

func (*DaskJob) Descriptor deprecated

func (*DaskJob) Descriptor() ([]byte, []int)

Deprecated: Use DaskJob.ProtoReflect.Descriptor instead.

func (*DaskJob) GetScheduler

func (x *DaskJob) GetScheduler() *DaskScheduler

func (*DaskJob) GetWorkers

func (x *DaskJob) GetWorkers() *DaskWorkerGroup

func (*DaskJob) ProtoMessage

func (*DaskJob) ProtoMessage()

func (*DaskJob) ProtoReflect

func (x *DaskJob) ProtoReflect() protoreflect.Message

func (*DaskJob) Reset

func (x *DaskJob) Reset()

func (*DaskJob) String

func (x *DaskJob) String() string

type DaskScheduler

type DaskScheduler struct {

	// Optional image to use. If unset, will use the default image.
	Image string `protobuf:"bytes,1,opt,name=image,proto3" json:"image,omitempty"`
	// Resources assigned to the scheduler pod.
	Resources *core.Resources `protobuf:"bytes,2,opt,name=resources,proto3" json:"resources,omitempty"`
	// contains filtered or unexported fields
}

Specification for the scheduler pod.

func (*DaskScheduler) Descriptor deprecated

func (*DaskScheduler) Descriptor() ([]byte, []int)

Deprecated: Use DaskScheduler.ProtoReflect.Descriptor instead.

func (*DaskScheduler) GetImage

func (x *DaskScheduler) GetImage() string

func (*DaskScheduler) GetResources

func (x *DaskScheduler) GetResources() *core.Resources

func (*DaskScheduler) ProtoMessage

func (*DaskScheduler) ProtoMessage()

func (*DaskScheduler) ProtoReflect

func (x *DaskScheduler) ProtoReflect() protoreflect.Message

func (*DaskScheduler) Reset

func (x *DaskScheduler) Reset()

func (*DaskScheduler) String

func (x *DaskScheduler) String() string

type DaskWorkerGroup

type DaskWorkerGroup struct {

	// Number of workers in the group.
	NumberOfWorkers uint32 `protobuf:"varint,1,opt,name=number_of_workers,json=numberOfWorkers,proto3" json:"number_of_workers,omitempty"`
	// Optional image to use for the pods of the worker group. If unset, will use the default image.
	Image string `protobuf:"bytes,2,opt,name=image,proto3" json:"image,omitempty"`
	// Resources assigned to all pods of the worker group.
	// As per https://kubernetes.dask.org/en/latest/kubecluster.html?highlight=limit#best-practices
	// it is advised to only set limits. If requests are not explicitly set, the plugin will make
	// sure to set requests==limits.
	// The plugin sets `--memory-limit` as well as `--nthreads` for the workers according to the limit.
	Resources *core.Resources `protobuf:"bytes,3,opt,name=resources,proto3" json:"resources,omitempty"`
	// contains filtered or unexported fields
}

func (*DaskWorkerGroup) Descriptor deprecated

func (*DaskWorkerGroup) Descriptor() ([]byte, []int)

Deprecated: Use DaskWorkerGroup.ProtoReflect.Descriptor instead.

func (*DaskWorkerGroup) GetImage

func (x *DaskWorkerGroup) GetImage() string

func (*DaskWorkerGroup) GetNumberOfWorkers

func (x *DaskWorkerGroup) GetNumberOfWorkers() uint32

func (*DaskWorkerGroup) GetResources

func (x *DaskWorkerGroup) GetResources() *core.Resources

func (*DaskWorkerGroup) ProtoMessage

func (*DaskWorkerGroup) ProtoMessage()

func (*DaskWorkerGroup) ProtoReflect

func (x *DaskWorkerGroup) ProtoReflect() protoreflect.Message

func (*DaskWorkerGroup) Reset

func (x *DaskWorkerGroup) Reset()

func (*DaskWorkerGroup) String

func (x *DaskWorkerGroup) String() string

type DistributedMPITrainingTask

type DistributedMPITrainingTask struct {

	// number of workers spawned in the cluster for this job
	NumWorkers int32 `protobuf:"varint,1,opt,name=num_workers,json=numWorkers,proto3" json:"num_workers,omitempty"`
	// number of launcher replicas spawned in the cluster for this job
	// The launcher pod invokes mpirun and communicates with worker pods through MPI.
	NumLauncherReplicas int32 `protobuf:"varint,2,opt,name=num_launcher_replicas,json=numLauncherReplicas,proto3" json:"num_launcher_replicas,omitempty"`
	// number of slots per worker used in hostfile.
	// The available slots (GPUs) in each pod.
	Slots int32 `protobuf:"varint,3,opt,name=slots,proto3" json:"slots,omitempty"`
	// contains filtered or unexported fields
}

Custom proto for the plugin that enables distributed training using https://github.com/kubeflow/mpi-operator. See the MPI operator proposal: https://github.com/kubeflow/community/blob/master/proposals/mpi-operator-proposal.md
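
A minimal sketch of an MPI training task; the import path is hypothetical and the replica counts are illustrative.

package main

import (
	"example.com/kozmoidl/plugins" // hypothetical import path for this package
)

func main() {
	// One launcher invoking mpirun against four workers, each exposing two slots (GPUs).
	task := &plugins.DistributedMPITrainingTask{
		NumWorkers:          4,
		NumLauncherReplicas: 1,
		Slots:               2,
	}
	_ = task
}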

func (*DistributedMPITrainingTask) Descriptor deprecated

func (*DistributedMPITrainingTask) Descriptor() ([]byte, []int)

Deprecated: Use DistributedMPITrainingTask.ProtoReflect.Descriptor instead.

func (*DistributedMPITrainingTask) GetNumLauncherReplicas

func (x *DistributedMPITrainingTask) GetNumLauncherReplicas() int32

func (*DistributedMPITrainingTask) GetNumWorkers

func (x *DistributedMPITrainingTask) GetNumWorkers() int32

func (*DistributedMPITrainingTask) GetSlots

func (x *DistributedMPITrainingTask) GetSlots() int32

func (*DistributedMPITrainingTask) ProtoMessage

func (*DistributedMPITrainingTask) ProtoMessage()

func (*DistributedMPITrainingTask) ProtoReflect

func (x *DistributedMPITrainingTask) ProtoReflect() protoreflect.Message

func (*DistributedMPITrainingTask) Reset

func (x *DistributedMPITrainingTask) Reset()

func (*DistributedMPITrainingTask) String

func (x *DistributedMPITrainingTask) String() string

type DistributedPyTorchTrainingTask

type DistributedPyTorchTrainingTask struct {

	// number of worker replicas spawned in the cluster for this job
	Workers int32 `protobuf:"varint,1,opt,name=workers,proto3" json:"workers,omitempty"`
	// config for an elastic pytorch job
	ElasticConfig *ElasticConfig `protobuf:"bytes,2,opt,name=elastic_config,json=elasticConfig,proto3" json:"elastic_config,omitempty"`
	// contains filtered or unexported fields
}

Custom proto for the plugin that enables distributed training using https://github.com/kubeflow/pytorch-operator.
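
A hedged sketch of a PyTorch training task with torch elastic enabled (this also exercises the ElasticConfig message below); the import path and parameter values are illustrative.

package main

import (
	"example.com/kozmoidl/plugins" // hypothetical import path for this package
)

func main() {
	// Two workers with elastic training: c10d rendezvous, 2-4 replicas, 8 processes per node.
	task := &plugins.DistributedPyTorchTrainingTask{
		Workers: 2,
		ElasticConfig: &plugins.ElasticConfig{
			RdzvBackend:  "c10d",
			MinReplicas:  2,
			MaxReplicas:  4,
			NprocPerNode: 8,
			MaxRestarts:  3,
		},
	}
	_ = task
}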

func (*DistributedPyTorchTrainingTask) Descriptor deprecated

func (*DistributedPyTorchTrainingTask) Descriptor() ([]byte, []int)

Deprecated: Use DistributedPyTorchTrainingTask.ProtoReflect.Descriptor instead.

func (*DistributedPyTorchTrainingTask) GetElasticConfig

func (x *DistributedPyTorchTrainingTask) GetElasticConfig() *ElasticConfig

func (*DistributedPyTorchTrainingTask) GetWorkers

func (x *DistributedPyTorchTrainingTask) GetWorkers() int32

func (*DistributedPyTorchTrainingTask) ProtoMessage

func (*DistributedPyTorchTrainingTask) ProtoMessage()

func (*DistributedPyTorchTrainingTask) ProtoReflect

func (x *DistributedPyTorchTrainingTask) ProtoReflect() protoreflect.Message

func (*DistributedPyTorchTrainingTask) Reset

func (x *DistributedPyTorchTrainingTask) Reset()

func (*DistributedPyTorchTrainingTask) String

func (x *DistributedPyTorchTrainingTask) String() string

type DistributedTensorflowTrainingTask

type DistributedTensorflowTrainingTask struct {

	// number of worker replicas spawned in the cluster for this job
	Workers int32 `protobuf:"varint,1,opt,name=workers,proto3" json:"workers,omitempty"`
	// PS -> Parameter server
	// number of ps replicas spawned in the cluster for this job
	PsReplicas int32 `protobuf:"varint,2,opt,name=ps_replicas,json=psReplicas,proto3" json:"ps_replicas,omitempty"`
	// number of chief replicas spawned in the cluster for this job
	ChiefReplicas int32 `protobuf:"varint,3,opt,name=chief_replicas,json=chiefReplicas,proto3" json:"chief_replicas,omitempty"`
	// number of evaluator replicas spawned in the cluster for this job
	EvaluatorReplicas int32 `protobuf:"varint,4,opt,name=evaluator_replicas,json=evaluatorReplicas,proto3" json:"evaluator_replicas,omitempty"`
	// contains filtered or unexported fields
}

Custom proto for the plugin that enables distributed training using https://github.com/kubeflow/tf-operator.
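
A minimal sketch of a TensorFlow training task spanning the four replica roles; the import path and counts are illustrative.

package main

import (
	"example.com/kozmoidl/plugins" // hypothetical import path for this package
)

func main() {
	// Four workers, one parameter server, one chief, and one evaluator.
	task := &plugins.DistributedTensorflowTrainingTask{
		Workers:           4,
		PsReplicas:        1,
		ChiefReplicas:     1,
		EvaluatorReplicas: 1,
	}
	_ = task
}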

func (*DistributedTensorflowTrainingTask) Descriptor deprecated

func (*DistributedTensorflowTrainingTask) Descriptor() ([]byte, []int)

Deprecated: Use DistributedTensorflowTrainingTask.ProtoReflect.Descriptor instead.

func (*DistributedTensorflowTrainingTask) GetChiefReplicas

func (x *DistributedTensorflowTrainingTask) GetChiefReplicas() int32

func (*DistributedTensorflowTrainingTask) GetEvaluatorReplicas

func (x *DistributedTensorflowTrainingTask) GetEvaluatorReplicas() int32

func (*DistributedTensorflowTrainingTask) GetPsReplicas

func (x *DistributedTensorflowTrainingTask) GetPsReplicas() int32

func (*DistributedTensorflowTrainingTask) GetWorkers

func (x *DistributedTensorflowTrainingTask) GetWorkers() int32

func (*DistributedTensorflowTrainingTask) ProtoMessage

func (*DistributedTensorflowTrainingTask) ProtoMessage()

func (*DistributedTensorflowTrainingTask) ProtoReflect

func (x *DistributedTensorflowTrainingTask) ProtoReflect() protoreflect.Message

func (*DistributedTensorflowTrainingTask) Reset

func (x *DistributedTensorflowTrainingTask) Reset()

func (*DistributedTensorflowTrainingTask) String

func (x *DistributedTensorflowTrainingTask) String() string

type ElasticConfig

type ElasticConfig struct {
	RdzvBackend  string `protobuf:"bytes,1,opt,name=rdzv_backend,json=rdzvBackend,proto3" json:"rdzv_backend,omitempty"`
	MinReplicas  int32  `protobuf:"varint,2,opt,name=min_replicas,json=minReplicas,proto3" json:"min_replicas,omitempty"`
	MaxReplicas  int32  `protobuf:"varint,3,opt,name=max_replicas,json=maxReplicas,proto3" json:"max_replicas,omitempty"`
	NprocPerNode int32  `protobuf:"varint,4,opt,name=nproc_per_node,json=nprocPerNode,proto3" json:"nproc_per_node,omitempty"`
	MaxRestarts  int32  `protobuf:"varint,5,opt,name=max_restarts,json=maxRestarts,proto3" json:"max_restarts,omitempty"`
	// contains filtered or unexported fields
}

Custom proto for the torch elastic config used for distributed training; see https://github.com/kubeflow/training-operator/blob/master/pkg/apis/kubeflow.org/v1/pytorch_types.go.

func (*ElasticConfig) Descriptor deprecated

func (*ElasticConfig) Descriptor() ([]byte, []int)

Deprecated: Use ElasticConfig.ProtoReflect.Descriptor instead.

func (*ElasticConfig) GetMaxReplicas

func (x *ElasticConfig) GetMaxReplicas() int32

func (*ElasticConfig) GetMaxRestarts

func (x *ElasticConfig) GetMaxRestarts() int32

func (*ElasticConfig) GetMinReplicas

func (x *ElasticConfig) GetMinReplicas() int32

func (*ElasticConfig) GetNprocPerNode

func (x *ElasticConfig) GetNprocPerNode() int32

func (*ElasticConfig) GetRdzvBackend

func (x *ElasticConfig) GetRdzvBackend() string

func (*ElasticConfig) ProtoMessage

func (*ElasticConfig) ProtoMessage()

func (*ElasticConfig) ProtoReflect

func (x *ElasticConfig) ProtoReflect() protoreflect.Message

func (*ElasticConfig) Reset

func (x *ElasticConfig) Reset()

func (*ElasticConfig) String

func (x *ElasticConfig) String() string

type HeadGroupSpec

type HeadGroupSpec struct {

	// Optional. RayStartParams are the params of the start command: address, object-store-memory.
	// Refer to https://docs.ray.io/en/latest/ray-core/package-ref.html#ray-start
	RayStartParams map[string]string `` /* 193-byte string literal not displayed */
	// contains filtered or unexported fields
}

HeadGroupSpec is the spec for the head pod.

func (*HeadGroupSpec) Descriptor deprecated

func (*HeadGroupSpec) Descriptor() ([]byte, []int)

Deprecated: Use HeadGroupSpec.ProtoReflect.Descriptor instead.

func (*HeadGroupSpec) GetRayStartParams

func (x *HeadGroupSpec) GetRayStartParams() map[string]string

func (*HeadGroupSpec) ProtoMessage

func (*HeadGroupSpec) ProtoMessage()

func (*HeadGroupSpec) ProtoReflect

func (x *HeadGroupSpec) ProtoReflect() protoreflect.Message

func (*HeadGroupSpec) Reset

func (x *HeadGroupSpec) Reset()

func (*HeadGroupSpec) String

func (x *HeadGroupSpec) String() string

type HiveQuery

type HiveQuery struct {
	Query      string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"`
	TimeoutSec uint32 `protobuf:"varint,2,opt,name=timeout_sec,json=timeoutSec,proto3" json:"timeout_sec,omitempty"`
	RetryCount uint32 `protobuf:"varint,3,opt,name=retryCount,proto3" json:"retryCount,omitempty"`
	// contains filtered or unexported fields
}

Defines a query to execute on a Hive cluster.
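
A minimal sketch of a single Hive query with a timeout and retry budget; the import path and query text are placeholders.

package main

import (
	"example.com/kozmoidl/plugins" // hypothetical import path for this package
)

func main() {
	// One query with a 2-hour timeout and up to 3 retries.
	q := &plugins.HiveQuery{
		Query:      "SELECT COUNT(*) FROM events WHERE ds = '2024-02-21'",
		TimeoutSec: 7200,
		RetryCount: 3,
	}
	_ = q
}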

func (*HiveQuery) Descriptor deprecated

func (*HiveQuery) Descriptor() ([]byte, []int)

Deprecated: Use HiveQuery.ProtoReflect.Descriptor instead.

func (*HiveQuery) GetQuery

func (x *HiveQuery) GetQuery() string

func (*HiveQuery) GetRetryCount

func (x *HiveQuery) GetRetryCount() uint32

func (*HiveQuery) GetTimeoutSec

func (x *HiveQuery) GetTimeoutSec() uint32

func (*HiveQuery) ProtoMessage

func (*HiveQuery) ProtoMessage()

func (*HiveQuery) ProtoReflect

func (x *HiveQuery) ProtoReflect() protoreflect.Message

func (*HiveQuery) Reset

func (x *HiveQuery) Reset()

func (*HiveQuery) String

func (x *HiveQuery) String() string

type HiveQueryCollection

type HiveQueryCollection struct {
	Queries []*HiveQuery `protobuf:"bytes,2,rep,name=queries,proto3" json:"queries,omitempty"`
	// contains filtered or unexported fields
}

Defines a collection of hive queries.

func (*HiveQueryCollection) Descriptor deprecated

func (*HiveQueryCollection) Descriptor() ([]byte, []int)

Deprecated: Use HiveQueryCollection.ProtoReflect.Descriptor instead.

func (*HiveQueryCollection) GetQueries

func (x *HiveQueryCollection) GetQueries() []*HiveQuery

func (*HiveQueryCollection) ProtoMessage

func (*HiveQueryCollection) ProtoMessage()

func (*HiveQueryCollection) ProtoReflect

func (x *HiveQueryCollection) ProtoReflect() protoreflect.Message

func (*HiveQueryCollection) Reset

func (x *HiveQueryCollection) Reset()

func (*HiveQueryCollection) String

func (x *HiveQueryCollection) String() string

type PrestoQuery

type PrestoQuery struct {
	RoutingGroup string `protobuf:"bytes,1,opt,name=routing_group,json=routingGroup,proto3" json:"routing_group,omitempty"`
	Catalog      string `protobuf:"bytes,2,opt,name=catalog,proto3" json:"catalog,omitempty"`
	Schema       string `protobuf:"bytes,3,opt,name=schema,proto3" json:"schema,omitempty"`
	Statement    string `protobuf:"bytes,4,opt,name=statement,proto3" json:"statement,omitempty"`
	// contains filtered or unexported fields
}

This message works with the 'presto' task type in the SDK and is the object that will be in the 'custom' field of a Presto task's TaskTemplate.
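
A hedged sketch of a PrestoQuery; the import path and values are illustrative.

package main

import (
	"example.com/kozmoidl/plugins" // hypothetical import path for this package
)

func main() {
	// The statement runs against the given catalog and schema; routing_group selects the Presto cluster.
	q := &plugins.PrestoQuery{
		RoutingGroup: "adhoc",
		Catalog:      "hive",
		Schema:       "default",
		Statement:    "SELECT * FROM events LIMIT 10",
	}
	_ = q
}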

func (*PrestoQuery) Descriptor deprecated

func (*PrestoQuery) Descriptor() ([]byte, []int)

Deprecated: Use PrestoQuery.ProtoReflect.Descriptor instead.

func (*PrestoQuery) GetCatalog

func (x *PrestoQuery) GetCatalog() string

func (*PrestoQuery) GetRoutingGroup

func (x *PrestoQuery) GetRoutingGroup() string

func (*PrestoQuery) GetSchema

func (x *PrestoQuery) GetSchema() string

func (*PrestoQuery) GetStatement

func (x *PrestoQuery) GetStatement() string

func (*PrestoQuery) ProtoMessage

func (*PrestoQuery) ProtoMessage()

func (*PrestoQuery) ProtoReflect

func (x *PrestoQuery) ProtoReflect() protoreflect.Message

func (*PrestoQuery) Reset

func (x *PrestoQuery) Reset()

func (*PrestoQuery) String

func (x *PrestoQuery) String() string

type QuboleHiveJob

type QuboleHiveJob struct {
	ClusterLabel string `protobuf:"bytes,1,opt,name=cluster_label,json=clusterLabel,proto3" json:"cluster_label,omitempty"`
	// Deprecated: Marked as deprecated in kozmoidl/plugins/qubole.proto.
	QueryCollection *HiveQueryCollection `protobuf:"bytes,2,opt,name=query_collection,json=queryCollection,proto3" json:"query_collection,omitempty"`
	Tags            []string             `protobuf:"bytes,3,rep,name=tags,proto3" json:"tags,omitempty"`
	Query           *HiveQuery           `protobuf:"bytes,4,opt,name=query,proto3" json:"query,omitempty"`
	// contains filtered or unexported fields
}

This message works with the 'hive' task type in the SDK and is the object that will be in the 'custom' field of a Hive task's TaskTemplate.
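
A minimal sketch of a QuboleHiveJob using the single query field rather than the deprecated query_collection; the import path and values are placeholders.

package main

import (
	"example.com/kozmoidl/plugins" // hypothetical import path for this package
)

func main() {
	job := &plugins.QuboleHiveJob{
		ClusterLabel: "default",
		Tags:         []string{"team:data"},
		Query: &plugins.HiveQuery{
			Query:      "SELECT 1",
			TimeoutSec: 600,
		},
	}
	_ = job
}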

func (*QuboleHiveJob) Descriptor deprecated

func (*QuboleHiveJob) Descriptor() ([]byte, []int)

Deprecated: Use QuboleHiveJob.ProtoReflect.Descriptor instead.

func (*QuboleHiveJob) GetClusterLabel

func (x *QuboleHiveJob) GetClusterLabel() string

func (*QuboleHiveJob) GetQuery

func (x *QuboleHiveJob) GetQuery() *HiveQuery

func (*QuboleHiveJob) GetQueryCollection deprecated

func (x *QuboleHiveJob) GetQueryCollection() *HiveQueryCollection

Deprecated: Marked as deprecated in kozmoidl/plugins/qubole.proto.

func (*QuboleHiveJob) GetTags

func (x *QuboleHiveJob) GetTags() []string

func (*QuboleHiveJob) ProtoMessage

func (*QuboleHiveJob) ProtoMessage()

func (*QuboleHiveJob) ProtoReflect

func (x *QuboleHiveJob) ProtoReflect() protoreflect.Message

func (*QuboleHiveJob) Reset

func (x *QuboleHiveJob) Reset()

func (*QuboleHiveJob) String

func (x *QuboleHiveJob) String() string

type RayCluster

type RayCluster struct {

	// HeadGroupSpec is the spec for the head pod
	HeadGroupSpec *HeadGroupSpec `protobuf:"bytes,1,opt,name=head_group_spec,json=headGroupSpec,proto3" json:"head_group_spec,omitempty"`
	// WorkerGroupSpecs are the specs for the worker pods
	WorkerGroupSpec []*WorkerGroupSpec `protobuf:"bytes,2,rep,name=worker_group_spec,json=workerGroupSpec,proto3" json:"worker_group_spec,omitempty"`
	// Whether to enable autoscaling.
	EnableAutoscaling bool `protobuf:"varint,3,opt,name=enable_autoscaling,json=enableAutoscaling,proto3" json:"enable_autoscaling,omitempty"`
	// contains filtered or unexported fields
}

RayCluster defines the desired state of a Ray cluster.
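
A hedged sketch of a RayCluster combining a HeadGroupSpec and one autoscaled WorkerGroupSpec (the two messages documented elsewhere on this page); the import path and parameters are illustrative.

package main

import (
	"example.com/kozmoidl/plugins" // hypothetical import path for this package
)

func main() {
	// A head group plus one worker group scaling between 1 and 5 replicas (2 desired).
	cluster := &plugins.RayCluster{
		HeadGroupSpec: &plugins.HeadGroupSpec{
			RayStartParams: map[string]string{"object-store-memory": "1000000000"},
		},
		WorkerGroupSpec: []*plugins.WorkerGroupSpec{
			{
				GroupName:   "default-group",
				Replicas:    2,
				MinReplicas: 1,
				MaxReplicas: 5,
			},
		},
		EnableAutoscaling: true,
	}
	_ = cluster
}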

func (*RayCluster) Descriptor deprecated

func (*RayCluster) Descriptor() ([]byte, []int)

Deprecated: Use RayCluster.ProtoReflect.Descriptor instead.

func (*RayCluster) GetEnableAutoscaling

func (x *RayCluster) GetEnableAutoscaling() bool

func (*RayCluster) GetHeadGroupSpec

func (x *RayCluster) GetHeadGroupSpec() *HeadGroupSpec

func (*RayCluster) GetWorkerGroupSpec

func (x *RayCluster) GetWorkerGroupSpec() []*WorkerGroupSpec

func (*RayCluster) ProtoMessage

func (*RayCluster) ProtoMessage()

func (*RayCluster) ProtoReflect

func (x *RayCluster) ProtoReflect() protoreflect.Message

func (*RayCluster) Reset

func (x *RayCluster) Reset()

func (*RayCluster) String

func (x *RayCluster) String() string

type RayJob

type RayJob struct {

	// RayClusterSpec is the cluster template to run the job
	RayCluster *RayCluster `protobuf:"bytes,1,opt,name=ray_cluster,json=rayCluster,proto3" json:"ray_cluster,omitempty"`
	// runtime_env is base64 encoded.
	// Ray runtime environments: https://docs.ray.io/en/latest/ray-core/handling-dependencies.html#runtime-environments
	RuntimeEnv string `protobuf:"bytes,2,opt,name=runtime_env,json=runtimeEnv,proto3" json:"runtime_env,omitempty"`
	// shutdown_after_job_finishes specifies whether the RayCluster should be deleted after the RayJob finishes.
	ShutdownAfterJobFinishes bool `` /* 138-byte string literal not displayed */
	// ttl_seconds_after_finished specifies the number of seconds after which the RayCluster will be deleted after the RayJob finishes.
	TtlSecondsAfterFinished int32 `` /* 135-byte string literal not displayed */
	// contains filtered or unexported fields
}

RayJob defines the desired state of a Ray job.
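
A minimal sketch of a RayJob, including base64-encoding the runtime environment as the runtime_env comment requires; the import path and runtime environment contents are illustrative.

package main

import (
	"encoding/base64"

	"example.com/kozmoidl/plugins" // hypothetical import path for this package
)

func main() {
	// runtime_env is carried base64-encoded; encode the JSON/YAML snippet before assigning it.
	runtimeEnv := base64.StdEncoding.EncodeToString([]byte(`{"pip": ["pandas"]}`))
	job := &plugins.RayJob{
		RayCluster:               &plugins.RayCluster{ /* cluster template, see RayCluster above */ },
		RuntimeEnv:               runtimeEnv,
		ShutdownAfterJobFinishes: true,
		TtlSecondsAfterFinished:  3600,
	}
	_ = job
}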

func (*RayJob) Descriptor deprecated

func (*RayJob) Descriptor() ([]byte, []int)

Deprecated: Use RayJob.ProtoReflect.Descriptor instead.

func (*RayJob) GetRayCluster

func (x *RayJob) GetRayCluster() *RayCluster

func (*RayJob) GetRuntimeEnv

func (x *RayJob) GetRuntimeEnv() string

func (*RayJob) GetShutdownAfterJobFinishes

func (x *RayJob) GetShutdownAfterJobFinishes() bool

func (*RayJob) GetTtlSecondsAfterFinished

func (x *RayJob) GetTtlSecondsAfterFinished() int32

func (*RayJob) ProtoMessage

func (*RayJob) ProtoMessage()

func (*RayJob) ProtoReflect

func (x *RayJob) ProtoReflect() protoreflect.Message

func (*RayJob) Reset

func (x *RayJob) Reset()

func (*RayJob) String

func (x *RayJob) String() string

type SparkApplication

type SparkApplication struct {
	// contains filtered or unexported fields
}

func (*SparkApplication) Descriptor deprecated

func (*SparkApplication) Descriptor() ([]byte, []int)

Deprecated: Use SparkApplication.ProtoReflect.Descriptor instead.

func (*SparkApplication) ProtoMessage

func (*SparkApplication) ProtoMessage()

func (*SparkApplication) ProtoReflect

func (x *SparkApplication) ProtoReflect() protoreflect.Message

func (*SparkApplication) Reset

func (x *SparkApplication) Reset()

func (*SparkApplication) String

func (x *SparkApplication) String() string

type SparkApplication_Type

type SparkApplication_Type int32
const (
	SparkApplication_PYTHON SparkApplication_Type = 0
	SparkApplication_JAVA   SparkApplication_Type = 1
	SparkApplication_SCALA  SparkApplication_Type = 2
	SparkApplication_R      SparkApplication_Type = 3
)

func (SparkApplication_Type) Descriptor

func (SparkApplication_Type) Descriptor() protoreflect.EnumDescriptor

func (SparkApplication_Type) Enum

func (x SparkApplication_Type) Enum() *SparkApplication_Type

func (SparkApplication_Type) EnumDescriptor deprecated

func (SparkApplication_Type) EnumDescriptor() ([]byte, []int)

Deprecated: Use SparkApplication_Type.Descriptor instead.

func (SparkApplication_Type) Number

func (x SparkApplication_Type) Number() protoreflect.EnumNumber

func (SparkApplication_Type) String

func (x SparkApplication_Type) String() string

func (SparkApplication_Type) Type

func (SparkApplication_Type) Type() protoreflect.EnumType

type SparkJob

type SparkJob struct {
	ApplicationType     SparkApplication_Type `` /* 128-byte string literal not displayed */
	MainApplicationFile string                `protobuf:"bytes,2,opt,name=mainApplicationFile,proto3" json:"mainApplicationFile,omitempty"`
	MainClass           string                `protobuf:"bytes,3,opt,name=mainClass,proto3" json:"mainClass,omitempty"`
	SparkConf           map[string]string     ``                                                                              /* 159-byte string literal not displayed */
	HadoopConf          map[string]string     ``                                                                              /* 161-byte string literal not displayed */
	ExecutorPath        string                `protobuf:"bytes,6,opt,name=executorPath,proto3" json:"executorPath,omitempty"` // Executor path for Python jobs.
	// Databricks job configuration.
	// The config structure can be found here: https://docs.databricks.com/dev-tools/api/2.0/jobs.html#request-structure
	DatabricksConf *structpb.Struct `protobuf:"bytes,7,opt,name=databricksConf,proto3" json:"databricksConf,omitempty"`
	// Databricks access token. https://docs.databricks.com/dev-tools/api/latest/authentication.html
	// This token can be set in either kozmopropeller or kozmokit.
	DatabricksToken string `protobuf:"bytes,8,opt,name=databricksToken,proto3" json:"databricksToken,omitempty"`
	// Domain name of your deployment. Use the form <account>.cloud.databricks.com.
	// This instance name can be set in either kozmopropeller or kozmokit.
	DatabricksInstance string `protobuf:"bytes,9,opt,name=databricksInstance,proto3" json:"databricksInstance,omitempty"`
	// contains filtered or unexported fields
}

Custom proto for the Spark plugin.
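
A hedged sketch of a PySpark job using the SparkApplication_Type enum and per-job Spark configuration; the import path, file location, and settings are illustrative.

package main

import (
	"example.com/kozmoidl/plugins" // hypothetical import path for this package
)

func main() {
	// A Python Spark application with its entrypoint file and Spark configuration overrides.
	job := &plugins.SparkJob{
		ApplicationType:     plugins.SparkApplication_PYTHON,
		MainApplicationFile: "local:///opt/app/main.py",
		SparkConf: map[string]string{
			"spark.executor.instances": "4",
			"spark.executor.memory":    "4g",
		},
	}
	_ = job
}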

func (*SparkJob) Descriptor deprecated

func (*SparkJob) Descriptor() ([]byte, []int)

Deprecated: Use SparkJob.ProtoReflect.Descriptor instead.

func (*SparkJob) GetApplicationType

func (x *SparkJob) GetApplicationType() SparkApplication_Type

func (*SparkJob) GetDatabricksConf

func (x *SparkJob) GetDatabricksConf() *structpb.Struct

func (*SparkJob) GetDatabricksInstance

func (x *SparkJob) GetDatabricksInstance() string

func (*SparkJob) GetDatabricksToken

func (x *SparkJob) GetDatabricksToken() string

func (*SparkJob) GetExecutorPath

func (x *SparkJob) GetExecutorPath() string

func (*SparkJob) GetHadoopConf

func (x *SparkJob) GetHadoopConf() map[string]string

func (*SparkJob) GetMainApplicationFile

func (x *SparkJob) GetMainApplicationFile() string

func (*SparkJob) GetMainClass

func (x *SparkJob) GetMainClass() string

func (*SparkJob) GetSparkConf

func (x *SparkJob) GetSparkConf() map[string]string

func (*SparkJob) ProtoMessage

func (*SparkJob) ProtoMessage()

func (*SparkJob) ProtoReflect

func (x *SparkJob) ProtoReflect() protoreflect.Message

func (*SparkJob) Reset

func (x *SparkJob) Reset()

func (*SparkJob) String

func (x *SparkJob) String() string

type Waitable

type Waitable struct {
	WfExecId   *core.WorkflowExecutionIdentifier `protobuf:"bytes,1,opt,name=wf_exec_id,json=wfExecId,proto3" json:"wf_exec_id,omitempty"`
	Phase      core.WorkflowExecution_Phase      `protobuf:"varint,2,opt,name=phase,proto3,enum=kozmoidl.core.WorkflowExecution_Phase" json:"phase,omitempty"`
	WorkflowId string                            `protobuf:"bytes,3,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"`
	// contains filtered or unexported fields
}

Represents an Execution that was launched and could be waited on.

func (*Waitable) Descriptor deprecated

func (*Waitable) Descriptor() ([]byte, []int)

Deprecated: Use Waitable.ProtoReflect.Descriptor instead.

func (*Waitable) GetPhase

func (x *Waitable) GetPhase() core.WorkflowExecution_Phase

func (*Waitable) GetWfExecId

func (x *Waitable) GetWfExecId() *core.WorkflowExecutionIdentifier

func (*Waitable) GetWorkflowId

func (x *Waitable) GetWorkflowId() string

func (*Waitable) ProtoMessage

func (*Waitable) ProtoMessage()

func (*Waitable) ProtoReflect

func (x *Waitable) ProtoReflect() protoreflect.Message

func (*Waitable) Reset

func (x *Waitable) Reset()

func (*Waitable) String

func (x *Waitable) String() string

type WorkerGroupSpec

type WorkerGroupSpec struct {

	// Required. RayCluster can have multiple worker groups, and it distinguishes them by name
	GroupName string `protobuf:"bytes,1,opt,name=group_name,json=groupName,proto3" json:"group_name,omitempty"`
	// Required. Desired replicas of the worker group. Defaults to 1.
	Replicas int32 `protobuf:"varint,2,opt,name=replicas,proto3" json:"replicas,omitempty"`
	// Optional. Min replicas of the worker group. MinReplicas defaults to 1.
	MinReplicas int32 `protobuf:"varint,3,opt,name=min_replicas,json=minReplicas,proto3" json:"min_replicas,omitempty"`
	// Optional. Max replicas of the worker group. MaxReplicas defaults to maxInt32
	MaxReplicas int32 `protobuf:"varint,4,opt,name=max_replicas,json=maxReplicas,proto3" json:"max_replicas,omitempty"`
	// Optional. RayStartParams are the params of the start command: address, object-store-memory.
	// Refer to https://docs.ray.io/en/latest/ray-core/package-ref.html#ray-start
	RayStartParams map[string]string `` /* 193-byte string literal not displayed */
	// contains filtered or unexported fields
}

WorkerGroupSpec is the spec for the worker pods.

func (*WorkerGroupSpec) Descriptor deprecated

func (*WorkerGroupSpec) Descriptor() ([]byte, []int)

Deprecated: Use WorkerGroupSpec.ProtoReflect.Descriptor instead.

func (*WorkerGroupSpec) GetGroupName

func (x *WorkerGroupSpec) GetGroupName() string

func (*WorkerGroupSpec) GetMaxReplicas

func (x *WorkerGroupSpec) GetMaxReplicas() int32

func (*WorkerGroupSpec) GetMinReplicas

func (x *WorkerGroupSpec) GetMinReplicas() int32

func (*WorkerGroupSpec) GetRayStartParams

func (x *WorkerGroupSpec) GetRayStartParams() map[string]string

func (*WorkerGroupSpec) GetReplicas

func (x *WorkerGroupSpec) GetReplicas() int32

func (*WorkerGroupSpec) ProtoMessage

func (*WorkerGroupSpec) ProtoMessage()

func (*WorkerGroupSpec) ProtoReflect

func (x *WorkerGroupSpec) ProtoReflect() protoreflect.Message

func (*WorkerGroupSpec) Reset

func (x *WorkerGroupSpec) Reset()

func (*WorkerGroupSpec) String

func (x *WorkerGroupSpec) String() string
