v1alpha1

package
v0.5.0 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: May 8, 2022 License: Apache-2.0 Imports: 16 Imported by: 0

Documentation

Overview

+groupName=dataproc.google.kubeform.com

Index

Constants

This section is empty.

Variables

View Source
var (
	// TODO: move SchemeBuilder with zz_generated.deepcopy.go to k8s.io/api.
	// localSchemeBuilder and AddToScheme will stay in k8s.io/kubernetes.

	// SchemeBuilder collects the functions that register this package's
	// types with a runtime.Scheme.
	SchemeBuilder runtime.SchemeBuilder

	// AddToScheme applies all stored scheme-registration functions to a
	// scheme; callers use it to make this package's types known.
	AddToScheme = localSchemeBuilder.AddToScheme
)
View Source
// SchemeGroupVersion is the group/version used to register these objects.
var SchemeGroupVersion = schema.GroupVersion{Group: dataproc.GroupName, Version: "v1alpha1"}

Functions

func GetDecoder

func GetDecoder() map[string]jsoniter.ValDecoder

func GetEncoder

func GetEncoder() map[string]jsoniter.ValEncoder

func Kind

func Kind(kind string) schema.GroupKind

Kind takes an unqualified kind and returns a Group qualified GroupKind

func Resource

func Resource(resource string) schema.GroupResource

Resource takes an unqualified resource and returns a Group qualified GroupResource

Types

type AutoscalingPolicy

// AutoscalingPolicy is the custom resource for a Google Dataproc
// autoscaling policy managed by this operator.
type AutoscalingPolicy struct {
	metav1.TypeMeta   `json:",inline,omitempty"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	// Spec defines the desired state of the autoscaling policy.
	Spec AutoscalingPolicySpec `json:"spec,omitempty"`
	// Status reflects the most recently observed state.
	Status AutoscalingPolicyStatus `json:"status,omitempty"`
}

func (*AutoscalingPolicy) DeepCopy

func (in *AutoscalingPolicy) DeepCopy() *AutoscalingPolicy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingPolicy.

func (*AutoscalingPolicy) DeepCopyInto

func (in *AutoscalingPolicy) DeepCopyInto(out *AutoscalingPolicy)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*AutoscalingPolicy) DeepCopyObject

func (in *AutoscalingPolicy) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*AutoscalingPolicy) SetupWebhookWithManager

func (r *AutoscalingPolicy) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*AutoscalingPolicy) ValidateCreate

func (r *AutoscalingPolicy) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*AutoscalingPolicy) ValidateDelete

func (r *AutoscalingPolicy) ValidateDelete() error

ValidateDelete implements webhook.Validator so a webhook will be registered for the type

func (*AutoscalingPolicy) ValidateUpdate

func (r *AutoscalingPolicy) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

type AutoscalingPolicyList

// AutoscalingPolicyList is a list of AutoscalingPolicy objects, returned
// by list operations on the API.
type AutoscalingPolicyList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	// Items is a list of AutoscalingPolicy CRD objects
	Items []AutoscalingPolicy `json:"items,omitempty"`
}

AutoscalingPolicyList is a list of AutoscalingPolicies

func (*AutoscalingPolicyList) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingPolicyList.

func (*AutoscalingPolicyList) DeepCopyInto

func (in *AutoscalingPolicyList) DeepCopyInto(out *AutoscalingPolicyList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*AutoscalingPolicyList) DeepCopyObject

func (in *AutoscalingPolicyList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

type AutoscalingPolicySpec

// AutoscalingPolicySpec is the desired-state envelope for an
// AutoscalingPolicy. Fields tagged tf:"-" are excluded from Terraform
// serialization.
type AutoscalingPolicySpec struct {
	// State holds the last observed resource values (not serialized to tf).
	State *AutoscalingPolicySpecResource `json:"state,omitempty" tf:"-"`

	// Resource is the desired configuration of the underlying cloud resource.
	Resource AutoscalingPolicySpecResource `json:"resource" tf:"resource"`

	// UpdatePolicy governs how changes to the resource are applied.
	UpdatePolicy base.UpdatePolicy `json:"updatePolicy,omitempty" tf:"-"`

	// TerminationPolicy governs what happens to the resource when the CR is deleted.
	TerminationPolicy base.TerminationPolicy `json:"terminationPolicy,omitempty" tf:"-"`

	// ProviderRef is a local reference to the provider configuration object.
	ProviderRef core.LocalObjectReference `json:"providerRef" tf:"-"`

	// BackendRef is an optional local reference to a backend object.
	BackendRef *core.LocalObjectReference `json:"backendRef,omitempty" tf:"-"`
}

func (*AutoscalingPolicySpec) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingPolicySpec.

func (*AutoscalingPolicySpec) DeepCopyInto

func (in *AutoscalingPolicySpec) DeepCopyInto(out *AutoscalingPolicySpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type AutoscalingPolicySpecBasicAlgorithm

// AutoscalingPolicySpecBasicAlgorithm configures the basic autoscaling
// algorithm: a cooldown period plus YARN-based scaling settings.
type AutoscalingPolicySpecBasicAlgorithm struct {
	// Duration between scaling events. A scaling period starts after the
	// update operation from the previous event has completed.
	//
	// Bounds: [2m, 1d]. Default: 2m.
	// +optional
	CooldownPeriod *string `json:"cooldownPeriod,omitempty" tf:"cooldown_period"`
	// YARN autoscaling configuration.
	YarnConfig *AutoscalingPolicySpecBasicAlgorithmYarnConfig `json:"yarnConfig" tf:"yarn_config"`
}

func (*AutoscalingPolicySpecBasicAlgorithm) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingPolicySpecBasicAlgorithm.

func (*AutoscalingPolicySpecBasicAlgorithm) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type AutoscalingPolicySpecBasicAlgorithmCodec

// AutoscalingPolicySpecBasicAlgorithmCodec provides custom JSON
// encoding/decoding for AutoscalingPolicySpecBasicAlgorithm (see its
// Decode/Encode/IsEmpty methods).
type AutoscalingPolicySpecBasicAlgorithmCodec struct {
}

+k8s:deepcopy-gen=false

func (AutoscalingPolicySpecBasicAlgorithmCodec) Decode

func (AutoscalingPolicySpecBasicAlgorithmCodec) Encode

func (AutoscalingPolicySpecBasicAlgorithmCodec) IsEmpty

type AutoscalingPolicySpecBasicAlgorithmYarnConfig

// AutoscalingPolicySpecBasicAlgorithmYarnConfig holds the YARN-based
// autoscaling thresholds (decommission timeout and scale up/down factors).
type AutoscalingPolicySpecBasicAlgorithmYarnConfig struct {
	// Timeout for YARN graceful decommissioning of Node Managers. Specifies the
	// duration to wait for jobs to complete before forcefully removing workers
	// (and potentially interrupting jobs). Only applicable to downscaling operations.
	//
	// Bounds: [0s, 1d].
	GracefulDecommissionTimeout *string `json:"gracefulDecommissionTimeout" tf:"graceful_decommission_timeout"`
	// Fraction of average pending memory in the last cooldown period for which to
	// remove workers. A scale-down factor of 1 will result in scaling down so that there
	// is no available memory remaining after the update (more aggressive scaling).
	// A scale-down factor of 0 disables removing workers, which can be beneficial for
	// autoscaling a single job.
	//
	// Bounds: [0.0, 1.0].
	ScaleDownFactor *float64 `json:"scaleDownFactor" tf:"scale_down_factor"`
	// Minimum scale-down threshold as a fraction of total cluster size before scaling occurs.
	// For example, in a 20-worker cluster, a threshold of 0.1 means the autoscaler must
	// recommend at least a 2 worker scale-down for the cluster to scale. A threshold of 0
	// means the autoscaler will scale down on any recommended change.
	//
	// Bounds: [0.0, 1.0]. Default: 0.0.
	// +optional
	ScaleDownMinWorkerFraction *float64 `json:"scaleDownMinWorkerFraction,omitempty" tf:"scale_down_min_worker_fraction"`
	// Fraction of average pending memory in the last cooldown period for which to
	// add workers. A scale-up factor of 1.0 will result in scaling up so that there
	// is no pending memory remaining after the update (more aggressive scaling).
	// A scale-up factor closer to 0 will result in a smaller magnitude of scaling up
	// (less aggressive scaling).
	//
	// Bounds: [0.0, 1.0].
	ScaleUpFactor *float64 `json:"scaleUpFactor" tf:"scale_up_factor"`
	// Minimum scale-up threshold as a fraction of total cluster size before scaling
	// occurs. For example, in a 20-worker cluster, a threshold of 0.1 means the autoscaler
	// must recommend at least a 2-worker scale-up for the cluster to scale. A threshold of
	// 0 means the autoscaler will scale up on any recommended change.
	//
	// Bounds: [0.0, 1.0]. Default: 0.0.
	// +optional
	ScaleUpMinWorkerFraction *float64 `json:"scaleUpMinWorkerFraction,omitempty" tf:"scale_up_min_worker_fraction"`
}

func (*AutoscalingPolicySpecBasicAlgorithmYarnConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingPolicySpecBasicAlgorithmYarnConfig.

func (*AutoscalingPolicySpecBasicAlgorithmYarnConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type AutoscalingPolicySpecBasicAlgorithmYarnConfigCodec

// AutoscalingPolicySpecBasicAlgorithmYarnConfigCodec provides custom JSON
// encoding/decoding for AutoscalingPolicySpecBasicAlgorithmYarnConfig (see
// its Decode/Encode/IsEmpty methods).
type AutoscalingPolicySpecBasicAlgorithmYarnConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (AutoscalingPolicySpecBasicAlgorithmYarnConfigCodec) Decode

func (AutoscalingPolicySpecBasicAlgorithmYarnConfigCodec) Encode

func (AutoscalingPolicySpecBasicAlgorithmYarnConfigCodec) IsEmpty

type AutoscalingPolicySpecResource

// AutoscalingPolicySpecResource mirrors the Terraform
// google_dataproc_autoscaling_policy resource arguments.
type AutoscalingPolicySpecResource struct {
	// Timeouts optionally overrides the create/read/update/delete timeouts.
	Timeouts *base.ResourceTimeout `json:"timeouts,omitempty" tf:"timeouts"`

	// ID is the provider-assigned identifier of the resource.
	ID string `json:"id,omitempty" tf:"id,omitempty"`

	// Basic algorithm for autoscaling.
	// +optional
	BasicAlgorithm *AutoscalingPolicySpecBasicAlgorithm `json:"basicAlgorithm,omitempty" tf:"basic_algorithm"`
	// The location where the autoscaling policy should reside.
	// The default value is 'global'.
	// +optional
	Location *string `json:"location,omitempty" tf:"location"`
	// The "resource name" of the autoscaling policy.
	// +optional
	Name *string `json:"name,omitempty" tf:"name"`
	// The policy id. The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_),
	// and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of between
	// 3 and 50 characters.
	PolicyID *string `json:"policyID" tf:"policy_id"`
	// +optional
	Project *string `json:"project,omitempty" tf:"project"`
	// Describes how the autoscaler will operate for secondary workers.
	// +optional
	SecondaryWorkerConfig *AutoscalingPolicySpecSecondaryWorkerConfig `json:"secondaryWorkerConfig,omitempty" tf:"secondary_worker_config"`
	// Describes how the autoscaler will operate for primary workers.
	// +optional
	WorkerConfig *AutoscalingPolicySpecWorkerConfig `json:"workerConfig,omitempty" tf:"worker_config"`
}

func (*AutoscalingPolicySpecResource) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingPolicySpecResource.

func (*AutoscalingPolicySpecResource) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type AutoscalingPolicySpecSecondaryWorkerConfig

// AutoscalingPolicySpecSecondaryWorkerConfig describes autoscaler behavior
// for secondary (preemptible) workers: instance bounds and group weight.
type AutoscalingPolicySpecSecondaryWorkerConfig struct {
	// Maximum number of instances for this group. Note that by default, clusters will not use
	// secondary workers. Required for secondary workers if the minimum secondary instances is set.
	// Bounds: [minInstances, ). Defaults to 0.
	// +optional
	MaxInstances *int64 `json:"maxInstances,omitempty" tf:"max_instances"`
	// Minimum number of instances for this group. Bounds: [0, maxInstances]. Defaults to 0.
	// +optional
	MinInstances *int64 `json:"minInstances,omitempty" tf:"min_instances"`
	// Weight for the instance group, which is used to determine the fraction of total workers
	// in the cluster from this instance group. For example, if primary workers have weight 2,
	// and secondary workers have weight 1, the cluster will have approximately 2 primary workers
	// for each secondary worker.
	//
	// The cluster may not reach the specified balance if constrained by min/max bounds or other
	// autoscaling settings. For example, if maxInstances for secondary workers is 0, then only
	// primary workers will be added. The cluster can also be out of balance when created.
	//
	// If weight is not set on any instance group, the cluster will default to equal weight for
	// all groups: the cluster will attempt to maintain an equal number of workers in each group
	// within the configured size bounds for each group. If weight is set for one group only,
	// the cluster will default to zero weight on the unset group. For example if weight is set
	// only on primary workers, the cluster will use primary workers only and no secondary workers.
	// +optional
	Weight *int64 `json:"weight,omitempty" tf:"weight"`
}

func (*AutoscalingPolicySpecSecondaryWorkerConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingPolicySpecSecondaryWorkerConfig.

func (*AutoscalingPolicySpecSecondaryWorkerConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type AutoscalingPolicySpecSecondaryWorkerConfigCodec

// AutoscalingPolicySpecSecondaryWorkerConfigCodec provides custom JSON
// encoding/decoding for AutoscalingPolicySpecSecondaryWorkerConfig (see
// its Decode/Encode/IsEmpty methods).
type AutoscalingPolicySpecSecondaryWorkerConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (AutoscalingPolicySpecSecondaryWorkerConfigCodec) Decode

func (AutoscalingPolicySpecSecondaryWorkerConfigCodec) Encode

func (AutoscalingPolicySpecSecondaryWorkerConfigCodec) IsEmpty

type AutoscalingPolicySpecWorkerConfig

// AutoscalingPolicySpecWorkerConfig describes autoscaler behavior for
// primary workers: instance bounds and group weight.
type AutoscalingPolicySpecWorkerConfig struct {
	// Maximum number of instances for this group.
	MaxInstances *int64 `json:"maxInstances" tf:"max_instances"`
	// Minimum number of instances for this group. Bounds: [2, maxInstances]. Defaults to 2.
	// +optional
	MinInstances *int64 `json:"minInstances,omitempty" tf:"min_instances"`
	// Weight for the instance group, which is used to determine the fraction of total workers
	// in the cluster from this instance group. For example, if primary workers have weight 2,
	// and secondary workers have weight 1, the cluster will have approximately 2 primary workers
	// for each secondary worker.
	//
	// The cluster may not reach the specified balance if constrained by min/max bounds or other
	// autoscaling settings. For example, if maxInstances for secondary workers is 0, then only
	// primary workers will be added. The cluster can also be out of balance when created.
	//
	// If weight is not set on any instance group, the cluster will default to equal weight for
	// all groups: the cluster will attempt to maintain an equal number of workers in each group
	// within the configured size bounds for each group. If weight is set for one group only,
	// the cluster will default to zero weight on the unset group. For example if weight is set
	// only on primary workers, the cluster will use primary workers only and no secondary workers.
	// +optional
	Weight *int64 `json:"weight,omitempty" tf:"weight"`
}

func (*AutoscalingPolicySpecWorkerConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingPolicySpecWorkerConfig.

func (*AutoscalingPolicySpecWorkerConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type AutoscalingPolicySpecWorkerConfigCodec

// AutoscalingPolicySpecWorkerConfigCodec provides custom JSON
// encoding/decoding for AutoscalingPolicySpecWorkerConfig (see its
// Decode/Encode/IsEmpty methods).
type AutoscalingPolicySpecWorkerConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (AutoscalingPolicySpecWorkerConfigCodec) Decode

func (AutoscalingPolicySpecWorkerConfigCodec) Encode

func (AutoscalingPolicySpecWorkerConfigCodec) IsEmpty

type AutoscalingPolicyStatus

// AutoscalingPolicyStatus captures the observed state of an AutoscalingPolicy.
type AutoscalingPolicyStatus struct {
	// Resource generation, which is updated on mutation by the API Server.
	// +optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty"`
	// Phase is the current high-level status of the resource.
	// +optional
	Phase status.Status `json:"phase,omitempty"`
	// Conditions holds detailed condition entries describing resource state.
	// +optional
	Conditions []kmapi.Condition `json:"conditions,omitempty"`
}

func (*AutoscalingPolicyStatus) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingPolicyStatus.

func (*AutoscalingPolicyStatus) DeepCopyInto

func (in *AutoscalingPolicyStatus) DeepCopyInto(out *AutoscalingPolicyStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type Cluster

// Cluster is the custom resource for a Google Dataproc cluster managed by
// this operator.
type Cluster struct {
	metav1.TypeMeta   `json:",inline,omitempty"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	// Spec defines the desired state of the cluster.
	Spec ClusterSpec `json:"spec,omitempty"`
	// Status reflects the most recently observed state.
	Status ClusterStatus `json:"status,omitempty"`
}

func (*Cluster) DeepCopy

func (in *Cluster) DeepCopy() *Cluster

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Cluster.

func (*Cluster) DeepCopyInto

func (in *Cluster) DeepCopyInto(out *Cluster)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*Cluster) DeepCopyObject

func (in *Cluster) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*Cluster) SetupWebhookWithManager

func (r *Cluster) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*Cluster) ValidateCreate

func (r *Cluster) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*Cluster) ValidateDelete

func (r *Cluster) ValidateDelete() error

ValidateDelete implements webhook.Validator so a webhook will be registered for the type

func (*Cluster) ValidateUpdate

func (r *Cluster) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

type ClusterIamBinding

// ClusterIamBinding is the custom resource for an IAM binding on a Google
// Dataproc cluster.
type ClusterIamBinding struct {
	metav1.TypeMeta   `json:",inline,omitempty"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	// Spec defines the desired state of the IAM binding.
	Spec ClusterIamBindingSpec `json:"spec,omitempty"`
	// Status reflects the most recently observed state.
	Status ClusterIamBindingStatus `json:"status,omitempty"`
}

func (*ClusterIamBinding) DeepCopy

func (in *ClusterIamBinding) DeepCopy() *ClusterIamBinding

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterIamBinding.

func (*ClusterIamBinding) DeepCopyInto

func (in *ClusterIamBinding) DeepCopyInto(out *ClusterIamBinding)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ClusterIamBinding) DeepCopyObject

func (in *ClusterIamBinding) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*ClusterIamBinding) SetupWebhookWithManager

func (r *ClusterIamBinding) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*ClusterIamBinding) ValidateCreate

func (r *ClusterIamBinding) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*ClusterIamBinding) ValidateDelete

func (r *ClusterIamBinding) ValidateDelete() error

ValidateDelete implements webhook.Validator so a webhook will be registered for the type

func (*ClusterIamBinding) ValidateUpdate

func (r *ClusterIamBinding) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

type ClusterIamBindingList

// ClusterIamBindingList is a list of ClusterIamBinding objects, returned
// by list operations on the API.
type ClusterIamBindingList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	// Items is a list of ClusterIamBinding CRD objects
	Items []ClusterIamBinding `json:"items,omitempty"`
}

ClusterIamBindingList is a list of ClusterIamBindings

func (*ClusterIamBindingList) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterIamBindingList.

func (*ClusterIamBindingList) DeepCopyInto

func (in *ClusterIamBindingList) DeepCopyInto(out *ClusterIamBindingList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ClusterIamBindingList) DeepCopyObject

func (in *ClusterIamBindingList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

type ClusterIamBindingSpec

// ClusterIamBindingSpec is the desired-state envelope for a
// ClusterIamBinding. Fields tagged tf:"-" are excluded from Terraform
// serialization.
type ClusterIamBindingSpec struct {
	// State holds the last observed resource values (not serialized to tf).
	State *ClusterIamBindingSpecResource `json:"state,omitempty" tf:"-"`

	// Resource is the desired configuration of the underlying cloud resource.
	Resource ClusterIamBindingSpecResource `json:"resource" tf:"resource"`

	// UpdatePolicy governs how changes to the resource are applied.
	UpdatePolicy base.UpdatePolicy `json:"updatePolicy,omitempty" tf:"-"`

	// TerminationPolicy governs what happens to the resource when the CR is deleted.
	TerminationPolicy base.TerminationPolicy `json:"terminationPolicy,omitempty" tf:"-"`

	// ProviderRef is a local reference to the provider configuration object.
	ProviderRef core.LocalObjectReference `json:"providerRef" tf:"-"`

	// BackendRef is an optional local reference to a backend object.
	BackendRef *core.LocalObjectReference `json:"backendRef,omitempty" tf:"-"`
}

func (*ClusterIamBindingSpec) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterIamBindingSpec.

func (*ClusterIamBindingSpec) DeepCopyInto

func (in *ClusterIamBindingSpec) DeepCopyInto(out *ClusterIamBindingSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterIamBindingSpecCondition

// ClusterIamBindingSpecCondition is an IAM condition attached to the binding.
type ClusterIamBindingSpecCondition struct {
	// Description is an optional human-readable description of the condition.
	// +optional
	Description *string `json:"description,omitempty" tf:"description"`
	// Expression is the condition expression text.
	Expression *string `json:"expression" tf:"expression"`
	// Title is a short name for the condition.
	Title *string `json:"title" tf:"title"`
}

func (*ClusterIamBindingSpecCondition) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterIamBindingSpecCondition.

func (*ClusterIamBindingSpecCondition) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterIamBindingSpecConditionCodec

// ClusterIamBindingSpecConditionCodec provides custom JSON
// encoding/decoding for ClusterIamBindingSpecCondition (see its
// Decode/Encode/IsEmpty methods).
type ClusterIamBindingSpecConditionCodec struct {
}

+k8s:deepcopy-gen=false

func (ClusterIamBindingSpecConditionCodec) Decode

func (ClusterIamBindingSpecConditionCodec) Encode

func (ClusterIamBindingSpecConditionCodec) IsEmpty

type ClusterIamBindingSpecResource

// ClusterIamBindingSpecResource mirrors the Terraform
// google_dataproc_cluster_iam_binding resource arguments.
type ClusterIamBindingSpecResource struct {
	// ID is the provider-assigned identifier of the resource.
	ID string `json:"id,omitempty" tf:"id,omitempty"`

	// Cluster is the name of the Dataproc cluster the binding applies to.
	Cluster *string `json:"cluster" tf:"cluster"`
	// Condition optionally restricts when the binding applies.
	// +optional
	Condition *ClusterIamBindingSpecCondition `json:"condition,omitempty" tf:"condition"`
	// Etag is the computed fingerprint of the IAM policy.
	// +optional
	Etag *string `json:"etag,omitempty" tf:"etag"`
	// Members lists the identities granted the role.
	Members []string `json:"members" tf:"members"`
	// +optional
	Project *string `json:"project,omitempty" tf:"project"`
	// +optional
	Region *string `json:"region,omitempty" tf:"region"`
	// Role is the IAM role granted to the members.
	Role *string `json:"role" tf:"role"`
}

func (*ClusterIamBindingSpecResource) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterIamBindingSpecResource.

func (*ClusterIamBindingSpecResource) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterIamBindingStatus

// ClusterIamBindingStatus captures the observed state of a ClusterIamBinding.
type ClusterIamBindingStatus struct {
	// Resource generation, which is updated on mutation by the API Server.
	// +optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty"`
	// Phase is the current high-level status of the resource.
	// +optional
	Phase status.Status `json:"phase,omitempty"`
	// Conditions holds detailed condition entries describing resource state.
	// +optional
	Conditions []kmapi.Condition `json:"conditions,omitempty"`
}

func (*ClusterIamBindingStatus) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterIamBindingStatus.

func (*ClusterIamBindingStatus) DeepCopyInto

func (in *ClusterIamBindingStatus) DeepCopyInto(out *ClusterIamBindingStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterIamMember

// ClusterIamMember is the custom resource for a single-member IAM grant on
// a Google Dataproc cluster.
type ClusterIamMember struct {
	metav1.TypeMeta   `json:",inline,omitempty"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	// Spec defines the desired state of the IAM member grant.
	Spec ClusterIamMemberSpec `json:"spec,omitempty"`
	// Status reflects the most recently observed state.
	Status ClusterIamMemberStatus `json:"status,omitempty"`
}

func (*ClusterIamMember) DeepCopy

func (in *ClusterIamMember) DeepCopy() *ClusterIamMember

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterIamMember.

func (*ClusterIamMember) DeepCopyInto

func (in *ClusterIamMember) DeepCopyInto(out *ClusterIamMember)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ClusterIamMember) DeepCopyObject

func (in *ClusterIamMember) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*ClusterIamMember) SetupWebhookWithManager

func (r *ClusterIamMember) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*ClusterIamMember) ValidateCreate

func (r *ClusterIamMember) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*ClusterIamMember) ValidateDelete

func (r *ClusterIamMember) ValidateDelete() error

ValidateDelete implements webhook.Validator so a webhook will be registered for the type

func (*ClusterIamMember) ValidateUpdate

func (r *ClusterIamMember) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

type ClusterIamMemberList

// ClusterIamMemberList is a list of ClusterIamMember objects, returned by
// list operations on the API.
type ClusterIamMemberList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	// Items is a list of ClusterIamMember CRD objects
	Items []ClusterIamMember `json:"items,omitempty"`
}

ClusterIamMemberList is a list of ClusterIamMembers

func (*ClusterIamMemberList) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterIamMemberList.

func (*ClusterIamMemberList) DeepCopyInto

func (in *ClusterIamMemberList) DeepCopyInto(out *ClusterIamMemberList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ClusterIamMemberList) DeepCopyObject

func (in *ClusterIamMemberList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

type ClusterIamMemberSpec

// ClusterIamMemberSpec is the desired-state envelope for a
// ClusterIamMember. Fields tagged tf:"-" are excluded from Terraform
// serialization.
type ClusterIamMemberSpec struct {
	// State holds the last observed resource values (not serialized to tf).
	State *ClusterIamMemberSpecResource `json:"state,omitempty" tf:"-"`

	// Resource is the desired configuration of the underlying cloud resource.
	Resource ClusterIamMemberSpecResource `json:"resource" tf:"resource"`

	// UpdatePolicy governs how changes to the resource are applied.
	UpdatePolicy base.UpdatePolicy `json:"updatePolicy,omitempty" tf:"-"`

	// TerminationPolicy governs what happens to the resource when the CR is deleted.
	TerminationPolicy base.TerminationPolicy `json:"terminationPolicy,omitempty" tf:"-"`

	// ProviderRef is a local reference to the provider configuration object.
	ProviderRef core.LocalObjectReference `json:"providerRef" tf:"-"`

	// BackendRef is an optional local reference to a backend object.
	BackendRef *core.LocalObjectReference `json:"backendRef,omitempty" tf:"-"`
}

func (*ClusterIamMemberSpec) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterIamMemberSpec.

func (*ClusterIamMemberSpec) DeepCopyInto

func (in *ClusterIamMemberSpec) DeepCopyInto(out *ClusterIamMemberSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterIamMemberSpecCondition

// ClusterIamMemberSpecCondition is an IAM condition attached to the member grant.
type ClusterIamMemberSpecCondition struct {
	// Description is an optional human-readable description of the condition.
	// +optional
	Description *string `json:"description,omitempty" tf:"description"`
	// Expression is the condition expression text.
	Expression *string `json:"expression" tf:"expression"`
	// Title is a short name for the condition.
	Title *string `json:"title" tf:"title"`
}

func (*ClusterIamMemberSpecCondition) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterIamMemberSpecCondition.

func (*ClusterIamMemberSpecCondition) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterIamMemberSpecConditionCodec

// ClusterIamMemberSpecConditionCodec provides custom JSON
// encoding/decoding for ClusterIamMemberSpecCondition (see its
// Decode/Encode/IsEmpty methods).
type ClusterIamMemberSpecConditionCodec struct {
}

+k8s:deepcopy-gen=false

func (ClusterIamMemberSpecConditionCodec) Decode

func (ClusterIamMemberSpecConditionCodec) Encode

func (ClusterIamMemberSpecConditionCodec) IsEmpty

type ClusterIamMemberSpecResource

// ClusterIamMemberSpecResource mirrors the Terraform
// google_dataproc_cluster_iam_member resource arguments.
type ClusterIamMemberSpecResource struct {
	// ID is the provider-assigned identifier of the resource.
	ID string `json:"id,omitempty" tf:"id,omitempty"`

	// Cluster is the name of the Dataproc cluster the grant applies to.
	Cluster *string `json:"cluster" tf:"cluster"`
	// Condition optionally restricts when the grant applies.
	// +optional
	Condition *ClusterIamMemberSpecCondition `json:"condition,omitempty" tf:"condition"`
	// Etag is the computed fingerprint of the IAM policy.
	// +optional
	Etag *string `json:"etag,omitempty" tf:"etag"`
	// Member is the single identity granted the role.
	Member *string `json:"member" tf:"member"`
	// +optional
	Project *string `json:"project,omitempty" tf:"project"`
	// +optional
	Region *string `json:"region,omitempty" tf:"region"`
	// Role is the IAM role granted to the member.
	Role *string `json:"role" tf:"role"`
}

func (*ClusterIamMemberSpecResource) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterIamMemberSpecResource.

func (*ClusterIamMemberSpecResource) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterIamMemberStatus

// ClusterIamMemberStatus captures the observed state of a ClusterIamMember.
type ClusterIamMemberStatus struct {
	// Resource generation, which is updated on mutation by the API Server.
	// +optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty"`
	// Phase is the current high-level status of the resource.
	// +optional
	Phase status.Status `json:"phase,omitempty"`
	// Conditions holds detailed condition entries describing resource state.
	// +optional
	Conditions []kmapi.Condition `json:"conditions,omitempty"`
}

func (*ClusterIamMemberStatus) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterIamMemberStatus.

func (*ClusterIamMemberStatus) DeepCopyInto

func (in *ClusterIamMemberStatus) DeepCopyInto(out *ClusterIamMemberStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterIamPolicy

// ClusterIamPolicy is the custom resource for a full IAM policy on a
// Google Dataproc cluster.
type ClusterIamPolicy struct {
	metav1.TypeMeta   `json:",inline,omitempty"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	// Spec defines the desired state of the IAM policy.
	Spec ClusterIamPolicySpec `json:"spec,omitempty"`
	// Status reflects the most recently observed state.
	Status ClusterIamPolicyStatus `json:"status,omitempty"`
}

func (*ClusterIamPolicy) DeepCopy

func (in *ClusterIamPolicy) DeepCopy() *ClusterIamPolicy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterIamPolicy.

func (*ClusterIamPolicy) DeepCopyInto

func (in *ClusterIamPolicy) DeepCopyInto(out *ClusterIamPolicy)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ClusterIamPolicy) DeepCopyObject

func (in *ClusterIamPolicy) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*ClusterIamPolicy) SetupWebhookWithManager

func (r *ClusterIamPolicy) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*ClusterIamPolicy) ValidateCreate

func (r *ClusterIamPolicy) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*ClusterIamPolicy) ValidateDelete

func (r *ClusterIamPolicy) ValidateDelete() error

ValidateDelete implements webhook.Validator so a webhook will be registered for the type

func (*ClusterIamPolicy) ValidateUpdate

func (r *ClusterIamPolicy) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

type ClusterIamPolicyList

// ClusterIamPolicyList is a list of ClusterIamPolicy objects, returned by
// list operations on the API.
type ClusterIamPolicyList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	// Items is a list of ClusterIamPolicy CRD objects
	Items []ClusterIamPolicy `json:"items,omitempty"`
}

ClusterIamPolicyList is a list of ClusterIamPolicies

func (*ClusterIamPolicyList) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterIamPolicyList.

func (*ClusterIamPolicyList) DeepCopyInto

func (in *ClusterIamPolicyList) DeepCopyInto(out *ClusterIamPolicyList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ClusterIamPolicyList) DeepCopyObject

func (in *ClusterIamPolicyList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

type ClusterIamPolicySpec

// ClusterIamPolicySpec is the desired-state envelope for a
// ClusterIamPolicy. Fields tagged tf:"-" are excluded from Terraform
// serialization.
type ClusterIamPolicySpec struct {
	// State holds the last observed resource values (not serialized to tf).
	State *ClusterIamPolicySpecResource `json:"state,omitempty" tf:"-"`

	// Resource is the desired configuration of the underlying cloud resource.
	Resource ClusterIamPolicySpecResource `json:"resource" tf:"resource"`

	// UpdatePolicy governs how changes to the resource are applied.
	UpdatePolicy base.UpdatePolicy `json:"updatePolicy,omitempty" tf:"-"`

	// TerminationPolicy governs what happens to the resource when the CR is deleted.
	TerminationPolicy base.TerminationPolicy `json:"terminationPolicy,omitempty" tf:"-"`

	// ProviderRef is a local reference to the provider configuration object.
	ProviderRef core.LocalObjectReference `json:"providerRef" tf:"-"`

	// BackendRef is an optional local reference to a backend object.
	BackendRef *core.LocalObjectReference `json:"backendRef,omitempty" tf:"-"`
}

func (*ClusterIamPolicySpec) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterIamPolicySpec.

func (*ClusterIamPolicySpec) DeepCopyInto

func (in *ClusterIamPolicySpec) DeepCopyInto(out *ClusterIamPolicySpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterIamPolicySpecResource

type ClusterIamPolicySpecResource struct {
	ID string `json:"id,omitempty" tf:"id,omitempty"`

	Cluster *string `json:"cluster" tf:"cluster"`
	// +optional
	Etag       *string `json:"etag,omitempty" tf:"etag"`
	PolicyData *string `json:"policyData" tf:"policy_data"`
	// +optional
	Project *string `json:"project,omitempty" tf:"project"`
	// +optional
	Region *string `json:"region,omitempty" tf:"region"`
}

func (*ClusterIamPolicySpecResource) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterIamPolicySpecResource.

func (*ClusterIamPolicySpecResource) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterIamPolicyStatus

type ClusterIamPolicyStatus struct {
	// Resource generation, which is updated on mutation by the API Server.
	// +optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty"`
	// +optional
	Phase status.Status `json:"phase,omitempty"`
	// +optional
	Conditions []kmapi.Condition `json:"conditions,omitempty"`
}

func (*ClusterIamPolicyStatus) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterIamPolicyStatus.

func (*ClusterIamPolicyStatus) DeepCopyInto

func (in *ClusterIamPolicyStatus) DeepCopyInto(out *ClusterIamPolicyStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterList

type ClusterList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	// Items is a list of Cluster CRD objects
	Items []Cluster `json:"items,omitempty"`
}

ClusterList is a list of Clusters

func (*ClusterList) DeepCopy

func (in *ClusterList) DeepCopy() *ClusterList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterList.

func (*ClusterList) DeepCopyInto

func (in *ClusterList) DeepCopyInto(out *ClusterList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ClusterList) DeepCopyObject

func (in *ClusterList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

type ClusterSpec

type ClusterSpec struct {
	State *ClusterSpecResource `json:"state,omitempty" tf:"-"`

	Resource ClusterSpecResource `json:"resource" tf:"resource"`

	UpdatePolicy base.UpdatePolicy `json:"updatePolicy,omitempty" tf:"-"`

	TerminationPolicy base.TerminationPolicy `json:"terminationPolicy,omitempty" tf:"-"`

	ProviderRef core.LocalObjectReference `json:"providerRef" tf:"-"`

	BackendRef *core.LocalObjectReference `json:"backendRef,omitempty" tf:"-"`
}

func (*ClusterSpec) DeepCopy

func (in *ClusterSpec) DeepCopy() *ClusterSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSpec.

func (*ClusterSpec) DeepCopyInto

func (in *ClusterSpec) DeepCopyInto(out *ClusterSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterSpecClusterConfig

type ClusterSpecClusterConfig struct {
	// The autoscaling policy config associated with the cluster.
	// +optional
	AutoscalingConfig *ClusterSpecClusterConfigAutoscalingConfig `json:"autoscalingConfig,omitempty" tf:"autoscaling_config"`
	//  The name of the cloud storage bucket ultimately used to house the staging data for the cluster. If staging_bucket is specified, it will contain this value, otherwise it will be the auto generated name.
	// +optional
	Bucket *string `json:"bucket,omitempty" tf:"bucket"`
	// The Customer managed encryption keys settings for the cluster.
	// +optional
	EncryptionConfig *ClusterSpecClusterConfigEncryptionConfig `json:"encryptionConfig,omitempty" tf:"encryption_config"`
	// Common config settings for resources of Google Compute Engine cluster instances, applicable to all instances in the cluster.
	// +optional
	GceClusterConfig *ClusterSpecClusterConfigGceClusterConfig `json:"gceClusterConfig,omitempty" tf:"gce_cluster_config"`
	// Commands to execute on each node after config is completed. You can specify multiple versions of these.
	// +optional
	InitializationAction []ClusterSpecClusterConfigInitializationAction `json:"initializationAction,omitempty" tf:"initialization_action"`
	// The Google Compute Engine config settings for the master/worker instances in a cluster.
	// +optional
	MasterConfig *ClusterSpecClusterConfigMasterConfig `json:"masterConfig,omitempty" tf:"master_config"`
	// The Google Compute Engine config settings for the additional (aka preemptible) instances in a cluster.
	// +optional
	PreemptibleWorkerConfig *ClusterSpecClusterConfigPreemptibleWorkerConfig `json:"preemptibleWorkerConfig,omitempty" tf:"preemptible_worker_config"`
	// Security related configuration.
	// +optional
	SecurityConfig *ClusterSpecClusterConfigSecurityConfig `json:"securityConfig,omitempty" tf:"security_config"`
	// The config settings for software inside the cluster.
	// +optional
	SoftwareConfig *ClusterSpecClusterConfigSoftwareConfig `json:"softwareConfig,omitempty" tf:"software_config"`
	// The Cloud Storage staging bucket used to stage files, such as Hadoop jars, between client machines and the cluster. Note: If you don't explicitly specify a staging_bucket then GCP will auto create / assign one for you. However, you are not guaranteed an auto generated bucket which is solely dedicated to your cluster; it may be shared with other clusters in the same region/zone also choosing to use the auto generation option.
	// +optional
	StagingBucket *string `json:"stagingBucket,omitempty" tf:"staging_bucket"`
	// The Cloud Storage temp bucket used to store ephemeral cluster and jobs data, such as Spark and MapReduce history files. Note: If you don't explicitly specify a temp_bucket then GCP will auto create / assign one for you.
	// +optional
	TempBucket *string `json:"tempBucket,omitempty" tf:"temp_bucket"`
	// The Google Compute Engine config settings for the master/worker instances in a cluster.
	// +optional
	WorkerConfig *ClusterSpecClusterConfigWorkerConfig `json:"workerConfig,omitempty" tf:"worker_config"`
}

func (*ClusterSpecClusterConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSpecClusterConfig.

func (*ClusterSpecClusterConfig) DeepCopyInto

func (in *ClusterSpecClusterConfig) DeepCopyInto(out *ClusterSpecClusterConfig)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterSpecClusterConfigAutoscalingConfig

type ClusterSpecClusterConfigAutoscalingConfig struct {
	// The autoscaling policy used by the cluster.
	PolicyURI *string `json:"policyURI" tf:"policy_uri"`
}

func (*ClusterSpecClusterConfigAutoscalingConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSpecClusterConfigAutoscalingConfig.

func (*ClusterSpecClusterConfigAutoscalingConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterSpecClusterConfigAutoscalingConfigCodec

type ClusterSpecClusterConfigAutoscalingConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (ClusterSpecClusterConfigAutoscalingConfigCodec) Decode

func (ClusterSpecClusterConfigAutoscalingConfigCodec) Encode

func (ClusterSpecClusterConfigAutoscalingConfigCodec) IsEmpty

type ClusterSpecClusterConfigCodec

type ClusterSpecClusterConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (ClusterSpecClusterConfigCodec) Decode

func (ClusterSpecClusterConfigCodec) Encode

func (ClusterSpecClusterConfigCodec) IsEmpty

type ClusterSpecClusterConfigEncryptionConfig

type ClusterSpecClusterConfigEncryptionConfig struct {
	// The Cloud KMS key name to use for PD disk encryption for all instances in the cluster.
	KmsKeyName *string `json:"kmsKeyName" tf:"kms_key_name"`
}

func (*ClusterSpecClusterConfigEncryptionConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSpecClusterConfigEncryptionConfig.

func (*ClusterSpecClusterConfigEncryptionConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterSpecClusterConfigEncryptionConfigCodec

type ClusterSpecClusterConfigEncryptionConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (ClusterSpecClusterConfigEncryptionConfigCodec) Decode

func (ClusterSpecClusterConfigEncryptionConfigCodec) Encode

func (ClusterSpecClusterConfigEncryptionConfigCodec) IsEmpty

type ClusterSpecClusterConfigGceClusterConfig

type ClusterSpecClusterConfigGceClusterConfig struct {
	// By default, clusters are not restricted to internal IP addresses, and will have ephemeral external IP addresses assigned to each instance. If set to true, all instances in the cluster will only have internal IP addresses. Note: Private Google Access (also known as privateIpGoogleAccess) must be enabled on the subnetwork that the cluster will be launched in.
	// +optional
	InternalIPOnly *bool `json:"internalIPOnly,omitempty" tf:"internal_ip_only"`
	// A map of the Compute Engine metadata entries to add to all instances
	// +optional
	Metadata *map[string]string `json:"metadata,omitempty" tf:"metadata"`
	// The name or self_link of the Google Compute Engine network the cluster will be part of. Conflicts with subnetwork. If neither is specified, this defaults to the "default" network.
	// +optional
	Network *string `json:"network,omitempty" tf:"network"`
	// The service account to be used by the Node VMs. If not specified, the "default" service account is used.
	// +optional
	ServiceAccount *string `json:"serviceAccount,omitempty" tf:"service_account"`
	// The set of Google API scopes to be made available on all of the node VMs under the service_account specified. These can be either FQDNs, or scope aliases.
	// +optional
	ServiceAccountScopes []string `json:"serviceAccountScopes,omitempty" tf:"service_account_scopes"`
	// Shielded Instance Config for clusters using Compute Engine Shielded VMs.
	// +optional
	ShieldedInstanceConfig *ClusterSpecClusterConfigGceClusterConfigShieldedInstanceConfig `json:"shieldedInstanceConfig,omitempty" tf:"shielded_instance_config"`
	// The name or self_link of the Google Compute Engine subnetwork the cluster will be part of. Conflicts with network.
	// +optional
	Subnetwork *string `json:"subnetwork,omitempty" tf:"subnetwork"`
	// The list of instance tags applied to instances in the cluster. Tags are used to identify valid sources or targets for network firewalls.
	// +optional
	Tags []string `json:"tags,omitempty" tf:"tags"`
	// The GCP zone where your data is stored and used (i.e. where the master and worker nodes will be created). If region is set to 'global' (default) then zone is mandatory, otherwise GCP is able to make use of Auto Zone Placement to determine this automatically for you. Note: This setting additionally determines and restricts which computing resources are available for use with other configs such as cluster_config.master_config.machine_type and cluster_config.worker_config.machine_type.
	// +optional
	Zone *string `json:"zone,omitempty" tf:"zone"`
}

func (*ClusterSpecClusterConfigGceClusterConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSpecClusterConfigGceClusterConfig.

func (*ClusterSpecClusterConfigGceClusterConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterSpecClusterConfigGceClusterConfigCodec

type ClusterSpecClusterConfigGceClusterConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (ClusterSpecClusterConfigGceClusterConfigCodec) Decode

func (ClusterSpecClusterConfigGceClusterConfigCodec) Encode

func (ClusterSpecClusterConfigGceClusterConfigCodec) IsEmpty

type ClusterSpecClusterConfigGceClusterConfigShieldedInstanceConfig

type ClusterSpecClusterConfigGceClusterConfigShieldedInstanceConfig struct {
	// Defines whether instances have integrity monitoring enabled.
	// +optional
	EnableIntegrityMonitoring *bool `json:"enableIntegrityMonitoring,omitempty" tf:"enable_integrity_monitoring"`
	// Defines whether instances have Secure Boot enabled.
	// +optional
	EnableSecureBoot *bool `json:"enableSecureBoot,omitempty" tf:"enable_secure_boot"`
	// Defines whether instances have the vTPM enabled.
	// +optional
	EnableVtpm *bool `json:"enableVtpm,omitempty" tf:"enable_vtpm"`
}

func (*ClusterSpecClusterConfigGceClusterConfigShieldedInstanceConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSpecClusterConfigGceClusterConfigShieldedInstanceConfig.

func (*ClusterSpecClusterConfigGceClusterConfigShieldedInstanceConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterSpecClusterConfigGceClusterConfigShieldedInstanceConfigCodec

type ClusterSpecClusterConfigGceClusterConfigShieldedInstanceConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (ClusterSpecClusterConfigGceClusterConfigShieldedInstanceConfigCodec) Decode

func (ClusterSpecClusterConfigGceClusterConfigShieldedInstanceConfigCodec) Encode

func (ClusterSpecClusterConfigGceClusterConfigShieldedInstanceConfigCodec) IsEmpty

type ClusterSpecClusterConfigInitializationAction

type ClusterSpecClusterConfigInitializationAction struct {
	// The script to be executed during initialization of the cluster. The script must be a GCS file with a gs:// prefix.
	Script *string `json:"script" tf:"script"`
	// The maximum duration (in seconds) which script is allowed to take to execute its action. GCP will default to a predetermined computed value if not set (currently 300).
	// +optional
	TimeoutSec *int64 `json:"timeoutSec,omitempty" tf:"timeout_sec"`
}

func (*ClusterSpecClusterConfigInitializationAction) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSpecClusterConfigInitializationAction.

func (*ClusterSpecClusterConfigInitializationAction) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterSpecClusterConfigMasterConfig

type ClusterSpecClusterConfigMasterConfig struct {
	// The Compute Engine accelerator (GPU) configuration for these instances. Can be specified multiple times.
	// +optional
	Accelerators []ClusterSpecClusterConfigMasterConfigAccelerators `json:"accelerators,omitempty" tf:"accelerators"`
	// Disk Config
	// +optional
	DiskConfig *ClusterSpecClusterConfigMasterConfigDiskConfig `json:"diskConfig,omitempty" tf:"disk_config"`
	// The URI for the image to use for this master/worker
	// +optional
	ImageURI *string `json:"imageURI,omitempty" tf:"image_uri"`
	// List of master/worker instance names which have been assigned to the cluster.
	// +optional
	InstanceNames []string `json:"instanceNames,omitempty" tf:"instance_names"`
	// The name of a Google Compute Engine machine type to create for the master/worker
	// +optional
	MachineType *string `json:"machineType,omitempty" tf:"machine_type"`
	// The name of a minimum generation of CPU family for the master/worker. If not specified, GCP will default to a predetermined computed value for each zone.
	// +optional
	MinCPUPlatform *string `json:"minCPUPlatform,omitempty" tf:"min_cpu_platform"`
	// Specifies the number of master/worker nodes to create. If not specified, GCP will default to a predetermined computed value.
	// +optional
	NumInstances *int64 `json:"numInstances,omitempty" tf:"num_instances"`
}

func (*ClusterSpecClusterConfigMasterConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSpecClusterConfigMasterConfig.

func (*ClusterSpecClusterConfigMasterConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterSpecClusterConfigMasterConfigAccelerators

type ClusterSpecClusterConfigMasterConfigAccelerators struct {
	// The number of the accelerator cards of this type exposed to this instance. Often restricted to one of 1, 2, 4, or 8.
	AcceleratorCount *int64 `json:"acceleratorCount" tf:"accelerator_count"`
	// The short name of the accelerator type to expose to this instance. For example, nvidia-tesla-k80.
	AcceleratorType *string `json:"acceleratorType" tf:"accelerator_type"`
}

func (*ClusterSpecClusterConfigMasterConfigAccelerators) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSpecClusterConfigMasterConfigAccelerators.

func (*ClusterSpecClusterConfigMasterConfigAccelerators) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterSpecClusterConfigMasterConfigCodec

type ClusterSpecClusterConfigMasterConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (ClusterSpecClusterConfigMasterConfigCodec) Decode

func (ClusterSpecClusterConfigMasterConfigCodec) Encode

func (ClusterSpecClusterConfigMasterConfigCodec) IsEmpty

type ClusterSpecClusterConfigMasterConfigDiskConfig

type ClusterSpecClusterConfigMasterConfigDiskConfig struct {
	// Size of the primary disk attached to each node, specified in GB. The primary disk contains the boot volume and system libraries, and the smallest allowed disk size is 10GB. GCP will default to a predetermined computed value if not set (currently 500GB). Note: If SSDs are not attached, it also contains the HDFS data blocks and Hadoop working directories.
	// +optional
	BootDiskSizeGb *int64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb"`
	// The disk type of the primary disk attached to each node. One of "pd-ssd" or "pd-standard". Defaults to "pd-standard".
	// +optional
	BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type"`
	// The amount of local SSD disks that will be attached to each master cluster node. Defaults to 0.
	// +optional
	NumLocalSsds *int64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds"`
}

func (*ClusterSpecClusterConfigMasterConfigDiskConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSpecClusterConfigMasterConfigDiskConfig.

func (*ClusterSpecClusterConfigMasterConfigDiskConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterSpecClusterConfigMasterConfigDiskConfigCodec

type ClusterSpecClusterConfigMasterConfigDiskConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (ClusterSpecClusterConfigMasterConfigDiskConfigCodec) Decode

func (ClusterSpecClusterConfigMasterConfigDiskConfigCodec) Encode

func (ClusterSpecClusterConfigMasterConfigDiskConfigCodec) IsEmpty

type ClusterSpecClusterConfigPreemptibleWorkerConfig

type ClusterSpecClusterConfigPreemptibleWorkerConfig struct {
	// Disk Config
	// +optional
	DiskConfig *ClusterSpecClusterConfigPreemptibleWorkerConfigDiskConfig `json:"diskConfig,omitempty" tf:"disk_config"`
	// List of preemptible instance names which have been assigned to the cluster.
	// +optional
	InstanceNames []string `json:"instanceNames,omitempty" tf:"instance_names"`
	// Specifies the number of preemptible nodes to create. Defaults to 0.
	// +optional
	NumInstances *int64 `json:"numInstances,omitempty" tf:"num_instances"`
}

func (*ClusterSpecClusterConfigPreemptibleWorkerConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSpecClusterConfigPreemptibleWorkerConfig.

func (*ClusterSpecClusterConfigPreemptibleWorkerConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterSpecClusterConfigPreemptibleWorkerConfigCodec

type ClusterSpecClusterConfigPreemptibleWorkerConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (ClusterSpecClusterConfigPreemptibleWorkerConfigCodec) Decode

func (ClusterSpecClusterConfigPreemptibleWorkerConfigCodec) Encode

func (ClusterSpecClusterConfigPreemptibleWorkerConfigCodec) IsEmpty

type ClusterSpecClusterConfigPreemptibleWorkerConfigDiskConfig

type ClusterSpecClusterConfigPreemptibleWorkerConfigDiskConfig struct {
	// Size of the primary disk attached to each preemptible worker node, specified in GB. The smallest allowed disk size is 10GB. GCP will default to a predetermined computed value if not set (currently 500GB). Note: If SSDs are not attached, it also contains the HDFS data blocks and Hadoop working directories.
	// +optional
	BootDiskSizeGb *int64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb"`
	// The disk type of the primary disk attached to each preemptible worker node. One of "pd-ssd" or "pd-standard". Defaults to "pd-standard".
	// +optional
	BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type"`
	// The amount of local SSD disks that will be attached to each preemptible worker node. Defaults to 0.
	// +optional
	NumLocalSsds *int64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds"`
}

func (*ClusterSpecClusterConfigPreemptibleWorkerConfigDiskConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSpecClusterConfigPreemptibleWorkerConfigDiskConfig.

func (*ClusterSpecClusterConfigPreemptibleWorkerConfigDiskConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterSpecClusterConfigPreemptibleWorkerConfigDiskConfigCodec

type ClusterSpecClusterConfigPreemptibleWorkerConfigDiskConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (ClusterSpecClusterConfigPreemptibleWorkerConfigDiskConfigCodec) Decode

func (ClusterSpecClusterConfigPreemptibleWorkerConfigDiskConfigCodec) Encode

func (ClusterSpecClusterConfigPreemptibleWorkerConfigDiskConfigCodec) IsEmpty

type ClusterSpecClusterConfigSecurityConfig

type ClusterSpecClusterConfigSecurityConfig struct {
	// Kerberos related configuration
	KerberosConfig *ClusterSpecClusterConfigSecurityConfigKerberosConfig `json:"kerberosConfig" tf:"kerberos_config"`
}

func (*ClusterSpecClusterConfigSecurityConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSpecClusterConfigSecurityConfig.

func (*ClusterSpecClusterConfigSecurityConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterSpecClusterConfigSecurityConfigCodec

type ClusterSpecClusterConfigSecurityConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (ClusterSpecClusterConfigSecurityConfigCodec) Decode

func (ClusterSpecClusterConfigSecurityConfigCodec) Encode

func (ClusterSpecClusterConfigSecurityConfigCodec) IsEmpty

type ClusterSpecClusterConfigSecurityConfigKerberosConfig

type ClusterSpecClusterConfigSecurityConfigKerberosConfig struct {
	// The admin server (IP or hostname) for the remote trusted realm in a cross realm trust relationship.
	// +optional
	CrossRealmTrustAdminServer *string `json:"crossRealmTrustAdminServer,omitempty" tf:"cross_realm_trust_admin_server"`
	// The KDC (IP or hostname) for the remote trusted realm in a cross realm trust relationship.
	// +optional
	CrossRealmTrustKdc *string `json:"crossRealmTrustKdc,omitempty" tf:"cross_realm_trust_kdc"`
	// The remote realm the Dataproc on-cluster KDC will trust, should the user enable cross realm trust.
	// +optional
	CrossRealmTrustRealm *string `json:"crossRealmTrustRealm,omitempty" tf:"cross_realm_trust_realm"`
	// The Cloud Storage URI of a KMS encrypted file containing the shared password between the on-cluster
	// Kerberos realm and the remote trusted realm, in a cross realm trust relationship.
	// +optional
	CrossRealmTrustSharedPasswordURI *string `json:"crossRealmTrustSharedPasswordURI,omitempty" tf:"cross_realm_trust_shared_password_uri"`
	// Flag to indicate whether to Kerberize the cluster.
	// +optional
	EnableKerberos *bool `json:"enableKerberos,omitempty" tf:"enable_kerberos"`
	// The Cloud Storage URI of a KMS encrypted file containing the master key of the KDC database.
	// +optional
	KdcDbKeyURI *string `json:"kdcDbKeyURI,omitempty" tf:"kdc_db_key_uri"`
	// The Cloud Storage URI of a KMS encrypted file containing the password to the user provided key. For the self-signed certificate, this password is generated by Dataproc.
	// +optional
	KeyPasswordURI *string `json:"keyPasswordURI,omitempty" tf:"key_password_uri"`
	// The Cloud Storage URI of a KMS encrypted file containing
	// the password to the user provided keystore. For the self-signed certificate, this password is generated
	// by Dataproc.
	// +optional
	KeystorePasswordURI *string `json:"keystorePasswordURI,omitempty" tf:"keystore_password_uri"`
	// The Cloud Storage URI of the keystore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
	// +optional
	KeystoreURI *string `json:"keystoreURI,omitempty" tf:"keystore_uri"`
	// The uri of the KMS key used to encrypt various sensitive files.
	KmsKeyURI *string `json:"kmsKeyURI" tf:"kms_key_uri"`
	// The name of the on-cluster Kerberos realm. If not specified, the uppercased domain of hostnames will be the realm.
	// +optional
	Realm *string `json:"realm,omitempty" tf:"realm"`
	// The Cloud Storage URI of a KMS encrypted file containing the root principal password.
	RootPrincipalPasswordURI *string `json:"rootPrincipalPasswordURI" tf:"root_principal_password_uri"`
	// The lifetime of the ticket granting ticket, in hours.
	// +optional
	TgtLifetimeHours *int64 `json:"tgtLifetimeHours,omitempty" tf:"tgt_lifetime_hours"`
	// The Cloud Storage URI of a KMS encrypted file containing the password to the user provided truststore. For the self-signed certificate, this password is generated by Dataproc.
	// +optional
	TruststorePasswordURI *string `json:"truststorePasswordURI,omitempty" tf:"truststore_password_uri"`
	// The Cloud Storage URI of the truststore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
	// +optional
	TruststoreURI *string `json:"truststoreURI,omitempty" tf:"truststore_uri"`
}

func (*ClusterSpecClusterConfigSecurityConfigKerberosConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSpecClusterConfigSecurityConfigKerberosConfig.

func (*ClusterSpecClusterConfigSecurityConfigKerberosConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterSpecClusterConfigSecurityConfigKerberosConfigCodec

type ClusterSpecClusterConfigSecurityConfigKerberosConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (ClusterSpecClusterConfigSecurityConfigKerberosConfigCodec) Decode

func (ClusterSpecClusterConfigSecurityConfigKerberosConfigCodec) Encode

func (ClusterSpecClusterConfigSecurityConfigKerberosConfigCodec) IsEmpty

type ClusterSpecClusterConfigSoftwareConfig

type ClusterSpecClusterConfigSoftwareConfig struct {
	// The Cloud Dataproc image version to use for the cluster - this controls the sets of software versions installed onto the nodes when you create clusters. If not specified, defaults to the latest version.
	// +optional
	ImageVersion *string `json:"imageVersion,omitempty" tf:"image_version"`
	// The set of optional components to activate on the cluster.
	// +optional
	OptionalComponents []string `json:"optionalComponents,omitempty" tf:"optional_components"`
	// A list of override and additional properties (key/value pairs) used to modify various aspects of the common configuration files used when creating a cluster.
	// +optional
	OverrideProperties *map[string]string `json:"overrideProperties,omitempty" tf:"override_properties"`
	// A list of the properties used to set the daemon config files. This will include any values supplied by the user via cluster_config.software_config.override_properties
	// +optional
	Properties map[string]string `json:"properties,omitempty" tf:"properties"`
}

func (*ClusterSpecClusterConfigSoftwareConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSpecClusterConfigSoftwareConfig.

func (*ClusterSpecClusterConfigSoftwareConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterSpecClusterConfigSoftwareConfigCodec

type ClusterSpecClusterConfigSoftwareConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (ClusterSpecClusterConfigSoftwareConfigCodec) Decode

func (ClusterSpecClusterConfigSoftwareConfigCodec) Encode

func (ClusterSpecClusterConfigSoftwareConfigCodec) IsEmpty

type ClusterSpecClusterConfigWorkerConfig

type ClusterSpecClusterConfigWorkerConfig struct {
	// The Compute Engine accelerator (GPU) configuration for these instances. Can be specified multiple times.
	// +optional
	Accelerators []ClusterSpecClusterConfigWorkerConfigAccelerators `json:"accelerators,omitempty" tf:"accelerators"`
	// Disk Config
	// +optional
	DiskConfig *ClusterSpecClusterConfigWorkerConfigDiskConfig `json:"diskConfig,omitempty" tf:"disk_config"`
	// The URI for the image to use for this master/worker
	// +optional
	ImageURI *string `json:"imageURI,omitempty" tf:"image_uri"`
	// List of master/worker instance names which have been assigned to the cluster.
	// +optional
	InstanceNames []string `json:"instanceNames,omitempty" tf:"instance_names"`
	// The name of a Google Compute Engine machine type to create for the master/worker
	// +optional
	MachineType *string `json:"machineType,omitempty" tf:"machine_type"`
	// The name of a minimum generation of CPU family for the master/worker. If not specified, GCP will default to a predetermined computed value for each zone.
	// +optional
	MinCPUPlatform *string `json:"minCPUPlatform,omitempty" tf:"min_cpu_platform"`
	// Specifies the number of master/worker nodes to create. If not specified, GCP will default to a predetermined computed value.
	// +optional
	NumInstances *int64 `json:"numInstances,omitempty" tf:"num_instances"`
}

func (*ClusterSpecClusterConfigWorkerConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSpecClusterConfigWorkerConfig.

func (*ClusterSpecClusterConfigWorkerConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterSpecClusterConfigWorkerConfigAccelerators

type ClusterSpecClusterConfigWorkerConfigAccelerators struct {
	// The number of the accelerator cards of this type exposed to this instance. Often restricted to one of 1, 2, 4, or 8.
	AcceleratorCount *int64 `json:"acceleratorCount" tf:"accelerator_count"`
	// The short name of the accelerator type to expose to this instance. For example, nvidia-tesla-k80.
	AcceleratorType *string `json:"acceleratorType" tf:"accelerator_type"`
}

func (*ClusterSpecClusterConfigWorkerConfigAccelerators) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSpecClusterConfigWorkerConfigAccelerators.

func (*ClusterSpecClusterConfigWorkerConfigAccelerators) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterSpecClusterConfigWorkerConfigCodec

// ClusterSpecClusterConfigWorkerConfigCodec is a stateless jsoniter codec
// (Decode/Encode/IsEmpty) for the worker-config sub-object; deepcopy
// generation is disabled for it (+k8s:deepcopy-gen=false).
type ClusterSpecClusterConfigWorkerConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (ClusterSpecClusterConfigWorkerConfigCodec) Decode

func (ClusterSpecClusterConfigWorkerConfigCodec) Encode

func (ClusterSpecClusterConfigWorkerConfigCodec) IsEmpty

type ClusterSpecClusterConfigWorkerConfigDiskConfig

// ClusterSpecClusterConfigWorkerConfigDiskConfig configures the boot disk and
// local SSDs attached to each worker node.
type ClusterSpecClusterConfigWorkerConfigDiskConfig struct {
	// Size of the primary disk attached to each node, specified in GB. The primary disk contains the boot volume and system libraries, and the smallest allowed disk size is 10GB. GCP will default to a predetermined computed value if not set (currently 500GB). Note: If SSDs are not attached, it also contains the HDFS data blocks and Hadoop working directories.
	// +optional
	BootDiskSizeGb *int64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb"`
	// The disk type of the primary disk attached to each node. One of "pd-ssd" or "pd-standard". Defaults to "pd-standard".
	// +optional
	BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type"`
	// The amount of local SSD disks that will be attached to each master cluster node. Defaults to 0.
	// NOTE(review): this type is the *worker* disk config; "master" above looks
	// copied from the master-config text — confirm against upstream docs.
	// +optional
	NumLocalSsds *int64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds"`
}

func (*ClusterSpecClusterConfigWorkerConfigDiskConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSpecClusterConfigWorkerConfigDiskConfig.

func (*ClusterSpecClusterConfigWorkerConfigDiskConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterSpecClusterConfigWorkerConfigDiskConfigCodec

// ClusterSpecClusterConfigWorkerConfigDiskConfigCodec is a stateless jsoniter
// codec (Decode/Encode/IsEmpty) for the worker disk-config sub-object;
// deepcopy generation is disabled for it (+k8s:deepcopy-gen=false).
type ClusterSpecClusterConfigWorkerConfigDiskConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (ClusterSpecClusterConfigWorkerConfigDiskConfigCodec) Decode

func (ClusterSpecClusterConfigWorkerConfigDiskConfigCodec) Encode

func (ClusterSpecClusterConfigWorkerConfigDiskConfigCodec) IsEmpty

type ClusterSpecResource

// ClusterSpecResource mirrors the Terraform google_dataproc_cluster resource
// schema; the tf struct tags carry the Terraform attribute names.
type ClusterSpecResource struct {
	// Custom timeouts for create/update/delete operations.
	Timeouts *base.ResourceTimeout `json:"timeouts,omitempty" tf:"timeouts"`

	// Terraform resource ID; populated by the provider, not set by the user.
	ID string `json:"id,omitempty" tf:"id,omitempty"`

	// Allows you to configure various aspects of the cluster.
	// +optional
	ClusterConfig *ClusterSpecClusterConfig `json:"clusterConfig,omitempty" tf:"cluster_config"`
	// The timeout duration which allows graceful decommissioning when you change the number of worker nodes directly through a terraform apply
	// +optional
	GracefulDecommissionTimeout *string `json:"gracefulDecommissionTimeout,omitempty" tf:"graceful_decommission_timeout"`
	// The list of labels (key/value pairs) to be applied to instances in the cluster. GCP generates some itself including goog-dataproc-cluster-name which is the name of the cluster.
	// +optional
	Labels *map[string]string `json:"labels,omitempty" tf:"labels"`
	// The name of the cluster, unique within the project and zone.
	Name *string `json:"name" tf:"name"`
	// The ID of the project in which the cluster will exist. If it is not provided, the provider project is used.
	// +optional
	Project *string `json:"project,omitempty" tf:"project"`
	// The region in which the cluster and associated nodes will be created in. Defaults to global.
	// +optional
	Region *string `json:"region,omitempty" tf:"region"`
}

func (*ClusterSpecResource) DeepCopy

func (in *ClusterSpecResource) DeepCopy() *ClusterSpecResource

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSpecResource.

func (*ClusterSpecResource) DeepCopyInto

func (in *ClusterSpecResource) DeepCopyInto(out *ClusterSpecResource)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ClusterStatus

// ClusterStatus captures the observed state of a Cluster resource.
type ClusterStatus struct {
	// Resource generation, which is updated on mutation by the API Server.
	// +optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty"`
	// Current phase of the resource as reported by the controller.
	// +optional
	Phase status.Status `json:"phase,omitempty"`
	// Conditions describing the resource's state transitions.
	// +optional
	Conditions []kmapi.Condition `json:"conditions,omitempty"`
}

func (*ClusterStatus) DeepCopy

func (in *ClusterStatus) DeepCopy() *ClusterStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterStatus.

func (*ClusterStatus) DeepCopyInto

func (in *ClusterStatus) DeepCopyInto(out *ClusterStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type Job

// Job is the CRD object for a Dataproc job, pairing the desired Spec with the
// observed Status.
type Job struct {
	metav1.TypeMeta   `json:",inline,omitempty"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	Spec              JobSpec   `json:"spec,omitempty"`
	Status            JobStatus `json:"status,omitempty"`
}

func (*Job) DeepCopy

func (in *Job) DeepCopy() *Job

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Job.

func (*Job) DeepCopyInto

func (in *Job) DeepCopyInto(out *Job)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*Job) DeepCopyObject

func (in *Job) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*Job) SetupWebhookWithManager

func (r *Job) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*Job) ValidateCreate

func (r *Job) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*Job) ValidateDelete

func (r *Job) ValidateDelete() error

ValidateDelete implements webhook.Validator so a webhook will be registered for the type

func (*Job) ValidateUpdate

func (r *Job) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

type JobIamBinding

// JobIamBinding is the CRD object for a Dataproc job IAM binding, pairing the
// desired Spec with the observed Status.
type JobIamBinding struct {
	metav1.TypeMeta   `json:",inline,omitempty"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	Spec              JobIamBindingSpec   `json:"spec,omitempty"`
	Status            JobIamBindingStatus `json:"status,omitempty"`
}

func (*JobIamBinding) DeepCopy

func (in *JobIamBinding) DeepCopy() *JobIamBinding

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobIamBinding.

func (*JobIamBinding) DeepCopyInto

func (in *JobIamBinding) DeepCopyInto(out *JobIamBinding)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*JobIamBinding) DeepCopyObject

func (in *JobIamBinding) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*JobIamBinding) SetupWebhookWithManager

func (r *JobIamBinding) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*JobIamBinding) ValidateCreate

func (r *JobIamBinding) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*JobIamBinding) ValidateDelete

func (r *JobIamBinding) ValidateDelete() error

ValidateDelete implements webhook.Validator so a webhook will be registered for the type

func (*JobIamBinding) ValidateUpdate

func (r *JobIamBinding) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

type JobIamBindingList

// JobIamBindingList is a list of JobIamBindings
type JobIamBindingList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	// Items is a list of JobIamBinding CRD objects
	Items []JobIamBinding `json:"items,omitempty"`
}

JobIamBindingList is a list of JobIamBindings

func (*JobIamBindingList) DeepCopy

func (in *JobIamBindingList) DeepCopy() *JobIamBindingList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobIamBindingList.

func (*JobIamBindingList) DeepCopyInto

func (in *JobIamBindingList) DeepCopyInto(out *JobIamBindingList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*JobIamBindingList) DeepCopyObject

func (in *JobIamBindingList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

type JobIamBindingSpec

// JobIamBindingSpec defines the desired state of a JobIamBinding. Fields
// tagged tf:"-" are Kubeform bookkeeping and are excluded from the Terraform
// resource payload.
type JobIamBindingSpec struct {
	// Last resolved resource state; excluded from the Terraform payload.
	State *JobIamBindingSpecResource `json:"state,omitempty" tf:"-"`

	// Desired Terraform resource configuration.
	Resource JobIamBindingSpecResource `json:"resource" tf:"resource"`

	// Policy governing how updates are applied.
	UpdatePolicy base.UpdatePolicy `json:"updatePolicy,omitempty" tf:"-"`

	// Policy governing what happens when the object is deleted.
	TerminationPolicy base.TerminationPolicy `json:"terminationPolicy,omitempty" tf:"-"`

	// Reference to a namespace-local object — presumably the provider
	// credential/config; confirm with the controller.
	ProviderRef core.LocalObjectReference `json:"providerRef" tf:"-"`

	// Optional reference to a namespace-local backend configuration object.
	BackendRef *core.LocalObjectReference `json:"backendRef,omitempty" tf:"-"`
}

func (*JobIamBindingSpec) DeepCopy

func (in *JobIamBindingSpec) DeepCopy() *JobIamBindingSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobIamBindingSpec.

func (*JobIamBindingSpec) DeepCopyInto

func (in *JobIamBindingSpec) DeepCopyInto(out *JobIamBindingSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobIamBindingSpecCondition

// JobIamBindingSpecCondition describes an IAM condition (title, expression,
// optional description) attached to the binding.
type JobIamBindingSpecCondition struct {
	// Human-readable description of the condition.
	// +optional
	Description *string `json:"description,omitempty" tf:"description"`
	// Condition expression evaluated by IAM.
	Expression  *string `json:"expression" tf:"expression"`
	// Short title identifying the condition.
	Title       *string `json:"title" tf:"title"`
}

func (*JobIamBindingSpecCondition) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobIamBindingSpecCondition.

func (*JobIamBindingSpecCondition) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobIamBindingSpecConditionCodec

// JobIamBindingSpecConditionCodec is a stateless jsoniter codec
// (Decode/Encode/IsEmpty) for the condition sub-object; deepcopy generation
// is disabled for it (+k8s:deepcopy-gen=false).
type JobIamBindingSpecConditionCodec struct {
}

+k8s:deepcopy-gen=false

func (JobIamBindingSpecConditionCodec) Decode

func (JobIamBindingSpecConditionCodec) Encode

func (JobIamBindingSpecConditionCodec) IsEmpty

type JobIamBindingSpecResource

// JobIamBindingSpecResource mirrors the Terraform
// google_dataproc_job_iam_binding resource schema.
type JobIamBindingSpecResource struct {
	// Terraform resource ID; populated by the provider, not set by the user.
	ID string `json:"id,omitempty" tf:"id,omitempty"`

	// IAM condition restricting when the binding applies.
	// +optional
	Condition *JobIamBindingSpecCondition `json:"condition,omitempty" tf:"condition"`
	// Etag of the IAM policy; output used for concurrency control.
	// +optional
	Etag    *string  `json:"etag,omitempty" tf:"etag"`
	// The Dataproc job to which the binding applies.
	JobID   *string  `json:"jobID" tf:"job_id"`
	// Identities the role is granted to.
	Members []string `json:"members" tf:"members"`
	// +optional
	Project *string `json:"project,omitempty" tf:"project"`
	// +optional
	Region *string `json:"region,omitempty" tf:"region"`
	// The role granted to the members.
	Role   *string `json:"role" tf:"role"`
}

func (*JobIamBindingSpecResource) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobIamBindingSpecResource.

func (*JobIamBindingSpecResource) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobIamBindingStatus

// JobIamBindingStatus captures the observed state of a JobIamBinding resource.
type JobIamBindingStatus struct {
	// Resource generation, which is updated on mutation by the API Server.
	// +optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty"`
	// Current phase of the resource as reported by the controller.
	// +optional
	Phase status.Status `json:"phase,omitempty"`
	// Conditions describing the resource's state transitions.
	// +optional
	Conditions []kmapi.Condition `json:"conditions,omitempty"`
}

func (*JobIamBindingStatus) DeepCopy

func (in *JobIamBindingStatus) DeepCopy() *JobIamBindingStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobIamBindingStatus.

func (*JobIamBindingStatus) DeepCopyInto

func (in *JobIamBindingStatus) DeepCopyInto(out *JobIamBindingStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobIamMember

// JobIamMember is the CRD object for a Dataproc job IAM member, pairing the
// desired Spec with the observed Status.
type JobIamMember struct {
	metav1.TypeMeta   `json:",inline,omitempty"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	Spec              JobIamMemberSpec   `json:"spec,omitempty"`
	Status            JobIamMemberStatus `json:"status,omitempty"`
}

func (*JobIamMember) DeepCopy

func (in *JobIamMember) DeepCopy() *JobIamMember

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobIamMember.

func (*JobIamMember) DeepCopyInto

func (in *JobIamMember) DeepCopyInto(out *JobIamMember)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*JobIamMember) DeepCopyObject

func (in *JobIamMember) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*JobIamMember) SetupWebhookWithManager

func (r *JobIamMember) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*JobIamMember) ValidateCreate

func (r *JobIamMember) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*JobIamMember) ValidateDelete

func (r *JobIamMember) ValidateDelete() error

ValidateDelete implements webhook.Validator so a webhook will be registered for the type

func (*JobIamMember) ValidateUpdate

func (r *JobIamMember) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

type JobIamMemberList

// JobIamMemberList is a list of JobIamMembers
type JobIamMemberList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	// Items is a list of JobIamMember CRD objects
	Items []JobIamMember `json:"items,omitempty"`
}

JobIamMemberList is a list of JobIamMembers

func (*JobIamMemberList) DeepCopy

func (in *JobIamMemberList) DeepCopy() *JobIamMemberList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobIamMemberList.

func (*JobIamMemberList) DeepCopyInto

func (in *JobIamMemberList) DeepCopyInto(out *JobIamMemberList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*JobIamMemberList) DeepCopyObject

func (in *JobIamMemberList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

type JobIamMemberSpec

// JobIamMemberSpec defines the desired state of a JobIamMember. Fields tagged
// tf:"-" are Kubeform bookkeeping and are excluded from the Terraform
// resource payload.
type JobIamMemberSpec struct {
	// Last resolved resource state; excluded from the Terraform payload.
	State *JobIamMemberSpecResource `json:"state,omitempty" tf:"-"`

	// Desired Terraform resource configuration.
	Resource JobIamMemberSpecResource `json:"resource" tf:"resource"`

	// Policy governing how updates are applied.
	UpdatePolicy base.UpdatePolicy `json:"updatePolicy,omitempty" tf:"-"`

	// Policy governing what happens when the object is deleted.
	TerminationPolicy base.TerminationPolicy `json:"terminationPolicy,omitempty" tf:"-"`

	// Reference to a namespace-local object — presumably the provider
	// credential/config; confirm with the controller.
	ProviderRef core.LocalObjectReference `json:"providerRef" tf:"-"`

	// Optional reference to a namespace-local backend configuration object.
	BackendRef *core.LocalObjectReference `json:"backendRef,omitempty" tf:"-"`
}

func (*JobIamMemberSpec) DeepCopy

func (in *JobIamMemberSpec) DeepCopy() *JobIamMemberSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobIamMemberSpec.

func (*JobIamMemberSpec) DeepCopyInto

func (in *JobIamMemberSpec) DeepCopyInto(out *JobIamMemberSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobIamMemberSpecCondition

// JobIamMemberSpecCondition describes an IAM condition (title, expression,
// optional description) attached to the membership.
type JobIamMemberSpecCondition struct {
	// Human-readable description of the condition.
	// +optional
	Description *string `json:"description,omitempty" tf:"description"`
	// Condition expression evaluated by IAM.
	Expression  *string `json:"expression" tf:"expression"`
	// Short title identifying the condition.
	Title       *string `json:"title" tf:"title"`
}

func (*JobIamMemberSpecCondition) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobIamMemberSpecCondition.

func (*JobIamMemberSpecCondition) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobIamMemberSpecConditionCodec

// JobIamMemberSpecConditionCodec is a stateless jsoniter codec
// (Decode/Encode/IsEmpty) for the condition sub-object; deepcopy generation
// is disabled for it (+k8s:deepcopy-gen=false).
type JobIamMemberSpecConditionCodec struct {
}

+k8s:deepcopy-gen=false

func (JobIamMemberSpecConditionCodec) Decode

func (JobIamMemberSpecConditionCodec) Encode

func (JobIamMemberSpecConditionCodec) IsEmpty

type JobIamMemberSpecResource

// JobIamMemberSpecResource mirrors the Terraform
// google_dataproc_job_iam_member resource schema.
type JobIamMemberSpecResource struct {
	// Terraform resource ID; populated by the provider, not set by the user.
	ID string `json:"id,omitempty" tf:"id,omitempty"`

	// IAM condition restricting when the membership applies.
	// +optional
	Condition *JobIamMemberSpecCondition `json:"condition,omitempty" tf:"condition"`
	// Etag of the IAM policy; output used for concurrency control.
	// +optional
	Etag   *string `json:"etag,omitempty" tf:"etag"`
	// The Dataproc job to which the membership applies.
	JobID  *string `json:"jobID" tf:"job_id"`
	// The identity the role is granted to.
	Member *string `json:"member" tf:"member"`
	// +optional
	Project *string `json:"project,omitempty" tf:"project"`
	// +optional
	Region *string `json:"region,omitempty" tf:"region"`
	// The role granted to the member.
	Role   *string `json:"role" tf:"role"`
}

func (*JobIamMemberSpecResource) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobIamMemberSpecResource.

func (*JobIamMemberSpecResource) DeepCopyInto

func (in *JobIamMemberSpecResource) DeepCopyInto(out *JobIamMemberSpecResource)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobIamMemberStatus

// JobIamMemberStatus captures the observed state of a JobIamMember resource.
type JobIamMemberStatus struct {
	// Resource generation, which is updated on mutation by the API Server.
	// +optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty"`
	// Current phase of the resource as reported by the controller.
	// +optional
	Phase status.Status `json:"phase,omitempty"`
	// Conditions describing the resource's state transitions.
	// +optional
	Conditions []kmapi.Condition `json:"conditions,omitempty"`
}

func (*JobIamMemberStatus) DeepCopy

func (in *JobIamMemberStatus) DeepCopy() *JobIamMemberStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobIamMemberStatus.

func (*JobIamMemberStatus) DeepCopyInto

func (in *JobIamMemberStatus) DeepCopyInto(out *JobIamMemberStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobIamPolicy

// JobIamPolicy is the CRD object for a Dataproc job IAM policy, pairing the
// desired Spec with the observed Status.
type JobIamPolicy struct {
	metav1.TypeMeta   `json:",inline,omitempty"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	Spec              JobIamPolicySpec   `json:"spec,omitempty"`
	Status            JobIamPolicyStatus `json:"status,omitempty"`
}

func (*JobIamPolicy) DeepCopy

func (in *JobIamPolicy) DeepCopy() *JobIamPolicy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobIamPolicy.

func (*JobIamPolicy) DeepCopyInto

func (in *JobIamPolicy) DeepCopyInto(out *JobIamPolicy)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*JobIamPolicy) DeepCopyObject

func (in *JobIamPolicy) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*JobIamPolicy) SetupWebhookWithManager

func (r *JobIamPolicy) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*JobIamPolicy) ValidateCreate

func (r *JobIamPolicy) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*JobIamPolicy) ValidateDelete

func (r *JobIamPolicy) ValidateDelete() error

ValidateDelete implements webhook.Validator so a webhook will be registered for the type

func (*JobIamPolicy) ValidateUpdate

func (r *JobIamPolicy) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

type JobIamPolicyList

// JobIamPolicyList is a list of JobIamPolicies
type JobIamPolicyList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	// Items is a list of JobIamPolicy CRD objects
	Items []JobIamPolicy `json:"items,omitempty"`
}

JobIamPolicyList is a list of JobIamPolicies

func (*JobIamPolicyList) DeepCopy

func (in *JobIamPolicyList) DeepCopy() *JobIamPolicyList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobIamPolicyList.

func (*JobIamPolicyList) DeepCopyInto

func (in *JobIamPolicyList) DeepCopyInto(out *JobIamPolicyList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*JobIamPolicyList) DeepCopyObject

func (in *JobIamPolicyList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

type JobIamPolicySpec

// JobIamPolicySpec defines the desired state of a JobIamPolicy. Fields tagged
// tf:"-" are Kubeform bookkeeping and are excluded from the Terraform
// resource payload.
type JobIamPolicySpec struct {
	// Last resolved resource state; excluded from the Terraform payload.
	State *JobIamPolicySpecResource `json:"state,omitempty" tf:"-"`

	// Desired Terraform resource configuration.
	Resource JobIamPolicySpecResource `json:"resource" tf:"resource"`

	// Policy governing how updates are applied.
	UpdatePolicy base.UpdatePolicy `json:"updatePolicy,omitempty" tf:"-"`

	// Policy governing what happens when the object is deleted.
	TerminationPolicy base.TerminationPolicy `json:"terminationPolicy,omitempty" tf:"-"`

	// Reference to a namespace-local object — presumably the provider
	// credential/config; confirm with the controller.
	ProviderRef core.LocalObjectReference `json:"providerRef" tf:"-"`

	// Optional reference to a namespace-local backend configuration object.
	BackendRef *core.LocalObjectReference `json:"backendRef,omitempty" tf:"-"`
}

func (*JobIamPolicySpec) DeepCopy

func (in *JobIamPolicySpec) DeepCopy() *JobIamPolicySpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobIamPolicySpec.

func (*JobIamPolicySpec) DeepCopyInto

func (in *JobIamPolicySpec) DeepCopyInto(out *JobIamPolicySpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobIamPolicySpecResource

// JobIamPolicySpecResource mirrors the Terraform
// google_dataproc_job_iam_policy resource schema.
type JobIamPolicySpecResource struct {
	// Terraform resource ID; populated by the provider, not set by the user.
	ID string `json:"id,omitempty" tf:"id,omitempty"`

	// Etag of the IAM policy; output used for concurrency control.
	// +optional
	Etag       *string `json:"etag,omitempty" tf:"etag"`
	// The Dataproc job the policy is attached to.
	JobID      *string `json:"jobID" tf:"job_id"`
	// The full IAM policy to apply, as a serialized policy document.
	PolicyData *string `json:"policyData" tf:"policy_data"`
	// +optional
	Project *string `json:"project,omitempty" tf:"project"`
	// +optional
	Region *string `json:"region,omitempty" tf:"region"`
}

func (*JobIamPolicySpecResource) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobIamPolicySpecResource.

func (*JobIamPolicySpecResource) DeepCopyInto

func (in *JobIamPolicySpecResource) DeepCopyInto(out *JobIamPolicySpecResource)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobIamPolicyStatus

// JobIamPolicyStatus captures the observed state of a JobIamPolicy resource.
type JobIamPolicyStatus struct {
	// Resource generation, which is updated on mutation by the API Server.
	// +optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty"`
	// Current phase of the resource as reported by the controller.
	// +optional
	Phase status.Status `json:"phase,omitempty"`
	// Conditions describing the resource's state transitions.
	// +optional
	Conditions []kmapi.Condition `json:"conditions,omitempty"`
}

func (*JobIamPolicyStatus) DeepCopy

func (in *JobIamPolicyStatus) DeepCopy() *JobIamPolicyStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobIamPolicyStatus.

func (*JobIamPolicyStatus) DeepCopyInto

func (in *JobIamPolicyStatus) DeepCopyInto(out *JobIamPolicyStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobList

// JobList is a list of Jobs
type JobList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	// Items is a list of Job CRD objects
	Items []Job `json:"items,omitempty"`
}

JobList is a list of Jobs

func (*JobList) DeepCopy

func (in *JobList) DeepCopy() *JobList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobList.

func (*JobList) DeepCopyInto

func (in *JobList) DeepCopyInto(out *JobList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*JobList) DeepCopyObject

func (in *JobList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

type JobSpec

// JobSpec defines the desired state of a Job. Fields tagged tf:"-" are
// Kubeform bookkeeping and are excluded from the Terraform resource payload.
type JobSpec struct {
	// Last resolved resource state; excluded from the Terraform payload.
	State *JobSpecResource `json:"state,omitempty" tf:"-"`

	// Desired Terraform resource configuration.
	Resource JobSpecResource `json:"resource" tf:"resource"`

	// Policy governing how updates are applied.
	UpdatePolicy base.UpdatePolicy `json:"updatePolicy,omitempty" tf:"-"`

	// Policy governing what happens when the object is deleted.
	TerminationPolicy base.TerminationPolicy `json:"terminationPolicy,omitempty" tf:"-"`

	// Reference to a namespace-local object — presumably the provider
	// credential/config; confirm with the controller.
	ProviderRef core.LocalObjectReference `json:"providerRef" tf:"-"`

	// Optional reference to a namespace-local backend configuration object.
	BackendRef *core.LocalObjectReference `json:"backendRef,omitempty" tf:"-"`
}

func (*JobSpec) DeepCopy

func (in *JobSpec) DeepCopy() *JobSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpec.

func (*JobSpec) DeepCopyInto

func (in *JobSpec) DeepCopyInto(out *JobSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecHadoopConfig

// JobSpecHadoopConfig configures a Hadoop job: main class or jar, arguments,
// auxiliary files, logging, and runtime properties.
// NOTE(review): several field comments below say "Spark"; they appear copied
// from the Spark config docs — confirm against upstream documentation.
type JobSpecHadoopConfig struct {
	// HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip.
	// +optional
	ArchiveUris []string `json:"archiveUris,omitempty" tf:"archive_uris"`
	// The arguments to pass to the driver.
	// +optional
	Args []string `json:"args,omitempty" tf:"args"`
	// HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
	// +optional
	FileUris []string `json:"fileUris,omitempty" tf:"file_uris"`
	// HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
	// +optional
	JarFileUris []string `json:"jarFileUris,omitempty" tf:"jar_file_uris"`
	// The runtime logging config of the job
	// +optional
	LoggingConfig *JobSpecHadoopConfigLoggingConfig `json:"loggingConfig,omitempty" tf:"logging_config"`
	// The class containing the main method of the driver. Must be in a provided jar or jar that is already on the classpath. Conflicts with main_jar_file_uri
	// +optional
	MainClass *string `json:"mainClass,omitempty" tf:"main_class"`
	// The HCFS URI of jar file containing the driver jar. Conflicts with main_class
	// +optional
	MainJarFileURI *string `json:"mainJarFileURI,omitempty" tf:"main_jar_file_uri"`
	// A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
	// +optional
	Properties *map[string]string `json:"properties,omitempty" tf:"properties"`
}

func (*JobSpecHadoopConfig) DeepCopy

func (in *JobSpecHadoopConfig) DeepCopy() *JobSpecHadoopConfig

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecHadoopConfig.

func (*JobSpecHadoopConfig) DeepCopyInto

func (in *JobSpecHadoopConfig) DeepCopyInto(out *JobSpecHadoopConfig)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecHadoopConfigCodec

// JobSpecHadoopConfigCodec is a stateless jsoniter codec
// (Decode/Encode/IsEmpty) for the Hadoop config sub-object; deepcopy
// generation is disabled for it (+k8s:deepcopy-gen=false).
type JobSpecHadoopConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecHadoopConfigCodec) Decode

func (JobSpecHadoopConfigCodec) Encode

func (JobSpecHadoopConfigCodec) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream)

func (JobSpecHadoopConfigCodec) IsEmpty

type JobSpecHadoopConfigLoggingConfig

// JobSpecHadoopConfigLoggingConfig sets per-package log levels for the
// Hadoop job driver.
type JobSpecHadoopConfigLoggingConfig struct {
	// Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
	DriverLogLevels *map[string]string `json:"driverLogLevels" tf:"driver_log_levels"`
}

func (*JobSpecHadoopConfigLoggingConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecHadoopConfigLoggingConfig.

func (*JobSpecHadoopConfigLoggingConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecHadoopConfigLoggingConfigCodec

// JobSpecHadoopConfigLoggingConfigCodec is a stateless jsoniter codec
// (Decode/Encode/IsEmpty) for the Hadoop logging-config sub-object; deepcopy
// generation is disabled for it (+k8s:deepcopy-gen=false).
type JobSpecHadoopConfigLoggingConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecHadoopConfigLoggingConfigCodec) Decode

func (JobSpecHadoopConfigLoggingConfigCodec) Encode

func (JobSpecHadoopConfigLoggingConfigCodec) IsEmpty

type JobSpecHiveConfig

// JobSpecHiveConfig configures a Hive job: queries (inline or from a file),
// failure behavior, extra jars, properties, and script variables.
type JobSpecHiveConfig struct {
	// Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. Defaults to false.
	// +optional
	ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure"`
	// HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs.
	// +optional
	JarFileUris []string `json:"jarFileUris,omitempty" tf:"jar_file_uris"`
	// A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
	// +optional
	Properties *map[string]string `json:"properties,omitempty" tf:"properties"`
	// HCFS URI of file containing Hive script to execute as the job. Conflicts with query_list
	// +optional
	QueryFileURI *string `json:"queryFileURI,omitempty" tf:"query_file_uri"`
	// The list of Hive queries or statements to execute as part of the job. Conflicts with query_file_uri
	// +optional
	QueryList []string `json:"queryList,omitempty" tf:"query_list"`
	// Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).
	// +optional
	ScriptVariables *map[string]string `json:"scriptVariables,omitempty" tf:"script_variables"`
}

func (*JobSpecHiveConfig) DeepCopy

func (in *JobSpecHiveConfig) DeepCopy() *JobSpecHiveConfig

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecHiveConfig.

func (*JobSpecHiveConfig) DeepCopyInto

func (in *JobSpecHiveConfig) DeepCopyInto(out *JobSpecHiveConfig)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecHiveConfigCodec

// JobSpecHiveConfigCodec is a stateless jsoniter codec
// (Decode/Encode/IsEmpty) for the Hive config sub-object; deepcopy generation
// is disabled for it (+k8s:deepcopy-gen=false).
type JobSpecHiveConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecHiveConfigCodec) Decode

func (JobSpecHiveConfigCodec) Encode

func (JobSpecHiveConfigCodec) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream)

func (JobSpecHiveConfigCodec) IsEmpty

type JobSpecPigConfig

// JobSpecPigConfig configures a Pig job: queries (inline or from a file),
// failure behavior, extra jars, logging, properties, and script variables.
// NOTE(review): the query_file_uri/query_list comments below say "Hive"; they
// appear copied from the Hive config docs — confirm against upstream docs.
type JobSpecPigConfig struct {
	// Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. Defaults to false.
	// +optional
	ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure"`
	// HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.
	// +optional
	JarFileUris []string `json:"jarFileUris,omitempty" tf:"jar_file_uris"`
	// The runtime logging config of the job
	// +optional
	LoggingConfig *JobSpecPigConfigLoggingConfig `json:"loggingConfig,omitempty" tf:"logging_config"`
	// A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code.
	// +optional
	Properties *map[string]string `json:"properties,omitempty" tf:"properties"`
	// HCFS URI of file containing Hive script to execute as the job. Conflicts with query_list
	// +optional
	QueryFileURI *string `json:"queryFileURI,omitempty" tf:"query_file_uri"`
	// The list of Hive queries or statements to execute as part of the job. Conflicts with query_file_uri
	// +optional
	QueryList []string `json:"queryList,omitempty" tf:"query_list"`
	// Mapping of query variable names to values (equivalent to the Pig command: name=[value]).
	// +optional
	ScriptVariables *map[string]string `json:"scriptVariables,omitempty" tf:"script_variables"`
}

func (*JobSpecPigConfig) DeepCopy

func (in *JobSpecPigConfig) DeepCopy() *JobSpecPigConfig

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecPigConfig.

func (*JobSpecPigConfig) DeepCopyInto

func (in *JobSpecPigConfig) DeepCopyInto(out *JobSpecPigConfig)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecPigConfigCodec

// JobSpecPigConfigCodec is a stateless jsoniter codec (Decode/Encode/IsEmpty)
// for the Pig config sub-object; deepcopy generation is disabled for it
// (+k8s:deepcopy-gen=false).
type JobSpecPigConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecPigConfigCodec) Decode

func (JobSpecPigConfigCodec) Encode

func (JobSpecPigConfigCodec) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream)

func (JobSpecPigConfigCodec) IsEmpty

type JobSpecPigConfigLoggingConfig

// JobSpecPigConfigLoggingConfig holds the runtime logging configuration for a Pig job.
type JobSpecPigConfigLoggingConfig struct {
	// Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
	DriverLogLevels *map[string]string `json:"driverLogLevels" tf:"driver_log_levels"`
}

func (*JobSpecPigConfigLoggingConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecPigConfigLoggingConfig.

func (*JobSpecPigConfigLoggingConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecPigConfigLoggingConfigCodec

// JobSpecPigConfigLoggingConfigCodec is a stateless custom jsoniter codec for
// JobSpecPigConfigLoggingConfig. Excluded from deepcopy generation.
type JobSpecPigConfigLoggingConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecPigConfigLoggingConfigCodec) Decode

func (JobSpecPigConfigLoggingConfigCodec) Encode

func (JobSpecPigConfigLoggingConfigCodec) IsEmpty

type JobSpecPlacement

// JobSpecPlacement identifies the Dataproc cluster the job is submitted to.
type JobSpecPlacement struct {
	// The name of the cluster where the job will be submitted
	ClusterName *string `json:"clusterName" tf:"cluster_name"`
	// Output-only. A cluster UUID generated by the Cloud Dataproc service when the job is submitted
	// +optional
	ClusterUUID *string `json:"clusterUUID,omitempty" tf:"cluster_uuid"`
}

func (*JobSpecPlacement) DeepCopy

func (in *JobSpecPlacement) DeepCopy() *JobSpecPlacement

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecPlacement.

func (*JobSpecPlacement) DeepCopyInto

func (in *JobSpecPlacement) DeepCopyInto(out *JobSpecPlacement)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecPlacementCodec

// JobSpecPlacementCodec is a stateless custom jsoniter codec for JobSpecPlacement.
// Excluded from deepcopy generation.
type JobSpecPlacementCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecPlacementCodec) Decode

func (JobSpecPlacementCodec) Encode

func (JobSpecPlacementCodec) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream)

func (JobSpecPlacementCodec) IsEmpty

type JobSpecPysparkConfig

// JobSpecPysparkConfig describes a PySpark job submitted to a Dataproc cluster.
type JobSpecPysparkConfig struct {
	// Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip
	// +optional
	ArchiveUris []string `json:"archiveUris,omitempty" tf:"archive_uris"`
	// Optional. The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission
	// +optional
	Args []string `json:"args,omitempty" tf:"args"`
	// Optional. HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks
	// +optional
	FileUris []string `json:"fileUris,omitempty" tf:"file_uris"`
	// Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks
	// +optional
	JarFileUris []string `json:"jarFileUris,omitempty" tf:"jar_file_uris"`
	// The runtime logging config of the job
	// +optional
	LoggingConfig *JobSpecPysparkConfigLoggingConfig `json:"loggingConfig,omitempty" tf:"logging_config"`
	// Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file
	MainPythonFileURI *string `json:"mainPythonFileURI" tf:"main_python_file_uri"`
	// Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code
	// +optional
	Properties *map[string]string `json:"properties,omitempty" tf:"properties"`
	// Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip
	// +optional
	PythonFileUris []string `json:"pythonFileUris,omitempty" tf:"python_file_uris"`
}

func (*JobSpecPysparkConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecPysparkConfig.

func (*JobSpecPysparkConfig) DeepCopyInto

func (in *JobSpecPysparkConfig) DeepCopyInto(out *JobSpecPysparkConfig)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecPysparkConfigCodec

// JobSpecPysparkConfigCodec is a stateless custom jsoniter codec for JobSpecPysparkConfig.
// Excluded from deepcopy generation.
type JobSpecPysparkConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecPysparkConfigCodec) Decode

func (JobSpecPysparkConfigCodec) Encode

func (JobSpecPysparkConfigCodec) IsEmpty

type JobSpecPysparkConfigLoggingConfig

// JobSpecPysparkConfigLoggingConfig holds the runtime logging configuration for a PySpark job.
type JobSpecPysparkConfigLoggingConfig struct {
	// Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
	DriverLogLevels *map[string]string `json:"driverLogLevels" tf:"driver_log_levels"`
}

func (*JobSpecPysparkConfigLoggingConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecPysparkConfigLoggingConfig.

func (*JobSpecPysparkConfigLoggingConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecPysparkConfigLoggingConfigCodec

// JobSpecPysparkConfigLoggingConfigCodec is a stateless custom jsoniter codec for
// JobSpecPysparkConfigLoggingConfig. Excluded from deepcopy generation.
type JobSpecPysparkConfigLoggingConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecPysparkConfigLoggingConfigCodec) Decode

func (JobSpecPysparkConfigLoggingConfigCodec) Encode

func (JobSpecPysparkConfigLoggingConfigCodec) IsEmpty

type JobSpecReference

// JobSpecReference carries the user- or server-assigned identity of the job.
type JobSpecReference struct {
	// The job ID, which must be unique within the project. The job ID is generated by the server upon job submission or provided by the user as a means to perform retries without creating duplicate jobs
	// +optional
	JobID *string `json:"jobID,omitempty" tf:"job_id"`
}

func (*JobSpecReference) DeepCopy

func (in *JobSpecReference) DeepCopy() *JobSpecReference

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecReference.

func (*JobSpecReference) DeepCopyInto

func (in *JobSpecReference) DeepCopyInto(out *JobSpecReference)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecReferenceCodec

// JobSpecReferenceCodec is a stateless custom jsoniter codec for JobSpecReference.
// Excluded from deepcopy generation.
type JobSpecReferenceCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecReferenceCodec) Decode

func (JobSpecReferenceCodec) Encode

func (JobSpecReferenceCodec) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream)

func (JobSpecReferenceCodec) IsEmpty

type JobSpecResource

// JobSpecResource mirrors the Terraform google_dataproc_job resource schema:
// exactly one of the *Config blocks (Hadoop, Hive, Pig, PySpark, Spark, SparkSQL)
// selects the job type, and Placement names the target cluster.
type JobSpecResource struct {
	// Terraform operation timeouts (create/update/delete) for this resource.
	Timeouts *base.ResourceTimeout `json:"timeouts,omitempty" tf:"timeouts"`

	// Terraform resource ID; populated by the provider after creation.
	ID string `json:"id,omitempty" tf:"id,omitempty"`

	// Output-only. If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
	// +optional
	DriverControlsFilesURI *string `json:"driverControlsFilesURI,omitempty" tf:"driver_controls_files_uri"`
	// Output-only. A URI pointing to the location of the stdout of the job's driver program
	// +optional
	DriverOutputResourceURI *string `json:"driverOutputResourceURI,omitempty" tf:"driver_output_resource_uri"`
	// By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
	// +optional
	ForceDelete *bool `json:"forceDelete,omitempty" tf:"force_delete"`
	// The config of Hadoop job
	// +optional
	HadoopConfig *JobSpecHadoopConfig `json:"hadoopConfig,omitempty" tf:"hadoop_config"`
	// The config of hive job
	// +optional
	HiveConfig *JobSpecHiveConfig `json:"hiveConfig,omitempty" tf:"hive_config"`
	// Optional. The labels to associate with this job.
	// +optional
	Labels *map[string]string `json:"labels,omitempty" tf:"labels"`
	// The config of Pig job.
	// +optional
	PigConfig *JobSpecPigConfig `json:"pigConfig,omitempty" tf:"pig_config"`
	// The config of job placement.
	Placement *JobSpecPlacement `json:"placement" tf:"placement"`
	// The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.
	// +optional
	Project *string `json:"project,omitempty" tf:"project"`
	// The config of pySpark job.
	// +optional
	PysparkConfig *JobSpecPysparkConfig `json:"pysparkConfig,omitempty" tf:"pyspark_config"`
	// The reference of the job
	// +optional
	Reference *JobSpecReference `json:"reference,omitempty" tf:"reference"`
	// The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.
	// +optional
	Region *string `json:"region,omitempty" tf:"region"`
	// Optional. Job scheduling configuration.
	// +optional
	Scheduling *JobSpecScheduling `json:"scheduling,omitempty" tf:"scheduling"`
	// The config of the Spark job.
	// +optional
	SparkConfig *JobSpecSparkConfig `json:"sparkConfig,omitempty" tf:"spark_config"`
	// The config of SparkSql job
	// +optional
	SparksqlConfig *JobSpecSparksqlConfig `json:"sparksqlConfig,omitempty" tf:"sparksql_config"`
	// The status of the job.
	// +optional
	Status []JobSpecStatus `json:"status,omitempty" tf:"status"`
}

func (*JobSpecResource) DeepCopy

func (in *JobSpecResource) DeepCopy() *JobSpecResource

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecResource.

func (*JobSpecResource) DeepCopyInto

func (in *JobSpecResource) DeepCopyInto(out *JobSpecResource)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecScheduling

// JobSpecScheduling bounds automatic driver restarts before the job is declared failed.
type JobSpecScheduling struct {
	// Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
	MaxFailuresPerHour *int64 `json:"maxFailuresPerHour" tf:"max_failures_per_hour"`
	// Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
	MaxFailuresTotal *int64 `json:"maxFailuresTotal" tf:"max_failures_total"`
}

func (*JobSpecScheduling) DeepCopy

func (in *JobSpecScheduling) DeepCopy() *JobSpecScheduling

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecScheduling.

func (*JobSpecScheduling) DeepCopyInto

func (in *JobSpecScheduling) DeepCopyInto(out *JobSpecScheduling)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecSchedulingCodec

// JobSpecSchedulingCodec is a stateless custom jsoniter codec for JobSpecScheduling.
// Excluded from deepcopy generation.
type JobSpecSchedulingCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecSchedulingCodec) Decode

func (JobSpecSchedulingCodec) Encode

func (JobSpecSchedulingCodec) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream)

func (JobSpecSchedulingCodec) IsEmpty

type JobSpecSparkConfig

// JobSpecSparkConfig describes a Spark job submitted to a Dataproc cluster.
// Exactly one of MainClass or MainJarFileURI selects the driver entry point.
type JobSpecSparkConfig struct {
	// HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
	// +optional
	ArchiveUris []string `json:"archiveUris,omitempty" tf:"archive_uris"`
	// The arguments to pass to the driver.
	// +optional
	Args []string `json:"args,omitempty" tf:"args"`
	// HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
	// +optional
	FileUris []string `json:"fileUris,omitempty" tf:"file_uris"`
	// HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
	// +optional
	JarFileUris []string `json:"jarFileUris,omitempty" tf:"jar_file_uris"`
	// The runtime logging config of the job
	// +optional
	LoggingConfig *JobSpecSparkConfigLoggingConfig `json:"loggingConfig,omitempty" tf:"logging_config"`
	// The class containing the main method of the driver. Must be in a provided jar or jar that is already on the classpath. Conflicts with main_jar_file_uri
	// +optional
	MainClass *string `json:"mainClass,omitempty" tf:"main_class"`
	// The HCFS URI of jar file containing the driver jar. Conflicts with main_class
	// +optional
	MainJarFileURI *string `json:"mainJarFileURI,omitempty" tf:"main_jar_file_uri"`
	// A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
	// +optional
	Properties *map[string]string `json:"properties,omitempty" tf:"properties"`
}

func (*JobSpecSparkConfig) DeepCopy

func (in *JobSpecSparkConfig) DeepCopy() *JobSpecSparkConfig

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecSparkConfig.

func (*JobSpecSparkConfig) DeepCopyInto

func (in *JobSpecSparkConfig) DeepCopyInto(out *JobSpecSparkConfig)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecSparkConfigCodec

// JobSpecSparkConfigCodec is a stateless custom jsoniter codec for JobSpecSparkConfig.
// Excluded from deepcopy generation.
type JobSpecSparkConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecSparkConfigCodec) Decode

func (JobSpecSparkConfigCodec) Encode

func (JobSpecSparkConfigCodec) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream)

func (JobSpecSparkConfigCodec) IsEmpty

type JobSpecSparkConfigLoggingConfig

// JobSpecSparkConfigLoggingConfig holds the runtime logging configuration for a Spark job.
type JobSpecSparkConfigLoggingConfig struct {
	// Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
	DriverLogLevels *map[string]string `json:"driverLogLevels" tf:"driver_log_levels"`
}

func (*JobSpecSparkConfigLoggingConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecSparkConfigLoggingConfig.

func (*JobSpecSparkConfigLoggingConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecSparkConfigLoggingConfigCodec

// JobSpecSparkConfigLoggingConfigCodec is a stateless custom jsoniter codec for
// JobSpecSparkConfigLoggingConfig. Excluded from deepcopy generation.
type JobSpecSparkConfigLoggingConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecSparkConfigLoggingConfigCodec) Decode

func (JobSpecSparkConfigLoggingConfigCodec) Encode

func (JobSpecSparkConfigLoggingConfigCodec) IsEmpty

type JobSpecSparksqlConfig

// JobSpecSparksqlConfig describes a Spark SQL job submitted to a Dataproc cluster.
// Queries come from exactly one of QueryFileURI or QueryList.
type JobSpecSparksqlConfig struct {
	// HCFS URIs of jar files to be added to the Spark CLASSPATH.
	// +optional
	JarFileUris []string `json:"jarFileUris,omitempty" tf:"jar_file_uris"`
	// The runtime logging config of the job
	// +optional
	LoggingConfig *JobSpecSparksqlConfigLoggingConfig `json:"loggingConfig,omitempty" tf:"logging_config"`
	// A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.
	// +optional
	Properties *map[string]string `json:"properties,omitempty" tf:"properties"`
	// The HCFS URI of the script that contains SQL queries. Conflicts with query_list
	// +optional
	QueryFileURI *string `json:"queryFileURI,omitempty" tf:"query_file_uri"`
	// The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri
	// +optional
	QueryList []string `json:"queryList,omitempty" tf:"query_list"`
	// Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
	// +optional
	ScriptVariables *map[string]string `json:"scriptVariables,omitempty" tf:"script_variables"`
}

func (*JobSpecSparksqlConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecSparksqlConfig.

func (*JobSpecSparksqlConfig) DeepCopyInto

func (in *JobSpecSparksqlConfig) DeepCopyInto(out *JobSpecSparksqlConfig)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecSparksqlConfigCodec

// JobSpecSparksqlConfigCodec is a stateless custom jsoniter codec for JobSpecSparksqlConfig.
// Excluded from deepcopy generation.
type JobSpecSparksqlConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecSparksqlConfigCodec) Decode

func (JobSpecSparksqlConfigCodec) Encode

func (JobSpecSparksqlConfigCodec) IsEmpty

type JobSpecSparksqlConfigLoggingConfig

// JobSpecSparksqlConfigLoggingConfig holds the runtime logging configuration for a Spark SQL job.
type JobSpecSparksqlConfigLoggingConfig struct {
	// Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
	DriverLogLevels *map[string]string `json:"driverLogLevels" tf:"driver_log_levels"`
}

func (*JobSpecSparksqlConfigLoggingConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecSparksqlConfigLoggingConfig.

func (*JobSpecSparksqlConfigLoggingConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecSparksqlConfigLoggingConfigCodec

// JobSpecSparksqlConfigLoggingConfigCodec is a stateless custom jsoniter codec for
// JobSpecSparksqlConfigLoggingConfig. Excluded from deepcopy generation.
type JobSpecSparksqlConfigLoggingConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecSparksqlConfigLoggingConfigCodec) Decode

func (JobSpecSparksqlConfigLoggingConfigCodec) Encode

func (JobSpecSparksqlConfigLoggingConfigCodec) IsEmpty

type JobSpecStatus

// JobSpecStatus mirrors the output-only status block reported by the Dataproc service for a job.
type JobSpecStatus struct {
	// Output-only. Optional job state details, such as an error description if the state is ERROR
	// +optional
	Details *string `json:"details,omitempty" tf:"details"`
	// Output-only. A state message specifying the overall job state
	// +optional
	State *string `json:"state,omitempty" tf:"state"`
	// Output-only. The time when this state was entered
	// +optional
	StateStartTime *string `json:"stateStartTime,omitempty" tf:"state_start_time"`
	// Output-only. Additional state information, which includes status reported by the agent
	// +optional
	Substate *string `json:"substate,omitempty" tf:"substate"`
}

func (*JobSpecStatus) DeepCopy

func (in *JobSpecStatus) DeepCopy() *JobSpecStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecStatus.

func (*JobSpecStatus) DeepCopyInto

func (in *JobSpecStatus) DeepCopyInto(out *JobSpecStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobStatus

// JobStatus is the Kubernetes-facing status of the Job custom resource
// (reconciliation progress), distinct from the Dataproc-side JobSpecStatus.
type JobStatus struct {
	// Resource generation, which is updated on mutation by the API Server.
	// +optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty"`
	// Current reconciliation phase of the resource.
	// +optional
	Phase status.Status `json:"phase,omitempty"`
	// Conditions describing the latest observed state transitions.
	// +optional
	Conditions []kmapi.Condition `json:"conditions,omitempty"`
}

func (*JobStatus) DeepCopy

func (in *JobStatus) DeepCopy() *JobStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobStatus.

func (*JobStatus) DeepCopyInto

func (in *JobStatus) DeepCopyInto(out *JobStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplate

// WorkflowTemplate is the Schema for the Dataproc WorkflowTemplate custom resource.
type WorkflowTemplate struct {
	metav1.TypeMeta   `json:",inline,omitempty"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	Spec              WorkflowTemplateSpec   `json:"spec,omitempty"`
	Status            WorkflowTemplateStatus `json:"status,omitempty"`
}

func (*WorkflowTemplate) DeepCopy

func (in *WorkflowTemplate) DeepCopy() *WorkflowTemplate

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplate.

func (*WorkflowTemplate) DeepCopyInto

func (in *WorkflowTemplate) DeepCopyInto(out *WorkflowTemplate)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*WorkflowTemplate) DeepCopyObject

func (in *WorkflowTemplate) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*WorkflowTemplate) SetupWebhookWithManager

func (r *WorkflowTemplate) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*WorkflowTemplate) ValidateCreate

func (r *WorkflowTemplate) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*WorkflowTemplate) ValidateDelete

func (r *WorkflowTemplate) ValidateDelete() error

ValidateDelete implements webhook.Validator so a webhook will be registered for the type

func (*WorkflowTemplate) ValidateUpdate

func (r *WorkflowTemplate) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

type WorkflowTemplateList

// WorkflowTemplateList is a list of WorkflowTemplates.
type WorkflowTemplateList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	// Items is a list of WorkflowTemplate CRD objects
	Items []WorkflowTemplate `json:"items,omitempty"`
}

WorkflowTemplateList is a list of WorkflowTemplates

func (*WorkflowTemplateList) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateList.

func (*WorkflowTemplateList) DeepCopyInto

func (in *WorkflowTemplateList) DeepCopyInto(out *WorkflowTemplateList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*WorkflowTemplateList) DeepCopyObject

func (in *WorkflowTemplateList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

type WorkflowTemplateSpec

// WorkflowTemplateSpec defines the desired state of a WorkflowTemplate.
// Fields tagged tf:"-" are Kubeform control fields, not part of the Terraform schema.
type WorkflowTemplateSpec struct {
	// Last-applied Terraform state snapshot; managed by the controller.
	State *WorkflowTemplateSpecResource `json:"state,omitempty" tf:"-"`

	// Desired Terraform resource configuration.
	Resource WorkflowTemplateSpecResource `json:"resource" tf:"resource"`

	// Policy governing how updates to the resource are applied.
	UpdatePolicy base.UpdatePolicy `json:"updatePolicy,omitempty" tf:"-"`

	// Policy governing what happens to the cloud resource on CR deletion.
	TerminationPolicy base.TerminationPolicy `json:"terminationPolicy,omitempty" tf:"-"`

	// Reference to the provider (credentials) object to use.
	ProviderRef core.LocalObjectReference `json:"providerRef" tf:"-"`

	// Optional reference to the Terraform backend configuration object.
	BackendRef *core.LocalObjectReference `json:"backendRef,omitempty" tf:"-"`
}

func (*WorkflowTemplateSpec) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpec.

func (*WorkflowTemplateSpec) DeepCopyInto

func (in *WorkflowTemplateSpec) DeepCopyInto(out *WorkflowTemplateSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecJobs

// WorkflowTemplateSpecJobs is one step of a workflow template: exactly one of the
// *Job fields selects the job type, and PrerequisiteStepIDS orders steps into a DAG.
type WorkflowTemplateSpecJobs struct {
	// Optional. Job is a Hadoop job.
	// +optional
	HadoopJob *WorkflowTemplateSpecJobsHadoopJob `json:"hadoopJob,omitempty" tf:"hadoop_job"`
	// Optional. Job is a Hive job.
	// +optional
	HiveJob *WorkflowTemplateSpecJobsHiveJob `json:"hiveJob,omitempty" tf:"hive_job"`
	// Optional. The labels to associate with this job. Label keys must be between 1 and 63 characters long, and must conform to the following regular expression: \p{Ll}\p{Lo}{0,62} Label values must be between 1 and 63 characters long, and must conform to the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63} No more than 32 labels can be associated with a given job.
	// +optional
	Labels *map[string]string `json:"labels,omitempty" tf:"labels"`
	// Optional. Job is a Pig job.
	// +optional
	PigJob *WorkflowTemplateSpecJobsPigJob `json:"pigJob,omitempty" tf:"pig_job"`
	// Optional. The optional list of prerequisite job step_ids. If not specified, the job will start at the beginning of workflow.
	// +optional
	PrerequisiteStepIDS []string `json:"prerequisiteStepIDS,omitempty" tf:"prerequisite_step_ids"`
	// Optional. Job is a Presto job.
	// +optional
	PrestoJob *WorkflowTemplateSpecJobsPrestoJob `json:"prestoJob,omitempty" tf:"presto_job"`
	// Optional. Job is a PySpark job.
	// +optional
	PysparkJob *WorkflowTemplateSpecJobsPysparkJob `json:"pysparkJob,omitempty" tf:"pyspark_job"`
	// Optional. Job scheduling configuration.
	// +optional
	Scheduling *WorkflowTemplateSpecJobsScheduling `json:"scheduling,omitempty" tf:"scheduling"`
	// Optional. Job is a Spark job.
	// +optional
	SparkJob *WorkflowTemplateSpecJobsSparkJob `json:"sparkJob,omitempty" tf:"spark_job"`
	// Optional. Job is a SparkR job.
	// +optional
	SparkRJob *WorkflowTemplateSpecJobsSparkRJob `json:"sparkRJob,omitempty" tf:"spark_r_job"`
	// Optional. Job is a SparkSql job.
	// +optional
	SparkSQLJob *WorkflowTemplateSpecJobsSparkSQLJob `json:"sparkSQLJob,omitempty" tf:"spark_sql_job"`
	// Required. The step id. The id must be unique among all jobs within the template. The step id is used as prefix for job id, as job `goog-dataproc-workflow-step-id` label, and in prerequisiteStepIds field from other steps. The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of between 3 and 50 characters.
	StepID *string `json:"stepID" tf:"step_id"`
}

func (*WorkflowTemplateSpecJobs) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecJobs.

func (*WorkflowTemplateSpecJobs) DeepCopyInto

func (in *WorkflowTemplateSpecJobs) DeepCopyInto(out *WorkflowTemplateSpecJobs)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecJobsHadoopJob

// WorkflowTemplateSpecJobsHadoopJob describes a Hadoop MapReduce job within a workflow step.
// Exactly one of MainClass or MainJarFileURI selects the driver entry point.
type WorkflowTemplateSpecJobsHadoopJob struct {
	// Optional. HCFS URIs of archives to be extracted in the working directory of Hadoop drivers and tasks. Supported file types: .jar, .tar, .tar.gz, .tgz, or .zip.
	// +optional
	ArchiveUris []string `json:"archiveUris,omitempty" tf:"archive_uris"`
	// Optional. The arguments to pass to the driver. Do not include arguments, such as `-libjars` or `-Dfoo=bar`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
	// +optional
	Args []string `json:"args,omitempty" tf:"args"`
	// Optional. HCFS (Hadoop Compatible Filesystem) URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.
	// +optional
	FileUris []string `json:"fileUris,omitempty" tf:"file_uris"`
	// Optional. Jar file URIs to add to the CLASSPATHs of the Hadoop driver and tasks.
	// +optional
	JarFileUris []string `json:"jarFileUris,omitempty" tf:"jar_file_uris"`
	// Optional. The runtime log config for job execution.
	// +optional
	LoggingConfig *WorkflowTemplateSpecJobsHadoopJobLoggingConfig `json:"loggingConfig,omitempty" tf:"logging_config"`
	// The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in `jar_file_uris`.
	// +optional
	MainClass *string `json:"mainClass,omitempty" tf:"main_class"`
	// The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'
	// +optional
	MainJarFileURI *string `json:"mainJarFileURI,omitempty" tf:"main_jar_file_uri"`
	// Optional. A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.
	// +optional
	Properties *map[string]string `json:"properties,omitempty" tf:"properties"`
}

func (*WorkflowTemplateSpecJobsHadoopJob) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecJobsHadoopJob.

func (*WorkflowTemplateSpecJobsHadoopJob) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecJobsHadoopJobCodec

// WorkflowTemplateSpecJobsHadoopJobCodec is a stateless custom jsoniter codec for
// WorkflowTemplateSpecJobsHadoopJob. Excluded from deepcopy generation.
type WorkflowTemplateSpecJobsHadoopJobCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecJobsHadoopJobCodec) Decode

func (WorkflowTemplateSpecJobsHadoopJobCodec) Encode

func (WorkflowTemplateSpecJobsHadoopJobCodec) IsEmpty

type WorkflowTemplateSpecJobsHadoopJobLoggingConfig

// WorkflowTemplateSpecJobsHadoopJobLoggingConfig holds the runtime log config for a Hadoop job step.
type WorkflowTemplateSpecJobsHadoopJobLoggingConfig struct {
	// The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +optional
	DriverLogLevels *map[string]string `json:"driverLogLevels,omitempty" tf:"driver_log_levels"`
}

func (*WorkflowTemplateSpecJobsHadoopJobLoggingConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecJobsHadoopJobLoggingConfig.

func (*WorkflowTemplateSpecJobsHadoopJobLoggingConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecJobsHadoopJobLoggingConfigCodec

// WorkflowTemplateSpecJobsHadoopJobLoggingConfigCodec is a stateless custom jsoniter codec
// for WorkflowTemplateSpecJobsHadoopJobLoggingConfig. Excluded from deepcopy generation.
type WorkflowTemplateSpecJobsHadoopJobLoggingConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecJobsHadoopJobLoggingConfigCodec) Decode

func (WorkflowTemplateSpecJobsHadoopJobLoggingConfigCodec) Encode

func (WorkflowTemplateSpecJobsHadoopJobLoggingConfigCodec) IsEmpty

type WorkflowTemplateSpecJobsHiveJob

// WorkflowTemplateSpecJobsHiveJob describes a Hive job within a workflow step.
// Queries come from exactly one of QueryFileURI or QueryList.
type WorkflowTemplateSpecJobsHiveJob struct {
	// Optional. Whether to continue executing queries if a query fails. The default value is `false`. Setting to `true` can be useful when executing independent parallel queries.
	// +optional
	ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure"`
	// Optional. HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs.
	// +optional
	JarFileUris []string `json:"jarFileUris,omitempty" tf:"jar_file_uris"`
	// Optional. A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
	// +optional
	Properties *map[string]string `json:"properties,omitempty" tf:"properties"`
	// The HCFS URI of the script that contains Hive queries.
	// +optional
	QueryFileURI *string `json:"queryFileURI,omitempty" tf:"query_file_uri"`
	// A list of queries.
	// +optional
	QueryList *WorkflowTemplateSpecJobsHiveJobQueryList `json:"queryList,omitempty" tf:"query_list"`
	// Optional. Mapping of query variable names to values (equivalent to the Hive command: `SET name="value";`).
	// +optional
	ScriptVariables *map[string]string `json:"scriptVariables,omitempty" tf:"script_variables"`
}

func (*WorkflowTemplateSpecJobsHiveJob) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecJobsHiveJob.

func (*WorkflowTemplateSpecJobsHiveJob) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecJobsHiveJobCodec

// WorkflowTemplateSpecJobsHiveJobCodec is a stateless custom jsoniter codec for
// WorkflowTemplateSpecJobsHiveJob. Excluded from deepcopy generation.
type WorkflowTemplateSpecJobsHiveJobCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecJobsHiveJobCodec) Decode

func (WorkflowTemplateSpecJobsHiveJobCodec) Encode

func (WorkflowTemplateSpecJobsHiveJobCodec) IsEmpty

type WorkflowTemplateSpecJobsHiveJobQueryList

// WorkflowTemplateSpecJobsHiveJobQueryList is an inline list of Hive queries for a workflow step.
type WorkflowTemplateSpecJobsHiveJobQueryList struct {
	// Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4", ] } }
	Queries []string `json:"queries" tf:"queries"`
}

func (*WorkflowTemplateSpecJobsHiveJobQueryList) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecJobsHiveJobQueryList.

func (*WorkflowTemplateSpecJobsHiveJobQueryList) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecJobsHiveJobQueryListCodec

// WorkflowTemplateSpecJobsHiveJobQueryListCodec is a stateless marker type
// providing custom jsoniter Decode/Encode/IsEmpty hooks for
// WorkflowTemplateSpecJobsHiveJobQueryList; excluded from deepcopy generation.
type WorkflowTemplateSpecJobsHiveJobQueryListCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecJobsHiveJobQueryListCodec) Decode

func (WorkflowTemplateSpecJobsHiveJobQueryListCodec) Encode

func (WorkflowTemplateSpecJobsHiveJobQueryListCodec) IsEmpty

type WorkflowTemplateSpecJobsPigJob

// WorkflowTemplateSpecJobsPigJob configures an Apache Pig job inside a
// Dataproc workflow template. Each field's json tag is the API wire name and
// its tf tag the matching Terraform attribute.
// NOTE(review): the *map[string]string pointer-to-map fields mirror the
// upstream generator's output; do not hand-"fix" them, generated deepcopy
// code depends on these exact types.
type WorkflowTemplateSpecJobsPigJob struct {
	// Optional. Whether to continue executing queries if a query fails. The default value is `false`. Setting to `true` can be useful when executing independent parallel queries.
	// +optional
	ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure"`
	// Optional. HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.
	// +optional
	JarFileUris []string `json:"jarFileUris,omitempty" tf:"jar_file_uris"`
	// Optional. The runtime log config for job execution.
	// +optional
	LoggingConfig *WorkflowTemplateSpecJobsPigJobLoggingConfig `json:"loggingConfig,omitempty" tf:"logging_config"`
	// Optional. A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code.
	// +optional
	Properties *map[string]string `json:"properties,omitempty" tf:"properties"`
	// The HCFS URI of the script that contains the Pig queries.
	// +optional
	QueryFileURI *string `json:"queryFileURI,omitempty" tf:"query_file_uri"`
	// A list of queries.
	// +optional
	QueryList *WorkflowTemplateSpecJobsPigJobQueryList `json:"queryList,omitempty" tf:"query_list"`
	// Optional. Mapping of query variable names to values (equivalent to the Pig command: `name=[value]`).
	// +optional
	ScriptVariables *map[string]string `json:"scriptVariables,omitempty" tf:"script_variables"`
}

func (*WorkflowTemplateSpecJobsPigJob) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecJobsPigJob.

func (*WorkflowTemplateSpecJobsPigJob) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecJobsPigJobCodec

// WorkflowTemplateSpecJobsPigJobCodec is a stateless marker type providing
// custom jsoniter Decode/Encode/IsEmpty hooks for
// WorkflowTemplateSpecJobsPigJob; excluded from deepcopy generation.
type WorkflowTemplateSpecJobsPigJobCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecJobsPigJobCodec) Decode

func (WorkflowTemplateSpecJobsPigJobCodec) Encode

func (WorkflowTemplateSpecJobsPigJobCodec) IsEmpty

type WorkflowTemplateSpecJobsPigJobLoggingConfig

// WorkflowTemplateSpecJobsPigJobLoggingConfig holds per-package driver log
// level overrides for a workflow-template Pig job.
type WorkflowTemplateSpecJobsPigJobLoggingConfig struct {
	// The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +optional
	DriverLogLevels *map[string]string `json:"driverLogLevels,omitempty" tf:"driver_log_levels"`
}

func (*WorkflowTemplateSpecJobsPigJobLoggingConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecJobsPigJobLoggingConfig.

func (*WorkflowTemplateSpecJobsPigJobLoggingConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecJobsPigJobLoggingConfigCodec

// WorkflowTemplateSpecJobsPigJobLoggingConfigCodec is a stateless marker type
// providing custom jsoniter Decode/Encode/IsEmpty hooks for
// WorkflowTemplateSpecJobsPigJobLoggingConfig; excluded from deepcopy generation.
type WorkflowTemplateSpecJobsPigJobLoggingConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecJobsPigJobLoggingConfigCodec) Decode

func (WorkflowTemplateSpecJobsPigJobLoggingConfigCodec) Encode

func (WorkflowTemplateSpecJobsPigJobLoggingConfigCodec) IsEmpty

type WorkflowTemplateSpecJobsPigJobQueryList

// WorkflowTemplateSpecJobsPigJobQueryList wraps the list of inline queries
// for a workflow-template Pig job.
// NOTE(review): the field comment below reuses the upstream API's HiveJob
// example verbatim; for this type the enclosing field is the Pig job.
type WorkflowTemplateSpecJobsPigJobQueryList struct {
	// Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4", ] } }
	Queries []string `json:"queries" tf:"queries"`
}

func (*WorkflowTemplateSpecJobsPigJobQueryList) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecJobsPigJobQueryList.

func (*WorkflowTemplateSpecJobsPigJobQueryList) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecJobsPigJobQueryListCodec

// WorkflowTemplateSpecJobsPigJobQueryListCodec is a stateless marker type
// providing custom jsoniter Decode/Encode/IsEmpty hooks for
// WorkflowTemplateSpecJobsPigJobQueryList; excluded from deepcopy generation.
type WorkflowTemplateSpecJobsPigJobQueryListCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecJobsPigJobQueryListCodec) Decode

func (WorkflowTemplateSpecJobsPigJobQueryListCodec) Encode

func (WorkflowTemplateSpecJobsPigJobQueryListCodec) IsEmpty

type WorkflowTemplateSpecJobsPrestoJob

// WorkflowTemplateSpecJobsPrestoJob configures a Presto job inside a Dataproc
// workflow template. Each field's json tag is the API wire name and its tf
// tag the matching Terraform attribute.
type WorkflowTemplateSpecJobsPrestoJob struct {
	// Optional. Presto client tags to attach to this query
	// +optional
	ClientTags []string `json:"clientTags,omitempty" tf:"client_tags"`
	// Optional. Whether to continue executing queries if a query fails. The default value is `false`. Setting to `true` can be useful when executing independent parallel queries.
	// +optional
	ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure"`
	// Optional. The runtime log config for job execution.
	// +optional
	LoggingConfig *WorkflowTemplateSpecJobsPrestoJobLoggingConfig `json:"loggingConfig,omitempty" tf:"logging_config"`
	// Optional. The format in which query output will be displayed. See the Presto documentation for supported output formats
	// +optional
	OutputFormat *string `json:"outputFormat,omitempty" tf:"output_format"`
	// Optional. A mapping of property names to values. Used to set Presto [session properties](https://prestodb.io/docs/current/sql/set-session.html) Equivalent to using the --session flag in the Presto CLI
	// +optional
	Properties *map[string]string `json:"properties,omitempty" tf:"properties"`
	// The HCFS URI of the script that contains SQL queries.
	// +optional
	QueryFileURI *string `json:"queryFileURI,omitempty" tf:"query_file_uri"`
	// A list of queries.
	// +optional
	QueryList *WorkflowTemplateSpecJobsPrestoJobQueryList `json:"queryList,omitempty" tf:"query_list"`
}

func (*WorkflowTemplateSpecJobsPrestoJob) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecJobsPrestoJob.

func (*WorkflowTemplateSpecJobsPrestoJob) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecJobsPrestoJobCodec

// WorkflowTemplateSpecJobsPrestoJobCodec is a stateless marker type providing
// custom jsoniter Decode/Encode/IsEmpty hooks for
// WorkflowTemplateSpecJobsPrestoJob; excluded from deepcopy generation.
type WorkflowTemplateSpecJobsPrestoJobCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecJobsPrestoJobCodec) Decode

func (WorkflowTemplateSpecJobsPrestoJobCodec) Encode

func (WorkflowTemplateSpecJobsPrestoJobCodec) IsEmpty

type WorkflowTemplateSpecJobsPrestoJobLoggingConfig

// WorkflowTemplateSpecJobsPrestoJobLoggingConfig holds per-package driver log
// level overrides for a workflow-template Presto job.
type WorkflowTemplateSpecJobsPrestoJobLoggingConfig struct {
	// The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +optional
	DriverLogLevels *map[string]string `json:"driverLogLevels,omitempty" tf:"driver_log_levels"`
}

func (*WorkflowTemplateSpecJobsPrestoJobLoggingConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecJobsPrestoJobLoggingConfig.

func (*WorkflowTemplateSpecJobsPrestoJobLoggingConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecJobsPrestoJobLoggingConfigCodec

// WorkflowTemplateSpecJobsPrestoJobLoggingConfigCodec is a stateless marker
// type providing custom jsoniter Decode/Encode/IsEmpty hooks for
// WorkflowTemplateSpecJobsPrestoJobLoggingConfig; excluded from deepcopy generation.
type WorkflowTemplateSpecJobsPrestoJobLoggingConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecJobsPrestoJobLoggingConfigCodec) Decode

func (WorkflowTemplateSpecJobsPrestoJobLoggingConfigCodec) Encode

func (WorkflowTemplateSpecJobsPrestoJobLoggingConfigCodec) IsEmpty

type WorkflowTemplateSpecJobsPrestoJobQueryList

// WorkflowTemplateSpecJobsPrestoJobQueryList wraps the list of inline queries
// for a workflow-template Presto job.
// NOTE(review): the field comment below reuses the upstream API's HiveJob
// example verbatim; for this type the enclosing field is the Presto job.
type WorkflowTemplateSpecJobsPrestoJobQueryList struct {
	// Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4", ] } }
	Queries []string `json:"queries" tf:"queries"`
}

func (*WorkflowTemplateSpecJobsPrestoJobQueryList) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecJobsPrestoJobQueryList.

func (*WorkflowTemplateSpecJobsPrestoJobQueryList) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecJobsPrestoJobQueryListCodec

// WorkflowTemplateSpecJobsPrestoJobQueryListCodec is a stateless marker type
// providing custom jsoniter Decode/Encode/IsEmpty hooks for
// WorkflowTemplateSpecJobsPrestoJobQueryList; excluded from deepcopy generation.
type WorkflowTemplateSpecJobsPrestoJobQueryListCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecJobsPrestoJobQueryListCodec) Decode

func (WorkflowTemplateSpecJobsPrestoJobQueryListCodec) Encode

func (WorkflowTemplateSpecJobsPrestoJobQueryListCodec) IsEmpty

type WorkflowTemplateSpecJobsPysparkJob

// WorkflowTemplateSpecJobsPysparkJob configures a PySpark job inside a
// Dataproc workflow template. MainPythonFileURI is the only required field;
// json tags are the API wire names, tf tags the Terraform attributes.
type WorkflowTemplateSpecJobsPysparkJob struct {
	// Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
	// +optional
	ArchiveUris []string `json:"archiveUris,omitempty" tf:"archive_uris"`
	// Optional. The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
	// +optional
	Args []string `json:"args,omitempty" tf:"args"`
	// Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
	// +optional
	FileUris []string `json:"fileUris,omitempty" tf:"file_uris"`
	// Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
	// +optional
	JarFileUris []string `json:"jarFileUris,omitempty" tf:"jar_file_uris"`
	// Optional. The runtime log config for job execution.
	// +optional
	LoggingConfig *WorkflowTemplateSpecJobsPysparkJobLoggingConfig `json:"loggingConfig,omitempty" tf:"logging_config"`
	// Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file.
	MainPythonFileURI *string `json:"mainPythonFileURI" tf:"main_python_file_uri"`
	// Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
	// +optional
	Properties *map[string]string `json:"properties,omitempty" tf:"properties"`
	// Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
	// +optional
	PythonFileUris []string `json:"pythonFileUris,omitempty" tf:"python_file_uris"`
}

func (*WorkflowTemplateSpecJobsPysparkJob) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecJobsPysparkJob.

func (*WorkflowTemplateSpecJobsPysparkJob) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecJobsPysparkJobCodec

// WorkflowTemplateSpecJobsPysparkJobCodec is a stateless marker type
// providing custom jsoniter Decode/Encode/IsEmpty hooks for
// WorkflowTemplateSpecJobsPysparkJob; excluded from deepcopy generation.
type WorkflowTemplateSpecJobsPysparkJobCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecJobsPysparkJobCodec) Decode

func (WorkflowTemplateSpecJobsPysparkJobCodec) Encode

func (WorkflowTemplateSpecJobsPysparkJobCodec) IsEmpty

type WorkflowTemplateSpecJobsPysparkJobLoggingConfig

// WorkflowTemplateSpecJobsPysparkJobLoggingConfig holds per-package driver
// log level overrides for a workflow-template PySpark job.
type WorkflowTemplateSpecJobsPysparkJobLoggingConfig struct {
	// The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +optional
	DriverLogLevels *map[string]string `json:"driverLogLevels,omitempty" tf:"driver_log_levels"`
}

func (*WorkflowTemplateSpecJobsPysparkJobLoggingConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecJobsPysparkJobLoggingConfig.

func (*WorkflowTemplateSpecJobsPysparkJobLoggingConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecJobsPysparkJobLoggingConfigCodec

// WorkflowTemplateSpecJobsPysparkJobLoggingConfigCodec is a stateless marker
// type providing custom jsoniter Decode/Encode/IsEmpty hooks for
// WorkflowTemplateSpecJobsPysparkJobLoggingConfig; excluded from deepcopy generation.
type WorkflowTemplateSpecJobsPysparkJobLoggingConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecJobsPysparkJobLoggingConfigCodec) Decode

func (WorkflowTemplateSpecJobsPysparkJobLoggingConfigCodec) Encode

func (WorkflowTemplateSpecJobsPysparkJobLoggingConfigCodec) IsEmpty

type WorkflowTemplateSpecJobsScheduling

// WorkflowTemplateSpecJobsScheduling bounds automatic driver restarts for a
// workflow-template job: per-hour and total failure limits.
type WorkflowTemplateSpecJobsScheduling struct {
	// Optional. Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed. A job may be reported as thrashing if driver exits with non-zero code 4 times within 10 minute window. Maximum value is 10.
	// +optional
	MaxFailuresPerHour *int64 `json:"maxFailuresPerHour,omitempty" tf:"max_failures_per_hour"`
	// Optional. Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed. Maximum value is 240.
	// +optional
	MaxFailuresTotal *int64 `json:"maxFailuresTotal,omitempty" tf:"max_failures_total"`
}

func (*WorkflowTemplateSpecJobsScheduling) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecJobsScheduling.

func (*WorkflowTemplateSpecJobsScheduling) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecJobsSchedulingCodec

// WorkflowTemplateSpecJobsSchedulingCodec is a stateless marker type
// providing custom jsoniter Decode/Encode/IsEmpty hooks for
// WorkflowTemplateSpecJobsScheduling; excluded from deepcopy generation.
type WorkflowTemplateSpecJobsSchedulingCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecJobsSchedulingCodec) Decode

func (WorkflowTemplateSpecJobsSchedulingCodec) Encode

func (WorkflowTemplateSpecJobsSchedulingCodec) IsEmpty

type WorkflowTemplateSpecJobsSparkJob

// WorkflowTemplateSpecJobsSparkJob configures a Spark job inside a Dataproc
// workflow template. The driver entry point is given either by MainClass or
// by MainJarFileURI (both are optional here; presumably exactly one must be
// set — upstream API constraint, not enforced by this struct).
type WorkflowTemplateSpecJobsSparkJob struct {
	// Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
	// +optional
	ArchiveUris []string `json:"archiveUris,omitempty" tf:"archive_uris"`
	// Optional. The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
	// +optional
	Args []string `json:"args,omitempty" tf:"args"`
	// Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
	// +optional
	FileUris []string `json:"fileUris,omitempty" tf:"file_uris"`
	// Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
	// +optional
	JarFileUris []string `json:"jarFileUris,omitempty" tf:"jar_file_uris"`
	// Optional. The runtime log config for job execution.
	// +optional
	LoggingConfig *WorkflowTemplateSpecJobsSparkJobLoggingConfig `json:"loggingConfig,omitempty" tf:"logging_config"`
	// The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in `jar_file_uris`.
	// +optional
	MainClass *string `json:"mainClass,omitempty" tf:"main_class"`
	// The HCFS URI of the jar file that contains the main class.
	// +optional
	MainJarFileURI *string `json:"mainJarFileURI,omitempty" tf:"main_jar_file_uri"`
	// Optional. A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
	// +optional
	Properties *map[string]string `json:"properties,omitempty" tf:"properties"`
}

func (*WorkflowTemplateSpecJobsSparkJob) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecJobsSparkJob.

func (*WorkflowTemplateSpecJobsSparkJob) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecJobsSparkJobCodec

// WorkflowTemplateSpecJobsSparkJobCodec is a stateless marker type providing
// custom jsoniter Decode/Encode/IsEmpty hooks for
// WorkflowTemplateSpecJobsSparkJob; excluded from deepcopy generation.
type WorkflowTemplateSpecJobsSparkJobCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecJobsSparkJobCodec) Decode

func (WorkflowTemplateSpecJobsSparkJobCodec) Encode

func (WorkflowTemplateSpecJobsSparkJobCodec) IsEmpty

type WorkflowTemplateSpecJobsSparkJobLoggingConfig

// WorkflowTemplateSpecJobsSparkJobLoggingConfig holds per-package driver log
// level overrides for a workflow-template Spark job.
type WorkflowTemplateSpecJobsSparkJobLoggingConfig struct {
	// The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +optional
	DriverLogLevels *map[string]string `json:"driverLogLevels,omitempty" tf:"driver_log_levels"`
}

func (*WorkflowTemplateSpecJobsSparkJobLoggingConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecJobsSparkJobLoggingConfig.

func (*WorkflowTemplateSpecJobsSparkJobLoggingConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecJobsSparkJobLoggingConfigCodec

// WorkflowTemplateSpecJobsSparkJobLoggingConfigCodec is a stateless marker
// type providing custom jsoniter Decode/Encode/IsEmpty hooks for
// WorkflowTemplateSpecJobsSparkJobLoggingConfig; excluded from deepcopy generation.
type WorkflowTemplateSpecJobsSparkJobLoggingConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecJobsSparkJobLoggingConfigCodec) Decode

func (WorkflowTemplateSpecJobsSparkJobLoggingConfigCodec) Encode

func (WorkflowTemplateSpecJobsSparkJobLoggingConfigCodec) IsEmpty

type WorkflowTemplateSpecJobsSparkRJob

// WorkflowTemplateSpecJobsSparkRJob configures a SparkR job inside a Dataproc
// workflow template. MainRFileURI is the only required field.
type WorkflowTemplateSpecJobsSparkRJob struct {
	// Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
	// +optional
	ArchiveUris []string `json:"archiveUris,omitempty" tf:"archive_uris"`
	// Optional. The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
	// +optional
	Args []string `json:"args,omitempty" tf:"args"`
	// Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
	// +optional
	FileUris []string `json:"fileUris,omitempty" tf:"file_uris"`
	// Optional. The runtime log config for job execution.
	// +optional
	LoggingConfig *WorkflowTemplateSpecJobsSparkRJobLoggingConfig `json:"loggingConfig,omitempty" tf:"logging_config"`
	// Required. The HCFS URI of the main R file to use as the driver. Must be a .R file.
	MainRFileURI *string `json:"mainRFileURI" tf:"main_r_file_uri"`
	// Optional. A mapping of property names to values, used to configure SparkR. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
	// +optional
	Properties *map[string]string `json:"properties,omitempty" tf:"properties"`
}

func (*WorkflowTemplateSpecJobsSparkRJob) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecJobsSparkRJob.

func (*WorkflowTemplateSpecJobsSparkRJob) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecJobsSparkRJobCodec

// WorkflowTemplateSpecJobsSparkRJobCodec is a stateless marker type providing
// custom jsoniter Decode/Encode/IsEmpty hooks for
// WorkflowTemplateSpecJobsSparkRJob; excluded from deepcopy generation.
type WorkflowTemplateSpecJobsSparkRJobCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecJobsSparkRJobCodec) Decode

func (WorkflowTemplateSpecJobsSparkRJobCodec) Encode

func (WorkflowTemplateSpecJobsSparkRJobCodec) IsEmpty

type WorkflowTemplateSpecJobsSparkRJobLoggingConfig

// WorkflowTemplateSpecJobsSparkRJobLoggingConfig holds per-package driver log
// level overrides for a workflow-template SparkR job.
type WorkflowTemplateSpecJobsSparkRJobLoggingConfig struct {
	// The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +optional
	DriverLogLevels *map[string]string `json:"driverLogLevels,omitempty" tf:"driver_log_levels"`
}

func (*WorkflowTemplateSpecJobsSparkRJobLoggingConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecJobsSparkRJobLoggingConfig.

func (*WorkflowTemplateSpecJobsSparkRJobLoggingConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecJobsSparkRJobLoggingConfigCodec

// WorkflowTemplateSpecJobsSparkRJobLoggingConfigCodec is a stateless marker
// type providing custom jsoniter Decode/Encode/IsEmpty hooks for
// WorkflowTemplateSpecJobsSparkRJobLoggingConfig; excluded from deepcopy generation.
type WorkflowTemplateSpecJobsSparkRJobLoggingConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecJobsSparkRJobLoggingConfigCodec) Decode

func (WorkflowTemplateSpecJobsSparkRJobLoggingConfigCodec) Encode

func (WorkflowTemplateSpecJobsSparkRJobLoggingConfigCodec) IsEmpty

type WorkflowTemplateSpecJobsSparkSQLJob

// WorkflowTemplateSpecJobsSparkSQLJob configures a Spark SQL job inside a
// Dataproc workflow template. Queries come either from QueryFileURI or from
// the inline QueryList.
type WorkflowTemplateSpecJobsSparkSQLJob struct {
	// Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
	// +optional
	JarFileUris []string `json:"jarFileUris,omitempty" tf:"jar_file_uris"`
	// Optional. The runtime log config for job execution.
	// +optional
	LoggingConfig *WorkflowTemplateSpecJobsSparkSQLJobLoggingConfig `json:"loggingConfig,omitempty" tf:"logging_config"`
	// Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten.
	// +optional
	Properties *map[string]string `json:"properties,omitempty" tf:"properties"`
	// The HCFS URI of the script that contains SQL queries.
	// +optional
	QueryFileURI *string `json:"queryFileURI,omitempty" tf:"query_file_uri"`
	// A list of queries.
	// +optional
	QueryList *WorkflowTemplateSpecJobsSparkSQLJobQueryList `json:"queryList,omitempty" tf:"query_list"`
	// Optional. Mapping of query variable names to values (equivalent to the Spark SQL command: SET `name="value";`).
	// +optional
	ScriptVariables *map[string]string `json:"scriptVariables,omitempty" tf:"script_variables"`
}

func (*WorkflowTemplateSpecJobsSparkSQLJob) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecJobsSparkSQLJob.

func (*WorkflowTemplateSpecJobsSparkSQLJob) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecJobsSparkSQLJobCodec

// WorkflowTemplateSpecJobsSparkSQLJobCodec is a stateless marker type
// providing custom jsoniter Decode/Encode/IsEmpty hooks for
// WorkflowTemplateSpecJobsSparkSQLJob; excluded from deepcopy generation.
type WorkflowTemplateSpecJobsSparkSQLJobCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecJobsSparkSQLJobCodec) Decode

func (WorkflowTemplateSpecJobsSparkSQLJobCodec) Encode

func (WorkflowTemplateSpecJobsSparkSQLJobCodec) IsEmpty

type WorkflowTemplateSpecJobsSparkSQLJobLoggingConfig

// WorkflowTemplateSpecJobsSparkSQLJobLoggingConfig holds per-package driver
// log level overrides for a workflow-template Spark SQL job.
type WorkflowTemplateSpecJobsSparkSQLJobLoggingConfig struct {
	// The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +optional
	DriverLogLevels *map[string]string `json:"driverLogLevels,omitempty" tf:"driver_log_levels"`
}

func (*WorkflowTemplateSpecJobsSparkSQLJobLoggingConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecJobsSparkSQLJobLoggingConfig.

func (*WorkflowTemplateSpecJobsSparkSQLJobLoggingConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecJobsSparkSQLJobLoggingConfigCodec

// WorkflowTemplateSpecJobsSparkSQLJobLoggingConfigCodec is a stateless marker
// type providing custom jsoniter Decode/Encode/IsEmpty hooks for
// WorkflowTemplateSpecJobsSparkSQLJobLoggingConfig; excluded from deepcopy generation.
type WorkflowTemplateSpecJobsSparkSQLJobLoggingConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecJobsSparkSQLJobLoggingConfigCodec) Decode

func (WorkflowTemplateSpecJobsSparkSQLJobLoggingConfigCodec) Encode

func (WorkflowTemplateSpecJobsSparkSQLJobLoggingConfigCodec) IsEmpty

type WorkflowTemplateSpecJobsSparkSQLJobQueryList

// WorkflowTemplateSpecJobsSparkSQLJobQueryList wraps the list of inline
// queries for a workflow-template Spark SQL job.
// NOTE(review): the field comment below reuses the upstream API's HiveJob
// example verbatim; for this type the enclosing field is the Spark SQL job.
type WorkflowTemplateSpecJobsSparkSQLJobQueryList struct {
	// Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4", ] } }
	Queries []string `json:"queries" tf:"queries"`
}

func (*WorkflowTemplateSpecJobsSparkSQLJobQueryList) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecJobsSparkSQLJobQueryList.

func (*WorkflowTemplateSpecJobsSparkSQLJobQueryList) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecJobsSparkSQLJobQueryListCodec

// WorkflowTemplateSpecJobsSparkSQLJobQueryListCodec is a stateless marker
// type providing custom jsoniter Decode/Encode/IsEmpty hooks for
// WorkflowTemplateSpecJobsSparkSQLJobQueryList; excluded from deepcopy generation.
type WorkflowTemplateSpecJobsSparkSQLJobQueryListCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecJobsSparkSQLJobQueryListCodec) Decode

func (WorkflowTemplateSpecJobsSparkSQLJobQueryListCodec) Encode

func (WorkflowTemplateSpecJobsSparkSQLJobQueryListCodec) IsEmpty

type WorkflowTemplateSpecParameters

// WorkflowTemplateSpecParameters declares a single template parameter: its
// Name, the Fields (field paths) it substitutes into at instantiation time,
// and optional Description and value Validation rules.
type WorkflowTemplateSpecParameters struct {
	// Optional. Brief description of the parameter. Must not exceed 1024 characters.
	// +optional
	Description *string `json:"description,omitempty" tf:"description"`
	// Required. Paths to all fields that the parameter replaces. A field is allowed to appear in at most one parameter's list of field paths. A field path is similar in syntax to a google.protobuf.FieldMask. For example, a field path that references the zone field of a workflow template's cluster selector would be specified as `placement.clusterSelector.zone`. Also, field paths can reference fields using the following syntax: * Values in maps can be referenced by key: * labels['key'] * placement.clusterSelector.clusterLabels['key'] * placement.managedCluster.labels['key'] * placement.clusterSelector.clusterLabels['key'] * jobs['step-id'].labels['key'] * Jobs in the jobs list can be referenced by step-id: * jobs['step-id'].hadoopJob.mainJarFileUri * jobs['step-id'].hiveJob.queryFileUri * jobs['step-id'].pySparkJob.mainPythonFileUri * jobs['step-id'].hadoopJob.jarFileUris[0] * jobs['step-id'].hadoopJob.archiveUris[0] * jobs['step-id'].hadoopJob.fileUris[0] * jobs['step-id'].pySparkJob.pythonFileUris[0] * Items in repeated fields can be referenced by a zero-based index: * jobs['step-id'].sparkJob.args[0] * Other examples: * jobs['step-id'].hadoopJob.properties['key'] * jobs['step-id'].hadoopJob.args[0] * jobs['step-id'].hiveJob.scriptVariables['key'] * jobs['step-id'].hadoopJob.mainJarFileUri * placement.clusterSelector.zone It may not be possible to parameterize maps and repeated fields in their entirety since only individual map values and individual items in repeated fields can be referenced. For example, the following field paths are invalid: - placement.clusterSelector.clusterLabels - jobs['step-id'].sparkJob.args
	Fields []string `json:"fields" tf:"fields"`
	// Required. Parameter name. The parameter name is used as the key, and paired with the parameter value, which are passed to the template when the template is instantiated. The name must contain only capital letters (A-Z), numbers (0-9), and underscores (_), and must not start with a number. The maximum length is 40 characters.
	Name *string `json:"name" tf:"name"`
	// Optional. Validation rules to be applied to this parameter's value.
	// +optional
	Validation *WorkflowTemplateSpecParametersValidation `json:"validation,omitempty" tf:"validation"`
}

func (*WorkflowTemplateSpecParameters) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecParameters.

func (*WorkflowTemplateSpecParameters) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecParametersValidation

// WorkflowTemplateSpecParametersValidation selects the validation strategy
// for a template parameter value: by regular expression (Regex) or by an
// explicit allow-list (Values).
type WorkflowTemplateSpecParametersValidation struct {
	// Validation based on regular expressions.
	// +optional
	Regex *WorkflowTemplateSpecParametersValidationRegex `json:"regex,omitempty" tf:"regex"`
	// Validation based on a list of allowed values.
	// +optional
	Values *WorkflowTemplateSpecParametersValidationValues `json:"values,omitempty" tf:"values"`
}

func (*WorkflowTemplateSpecParametersValidation) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecParametersValidation.

func (*WorkflowTemplateSpecParametersValidation) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecParametersValidationCodec

// WorkflowTemplateSpecParametersValidationCodec is a stateless marker type
// providing custom jsoniter Decode/Encode/IsEmpty hooks for
// WorkflowTemplateSpecParametersValidation; excluded from deepcopy generation.
type WorkflowTemplateSpecParametersValidationCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecParametersValidationCodec) Decode

func (WorkflowTemplateSpecParametersValidationCodec) Encode

func (WorkflowTemplateSpecParametersValidationCodec) IsEmpty

type WorkflowTemplateSpecParametersValidationRegex

// WorkflowTemplateSpecParametersValidationRegex validates a parameter value
// against a set of RE2 regular expressions (full-string match required).
type WorkflowTemplateSpecParametersValidationRegex struct {
	// Required. RE2 regular expressions used to validate the parameter's value. The value must match the regex in its entirety (substring matches are not sufficient).
	Regexes []string `json:"regexes" tf:"regexes"`
}

func (*WorkflowTemplateSpecParametersValidationRegex) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecParametersValidationRegex.

func (*WorkflowTemplateSpecParametersValidationRegex) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecParametersValidationRegexCodec

// WorkflowTemplateSpecParametersValidationRegexCodec is a stateless marker
// type providing custom jsoniter Decode/Encode/IsEmpty hooks for
// WorkflowTemplateSpecParametersValidationRegex; excluded from deepcopy generation.
type WorkflowTemplateSpecParametersValidationRegexCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecParametersValidationRegexCodec) Decode

func (WorkflowTemplateSpecParametersValidationRegexCodec) Encode

func (WorkflowTemplateSpecParametersValidationRegexCodec) IsEmpty

type WorkflowTemplateSpecParametersValidationValues

// WorkflowTemplateSpecParametersValidationValues validates a parameter value
// against an explicit allow-list.
type WorkflowTemplateSpecParametersValidationValues struct {
	// Required. List of allowed values for the parameter.
	Values []string `json:"values" tf:"values"`
}

func (*WorkflowTemplateSpecParametersValidationValues) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecParametersValidationValues.

func (*WorkflowTemplateSpecParametersValidationValues) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecParametersValidationValuesCodec

// WorkflowTemplateSpecParametersValidationValuesCodec is a stateless marker
// type providing custom jsoniter Decode/Encode/IsEmpty hooks for
// WorkflowTemplateSpecParametersValidationValues; excluded from deepcopy generation.
type WorkflowTemplateSpecParametersValidationValuesCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecParametersValidationValuesCodec) Decode

func (WorkflowTemplateSpecParametersValidationValuesCodec) Encode

func (WorkflowTemplateSpecParametersValidationValuesCodec) IsEmpty

type WorkflowTemplateSpecPlacement

// WorkflowTemplateSpecPlacement chooses where workflow jobs run: either an
// existing cluster selected by labels (ClusterSelector) or a cluster the
// workflow itself manages (ManagedCluster).
type WorkflowTemplateSpecPlacement struct {
	// Optional. A selector that chooses target cluster for jobs based on metadata. The selector is evaluated at the time each job is submitted.
	// +optional
	ClusterSelector *WorkflowTemplateSpecPlacementClusterSelector `json:"clusterSelector,omitempty" tf:"cluster_selector"`
	// A cluster that is managed by the workflow.
	// +optional
	ManagedCluster *WorkflowTemplateSpecPlacementManagedCluster `json:"managedCluster,omitempty" tf:"managed_cluster"`
}

func (*WorkflowTemplateSpecPlacement) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacement.

func (*WorkflowTemplateSpecPlacement) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecPlacementClusterSelector

// WorkflowTemplateSpecPlacementClusterSelector selects an existing cluster
// for workflow jobs by matching cluster labels.
type WorkflowTemplateSpecPlacementClusterSelector struct {
	// Required. The cluster labels. Cluster must have all labels to match.
	// NOTE(review): pointer-to-map is unidiomatic Go (maps are reference
	// types already); this looks like a generator artifact — do not change
	// by hand, the deepcopy/codec functions are generated against it.
	ClusterLabels *map[string]string `json:"clusterLabels" tf:"cluster_labels"`
	// Optional. The zone where workflow process executes. This parameter does not affect the selection of the cluster. If unspecified, the zone of the first cluster matching the selector is used.
	// +optional
	Zone *string `json:"zone,omitempty" tf:"zone"`
}

func (*WorkflowTemplateSpecPlacementClusterSelector) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacementClusterSelector.

func (*WorkflowTemplateSpecPlacementClusterSelector) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecPlacementClusterSelectorCodec

// WorkflowTemplateSpecPlacementClusterSelectorCodec is a stateless codec with
// Decode, Encode, and IsEmpty methods for custom (de)serialization of
// WorkflowTemplateSpecPlacementClusterSelector values.
// It is excluded from deepcopy generation (+k8s:deepcopy-gen=false).
type WorkflowTemplateSpecPlacementClusterSelectorCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecPlacementClusterSelectorCodec) Decode

func (WorkflowTemplateSpecPlacementClusterSelectorCodec) Encode

func (WorkflowTemplateSpecPlacementClusterSelectorCodec) IsEmpty

type WorkflowTemplateSpecPlacementCodec

// WorkflowTemplateSpecPlacementCodec is a stateless codec with Decode,
// Encode, and IsEmpty methods for custom (de)serialization of
// WorkflowTemplateSpecPlacement values.
// It is excluded from deepcopy generation (+k8s:deepcopy-gen=false).
type WorkflowTemplateSpecPlacementCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecPlacementCodec) Decode

func (WorkflowTemplateSpecPlacementCodec) Encode

func (WorkflowTemplateSpecPlacementCodec) IsEmpty

type WorkflowTemplateSpecPlacementManagedCluster

// WorkflowTemplateSpecPlacementManagedCluster describes a cluster that the
// workflow creates, runs jobs on, and deletes when the workflow finishes.
type WorkflowTemplateSpecPlacementManagedCluster struct {
	// Required. The cluster name prefix. A unique cluster name will be formed by appending a random suffix. The name must contain only lower-case letters (a-z), numbers (0-9), and hyphens (-). Must begin with a letter. Cannot begin or end with hyphen. Must consist of between 2 and 35 characters.
	ClusterName *string `json:"clusterName" tf:"cluster_name"`
	// Required. The cluster configuration.
	Config *WorkflowTemplateSpecPlacementManagedClusterConfig `json:"config" tf:"config"`
	// Optional. The labels to associate with this cluster. Label keys must be between 1 and 63 characters long, and must conform to the following PCRE regular expression: \p{Ll}\p{Lo}{0,62} Label values must be between 1 and 63 characters long, and must conform to the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63} No more than 32 labels can be associated with a given cluster.
	// +optional
	Labels *map[string]string `json:"labels,omitempty" tf:"labels"`
}

func (*WorkflowTemplateSpecPlacementManagedCluster) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacementManagedCluster.

func (*WorkflowTemplateSpecPlacementManagedCluster) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecPlacementManagedClusterCodec

// WorkflowTemplateSpecPlacementManagedClusterCodec is a stateless codec with
// Decode, Encode, and IsEmpty methods for custom (de)serialization of
// WorkflowTemplateSpecPlacementManagedCluster values.
// It is excluded from deepcopy generation (+k8s:deepcopy-gen=false).
type WorkflowTemplateSpecPlacementManagedClusterCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecPlacementManagedClusterCodec) Decode

func (WorkflowTemplateSpecPlacementManagedClusterCodec) Encode

func (WorkflowTemplateSpecPlacementManagedClusterCodec) IsEmpty

type WorkflowTemplateSpecPlacementManagedClusterConfig

// WorkflowTemplateSpecPlacementManagedClusterConfig is the full cluster
// configuration for a workflow-managed Dataproc cluster: autoscaling,
// encryption, endpoints, GCE settings, init actions, lifecycle, instance
// groups (master/worker/secondary), security, software, and staging buckets.
// All fields except InitializationActions are pointers so that "unset" is
// distinguishable from the zero value.
type WorkflowTemplateSpecPlacementManagedClusterConfig struct {
	// Optional. Autoscaling config for the policy associated with the cluster. Cluster does not autoscale if this field is unset.
	// +optional
	AutoscalingConfig *WorkflowTemplateSpecPlacementManagedClusterConfigAutoscalingConfig `json:"autoscalingConfig,omitempty" tf:"autoscaling_config"`
	// Optional. Encryption settings for the cluster.
	// +optional
	EncryptionConfig *WorkflowTemplateSpecPlacementManagedClusterConfigEncryptionConfig `json:"encryptionConfig,omitempty" tf:"encryption_config"`
	// Optional. Port/endpoint configuration for this cluster
	// +optional
	EndpointConfig *WorkflowTemplateSpecPlacementManagedClusterConfigEndpointConfig `json:"endpointConfig,omitempty" tf:"endpoint_config"`
	// Optional. The shared Compute Engine config settings for all instances in a cluster.
	// +optional
	GceClusterConfig *WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfig `json:"gceClusterConfig,omitempty" tf:"gce_cluster_config"`
	// Optional. Commands to execute on each node after config is completed. By default, executables are run on master and all worker nodes. You can test a node's `role` metadata to run an executable on a master or worker node, as shown below using `curl` (you can also use `wget`): ROLE=$(curl -H Metadata-Flavor:Google http://metadata/computeMetadata/v1/instance/attributes/dataproc-role) if [[ "${ROLE}" == 'Master' ]]; then ... master specific actions ... else ... worker specific actions ... fi
	// +optional
	InitializationActions []WorkflowTemplateSpecPlacementManagedClusterConfigInitializationActions `json:"initializationActions,omitempty" tf:"initialization_actions"`
	// Optional. Lifecycle setting for the cluster.
	// +optional
	LifecycleConfig *WorkflowTemplateSpecPlacementManagedClusterConfigLifecycleConfig `json:"lifecycleConfig,omitempty" tf:"lifecycle_config"`
	// Optional. The Compute Engine config settings for the master instance in a cluster.
	// +optional
	MasterConfig *WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfig `json:"masterConfig,omitempty" tf:"master_config"`
	// Optional. The Compute Engine config settings for additional worker instances in a cluster.
	// +optional
	SecondaryWorkerConfig *WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfig `json:"secondaryWorkerConfig,omitempty" tf:"secondary_worker_config"`
	// Optional. Security settings for the cluster.
	// +optional
	SecurityConfig *WorkflowTemplateSpecPlacementManagedClusterConfigSecurityConfig `json:"securityConfig,omitempty" tf:"security_config"`
	// Optional. The config settings for software inside the cluster.
	// +optional
	SoftwareConfig *WorkflowTemplateSpecPlacementManagedClusterConfigSoftwareConfig `json:"softwareConfig,omitempty" tf:"software_config"`
	// Optional. A Cloud Storage bucket used to stage job dependencies, config files, and job driver console output. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's staging bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket (see [Dataproc staging bucket](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/staging-bucket)). **This field requires a Cloud Storage bucket name, not a URI to a Cloud Storage bucket.**
	// +optional
	StagingBucket *string `json:"stagingBucket,omitempty" tf:"staging_bucket"`
	// Optional. A Cloud Storage bucket used to store ephemeral cluster and jobs data, such as Spark and MapReduce history files. If you do not specify a temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's temp bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket. The default bucket has a TTL of 90 days, but you can use any TTL (or none) if you specify a bucket. **This field requires a Cloud Storage bucket name, not a URI to a Cloud Storage bucket.**
	// +optional
	TempBucket *string `json:"tempBucket,omitempty" tf:"temp_bucket"`
	// Optional. The Compute Engine config settings for worker instances in a cluster.
	// +optional
	WorkerConfig *WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfig `json:"workerConfig,omitempty" tf:"worker_config"`
}

func (*WorkflowTemplateSpecPlacementManagedClusterConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacementManagedClusterConfig.

func (*WorkflowTemplateSpecPlacementManagedClusterConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecPlacementManagedClusterConfigAutoscalingConfig

// WorkflowTemplateSpecPlacementManagedClusterConfigAutoscalingConfig attaches
// an existing autoscaling policy to the managed cluster; the cluster does not
// autoscale when the policy is unset.
type WorkflowTemplateSpecPlacementManagedClusterConfigAutoscalingConfig struct {
	// Optional. The autoscaling policy used by the cluster. Only resource names including projectid and location (region) are valid. Examples: * `https://www.googleapis.com/compute/v1/projects/[project_id]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]` * `projects/[project_id]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]` Note that the policy must be in the same project and Dataproc region.
	// +optional
	Policy *string `json:"policy,omitempty" tf:"policy"`
}

func (*WorkflowTemplateSpecPlacementManagedClusterConfigAutoscalingConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacementManagedClusterConfigAutoscalingConfig.

func (*WorkflowTemplateSpecPlacementManagedClusterConfigAutoscalingConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecPlacementManagedClusterConfigAutoscalingConfigCodec

// WorkflowTemplateSpecPlacementManagedClusterConfigAutoscalingConfigCodec is
// a stateless codec with Decode, Encode, and IsEmpty methods for custom
// (de)serialization of
// WorkflowTemplateSpecPlacementManagedClusterConfigAutoscalingConfig values.
// It is excluded from deepcopy generation (+k8s:deepcopy-gen=false).
type WorkflowTemplateSpecPlacementManagedClusterConfigAutoscalingConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecPlacementManagedClusterConfigAutoscalingConfigCodec) Decode

func (WorkflowTemplateSpecPlacementManagedClusterConfigAutoscalingConfigCodec) Encode

func (WorkflowTemplateSpecPlacementManagedClusterConfigAutoscalingConfigCodec) IsEmpty

type WorkflowTemplateSpecPlacementManagedClusterConfigCodec

// WorkflowTemplateSpecPlacementManagedClusterConfigCodec is a stateless codec
// with Decode, Encode, and IsEmpty methods for custom (de)serialization of
// WorkflowTemplateSpecPlacementManagedClusterConfig values.
// It is excluded from deepcopy generation (+k8s:deepcopy-gen=false).
type WorkflowTemplateSpecPlacementManagedClusterConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecPlacementManagedClusterConfigCodec) Decode

func (WorkflowTemplateSpecPlacementManagedClusterConfigCodec) Encode

func (WorkflowTemplateSpecPlacementManagedClusterConfigCodec) IsEmpty

type WorkflowTemplateSpecPlacementManagedClusterConfigEncryptionConfig

// WorkflowTemplateSpecPlacementManagedClusterConfigEncryptionConfig holds the
// Cloud KMS key used to encrypt persistent disks for all cluster instances.
type WorkflowTemplateSpecPlacementManagedClusterConfigEncryptionConfig struct {
	// Optional. The Cloud KMS key name to use for PD disk encryption for all instances in the cluster.
	// +optional
	GcePdKmsKeyName *string `json:"gcePdKmsKeyName,omitempty" tf:"gce_pd_kms_key_name"`
}

func (*WorkflowTemplateSpecPlacementManagedClusterConfigEncryptionConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacementManagedClusterConfigEncryptionConfig.

func (*WorkflowTemplateSpecPlacementManagedClusterConfigEncryptionConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecPlacementManagedClusterConfigEncryptionConfigCodec

// WorkflowTemplateSpecPlacementManagedClusterConfigEncryptionConfigCodec is a
// stateless codec with Decode, Encode, and IsEmpty methods for custom
// (de)serialization of
// WorkflowTemplateSpecPlacementManagedClusterConfigEncryptionConfig values.
// It is excluded from deepcopy generation (+k8s:deepcopy-gen=false).
type WorkflowTemplateSpecPlacementManagedClusterConfigEncryptionConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecPlacementManagedClusterConfigEncryptionConfigCodec) Decode

func (WorkflowTemplateSpecPlacementManagedClusterConfigEncryptionConfigCodec) Encode

func (WorkflowTemplateSpecPlacementManagedClusterConfigEncryptionConfigCodec) IsEmpty

type WorkflowTemplateSpecPlacementManagedClusterConfigEndpointConfig

// WorkflowTemplateSpecPlacementManagedClusterConfigEndpointConfig controls
// external HTTP access to cluster ports; HttpPorts is populated by the
// service (output only) when access is enabled.
type WorkflowTemplateSpecPlacementManagedClusterConfigEndpointConfig struct {
	// Optional. If true, enable http access to specific ports on the cluster from external sources. Defaults to false.
	// +optional
	EnableHTTPPortAccess *bool `json:"enableHTTPPortAccess,omitempty" tf:"enable_http_port_access"`
	// Output only. The map of port descriptions to URLs. Will only be populated if enable_http_port_access is true.
	// +optional
	HttpPorts *map[string]string `json:"httpPorts,omitempty" tf:"http_ports"`
}

func (*WorkflowTemplateSpecPlacementManagedClusterConfigEndpointConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacementManagedClusterConfigEndpointConfig.

func (*WorkflowTemplateSpecPlacementManagedClusterConfigEndpointConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecPlacementManagedClusterConfigEndpointConfigCodec

// WorkflowTemplateSpecPlacementManagedClusterConfigEndpointConfigCodec is a
// stateless codec with Decode, Encode, and IsEmpty methods for custom
// (de)serialization of
// WorkflowTemplateSpecPlacementManagedClusterConfigEndpointConfig values.
// It is excluded from deepcopy generation (+k8s:deepcopy-gen=false).
type WorkflowTemplateSpecPlacementManagedClusterConfigEndpointConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecPlacementManagedClusterConfigEndpointConfigCodec) Decode

func (WorkflowTemplateSpecPlacementManagedClusterConfigEndpointConfigCodec) Encode

func (WorkflowTemplateSpecPlacementManagedClusterConfigEndpointConfigCodec) IsEmpty

type WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfig

// WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfig holds the
// Compute Engine settings shared by every instance in the managed cluster:
// networking (network/subnetwork, internal-IP-only, IPv6 access), identity
// (service account and scopes), placement (zone, node-group and reservation
// affinity), plus instance metadata and tags.
type WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfig struct {
	// Optional. If true, all instances in the cluster will only have internal IP addresses. By default, clusters are not restricted to internal IP addresses, and will have ephemeral external IP addresses assigned to each instance. This `internal_ip_only` restriction can only be enabled for subnetwork enabled networks, and all off-cluster dependencies must be configured to be accessible without external IP addresses.
	// +optional
	InternalIPOnly *bool `json:"internalIPOnly,omitempty" tf:"internal_ip_only"`
	// The Compute Engine metadata entries to add to all instances (see [Project and instance metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)).
	// +optional
	Metadata *map[string]string `json:"metadata,omitempty" tf:"metadata"`
	// Optional. The Compute Engine network to be used for machine communications. Cannot be specified with subnetwork_uri. If neither `network_uri` nor `subnetwork_uri` is specified, the "default" network of the project is used, if it exists. Cannot be a "Custom Subnet Network" (see [Using Subnetworks](https://cloud.google.com/compute/docs/subnetworks) for more information). A full URL, partial URI, or short name are valid. Examples: * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/global/default` * `projects/[project_id]/regions/global/default` * `default`
	// +optional
	Network *string `json:"network,omitempty" tf:"network"`
	// Optional. Node Group Affinity for sole-tenant clusters.
	// +optional
	NodeGroupAffinity *WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigNodeGroupAffinity `json:"nodeGroupAffinity,omitempty" tf:"node_group_affinity"`
	// Optional. The type of IPv6 access for a cluster. Possible values: PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED, INHERIT_FROM_SUBNETWORK, OUTBOUND, BIDIRECTIONAL
	// +optional
	PrivateIpv6GoogleAccess *string `json:"privateIpv6GoogleAccess,omitempty" tf:"private_ipv6_google_access"`
	// Optional. Reservation Affinity for consuming Zonal reservation.
	// +optional
	ReservationAffinity *WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigReservationAffinity `json:"reservationAffinity,omitempty" tf:"reservation_affinity"`
	// Optional. The [Dataproc service account](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_dataproc) (also see [VM Data Plane identity](https://cloud.google.com/dataproc/docs/concepts/iam/dataproc-principals#vm_service_account_data_plane_identity)) used by Dataproc cluster VM instances to access Google Cloud Platform services. If not specified, the [Compute Engine default service account](https://cloud.google.com/compute/docs/access/service-accounts#default_service_account) is used.
	// +optional
	ServiceAccount *string `json:"serviceAccount,omitempty" tf:"service_account"`
	// Optional. The URIs of service account scopes to be included in Compute Engine instances. The following base set of scopes is always included: * https://www.googleapis.com/auth/cloud.useraccounts.readonly * https://www.googleapis.com/auth/devstorage.read_write * https://www.googleapis.com/auth/logging.write If no scopes are specified, the following defaults are also provided: * https://www.googleapis.com/auth/bigquery * https://www.googleapis.com/auth/bigtable.admin.table * https://www.googleapis.com/auth/bigtable.data * https://www.googleapis.com/auth/devstorage.full_control
	// +optional
	ServiceAccountScopes []string `json:"serviceAccountScopes,omitempty" tf:"service_account_scopes"`
	// Optional. The Compute Engine subnetwork to be used for machine communications. Cannot be specified with network_uri. A full URL, partial URI, or short name are valid. Examples: * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/subnetworks/sub0` * `projects/[project_id]/regions/us-east1/subnetworks/sub0` * `sub0`
	// +optional
	Subnetwork *string `json:"subnetwork,omitempty" tf:"subnetwork"`
	// The Compute Engine tags to add to all instances (see [Tagging instances](https://cloud.google.com/compute/docs/label-or-tag-resources#tags)).
	// +optional
	Tags []string `json:"tags,omitempty" tf:"tags"`
	// Optional. The zone where the Compute Engine cluster will be located. On a create request, it is required in the "global" region. If omitted in a non-global Dataproc region, the service will pick a zone in the corresponding Compute Engine region. On a get request, zone will always be present. A full URL, partial URI, or short name are valid. Examples: * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/[zone]` * `projects/[project_id]/zones/[zone]` * `us-central1-f`
	// +optional
	Zone *string `json:"zone,omitempty" tf:"zone"`
}

func (*WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfig.

func (*WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigCodec

// WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigCodec is a
// stateless codec with Decode, Encode, and IsEmpty methods for custom
// (de)serialization of
// WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfig values.
// It is excluded from deepcopy generation (+k8s:deepcopy-gen=false).
type WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigCodec) Decode

func (WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigCodec) Encode

func (WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigCodec) IsEmpty

type WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigNodeGroupAffinity

// WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigNodeGroupAffinity
// pins the managed cluster to a sole-tenant Compute Engine node group.
type WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigNodeGroupAffinity struct {
	// Required. The URI of a sole-tenant [node group resource](https://cloud.google.com/compute/docs/reference/rest/v1/nodeGroups) that the cluster will be created on. A full URL, partial URI, or node group name are valid. Examples: * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-central1-a/nodeGroups/node-group-1` * `projects/[project_id]/zones/us-central1-a/nodeGroups/node-group-1` * `node-group-1`
	NodeGroup *string `json:"nodeGroup" tf:"node_group"`
}

func (*WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigNodeGroupAffinity) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigNodeGroupAffinity.

func (*WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigNodeGroupAffinity) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigNodeGroupAffinityCodec

// WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigNodeGroupAffinityCodec
// is a stateless codec with Decode, Encode, and IsEmpty methods for custom
// (de)serialization of the corresponding NodeGroupAffinity values.
// It is excluded from deepcopy generation (+k8s:deepcopy-gen=false).
type WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigNodeGroupAffinityCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigNodeGroupAffinityCodec) Decode

func (WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigNodeGroupAffinityCodec) Encode

func (WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigNodeGroupAffinityCodec) IsEmpty

type WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigReservationAffinity

// WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigReservationAffinity
// configures which zonal Compute Engine reservation the cluster's instances
// consume, matched by reservation type and label key/values.
type WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigReservationAffinity struct {
	// Optional. Type of reservation to consume Possible values: TYPE_UNSPECIFIED, NO_RESERVATION, ANY_RESERVATION, SPECIFIC_RESERVATION
	// +optional
	ConsumeReservationType *string `json:"consumeReservationType,omitempty" tf:"consume_reservation_type"`
	// Optional. Corresponds to the label key of reservation resource.
	// +optional
	Key *string `json:"key,omitempty" tf:"key"`
	// Optional. Corresponds to the label values of reservation resource.
	// +optional
	Values []string `json:"values,omitempty" tf:"values"`
}

func (*WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigReservationAffinity) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigReservationAffinity.

func (*WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigReservationAffinity) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigReservationAffinityCodec

// WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigReservationAffinityCodec
// is a stateless codec with Decode, Encode, and IsEmpty methods for custom
// (de)serialization of the corresponding ReservationAffinity values.
// It is excluded from deepcopy generation (+k8s:deepcopy-gen=false).
type WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigReservationAffinityCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigReservationAffinityCodec) Decode

func (WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigReservationAffinityCodec) Encode

func (WorkflowTemplateSpecPlacementManagedClusterConfigGceClusterConfigReservationAffinityCodec) IsEmpty

type WorkflowTemplateSpecPlacementManagedClusterConfigInitializationActions

// WorkflowTemplateSpecPlacementManagedClusterConfigInitializationActions is a
// single post-setup executable (Cloud Storage URI) run on cluster nodes, with
// an optional completion timeout.
type WorkflowTemplateSpecPlacementManagedClusterConfigInitializationActions struct {
	// Required. Cloud Storage URI of executable file.
	ExecutableFile *string `json:"executableFile" tf:"executable_file"`
	// Optional. Amount of time executable has to complete. Default is 10 minutes (see JSON representation of [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). Cluster creation fails with an explanatory error message (the name of the executable that caused the error and the exceeded timeout period) if the executable is not completed at end of the timeout period.
	// +optional
	ExecutionTimeout *string `json:"executionTimeout,omitempty" tf:"execution_timeout"`
}

func (*WorkflowTemplateSpecPlacementManagedClusterConfigInitializationActions) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacementManagedClusterConfigInitializationActions.

func (*WorkflowTemplateSpecPlacementManagedClusterConfigInitializationActions) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecPlacementManagedClusterConfigLifecycleConfig

// WorkflowTemplateSpecPlacementManagedClusterConfigLifecycleConfig controls
// automatic deletion of the managed cluster: a fixed deletion time, a
// max-lifetime TTL, or an idle TTL. Timestamps and durations are carried as
// strings in proto3 JSON representation, not time.Time/time.Duration.
type WorkflowTemplateSpecPlacementManagedClusterConfigLifecycleConfig struct {
	// Optional. The time when cluster will be auto-deleted (see JSON representation of [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)).
	// +optional
	AutoDeleteTime *string `json:"autoDeleteTime,omitempty" tf:"auto_delete_time"`
	// Optional. The lifetime duration of cluster. The cluster will be auto-deleted at the end of this period. Minimum value is 10 minutes; maximum value is 14 days (see JSON representation of [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)).
	// +optional
	AutoDeleteTtl *string `json:"autoDeleteTtl,omitempty" tf:"auto_delete_ttl"`
	// Optional. The duration to keep the cluster alive while idling (when no jobs are running). Passing this threshold will cause the cluster to be deleted. Minimum value is 5 minutes; maximum value is 14 days (see JSON representation of [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)).
	// +optional
	IdleDeleteTtl *string `json:"idleDeleteTtl,omitempty" tf:"idle_delete_ttl"`
	// Output only. The time when cluster became idle (most recent job finished) and became eligible for deletion due to idleness (see JSON representation of [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)).
	// +optional
	IdleStartTime *string `json:"idleStartTime,omitempty" tf:"idle_start_time"`
}

func (*WorkflowTemplateSpecPlacementManagedClusterConfigLifecycleConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacementManagedClusterConfigLifecycleConfig.

func (*WorkflowTemplateSpecPlacementManagedClusterConfigLifecycleConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecPlacementManagedClusterConfigLifecycleConfigCodec

// WorkflowTemplateSpecPlacementManagedClusterConfigLifecycleConfigCodec is a
// stateless codec with Decode, Encode, and IsEmpty methods for custom
// (de)serialization of
// WorkflowTemplateSpecPlacementManagedClusterConfigLifecycleConfig values.
// It is excluded from deepcopy generation (+k8s:deepcopy-gen=false).
type WorkflowTemplateSpecPlacementManagedClusterConfigLifecycleConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecPlacementManagedClusterConfigLifecycleConfigCodec) Decode

func (WorkflowTemplateSpecPlacementManagedClusterConfigLifecycleConfigCodec) Encode

func (WorkflowTemplateSpecPlacementManagedClusterConfigLifecycleConfigCodec) IsEmpty

type WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfig

// WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfig describes the
// master instance group of the managed cluster: machine/image/disk settings,
// accelerators, instance count, and preemptibility. Several fields
// (InstanceNames, IsPreemptible, ManagedGroupConfig) are output-only and
// populated by the service.
type WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfig struct {
	// Optional. The Compute Engine accelerator configuration for these instances.
	// +optional
	Accelerators []WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigAccelerators `json:"accelerators,omitempty" tf:"accelerators"`
	// Optional. Disk option config settings.
	// +optional
	DiskConfig *WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigDiskConfig `json:"diskConfig,omitempty" tf:"disk_config"`
	// Optional. The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * `https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/[image-id]` * `projects/[project_id]/global/images/[image-id]` * `image-id` Image family examples. Dataproc will use the most recent image from the family: * `https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/family/[custom-image-family-name]` * `projects/[project_id]/global/images/family/[custom-image-family-name]` If the URI is unspecified, it will be inferred from `SoftwareConfig.image_version` or the system default.
	// +optional
	Image *string `json:"image,omitempty" tf:"image"`
	// Output only. The list of instance names. Dataproc derives the names from `cluster_name`, `num_instances`, and the instance group.
	// +optional
	InstanceNames []string `json:"instanceNames,omitempty" tf:"instance_names"`
	// Output only. Specifies that this instance group contains preemptible instances.
	// +optional
	IsPreemptible *bool `json:"isPreemptible,omitempty" tf:"is_preemptible"`
	// Optional. The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` * `projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` * `n1-standard-2` **Auto Zone Exception**: If you are using the Dataproc [Auto Zone Placement](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the machine type resource, for example, `n1-standard-2`.
	// +optional
	MachineType *string `json:"machineType,omitempty" tf:"machine_type"`
	// Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups.
	// +optional
	ManagedGroupConfig []WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigManagedGroupConfig `json:"managedGroupConfig,omitempty" tf:"managed_group_config"`
	// Optional. Specifies the minimum cpu platform for the Instance Group. See [Dataproc -> Minimum CPU Platform](https://cloud.google.com/dataproc/docs/concepts/compute/dataproc-min-cpu).
	// +optional
	MinCPUPlatform *string `json:"minCPUPlatform,omitempty" tf:"min_cpu_platform"`
	// Optional. The number of VM instances in the instance group. For [HA cluster](/dataproc/docs/concepts/configuring-clusters/high-availability) [master_config](#FIELDS.master_config) groups, **must be set to 3**. For standard cluster [master_config](#FIELDS.master_config) groups, **must be set to 1**.
	// +optional
	NumInstances *int64 `json:"numInstances,omitempty" tf:"num_instances"`
	// Optional. Specifies the preemptibility of the instance group. The default value for master and worker groups is `NON_PREEMPTIBLE`. This default cannot be changed. The default value for secondary instances is `PREEMPTIBLE`. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE
	// +optional
	Preemptibility *string `json:"preemptibility,omitempty" tf:"preemptibility"`
}

func (*WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfig.

func (*WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigAccelerators

// WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigAccelerators
// is one accelerator (GPU) attachment for master instances: the accelerator
// type resource and how many cards of that type to expose.
type WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigAccelerators struct {
	// The number of the accelerator cards of this type exposed to this instance.
	// +optional
	AcceleratorCount *int64 `json:"acceleratorCount,omitempty" tf:"accelerator_count"`
	// Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. See [Compute Engine AcceleratorTypes](https://cloud.google.com/compute/docs/reference/beta/acceleratorTypes). Examples: * `https://www.googleapis.com/compute/beta/projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` * `projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` * `nvidia-tesla-k80` **Auto Zone Exception**: If you are using the Dataproc [Auto Zone Placement](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, `nvidia-tesla-k80`.
	// +optional
	AcceleratorType *string `json:"acceleratorType,omitempty" tf:"accelerator_type"`
}

func (*WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigAccelerators) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigAccelerators.

func (*WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigAccelerators) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigCodec

// WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigCodec is a
// stateless codec with Decode, Encode, and IsEmpty methods for custom
// (de)serialization of
// WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfig values.
// It is excluded from deepcopy generation (+k8s:deepcopy-gen=false).
type WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigCodec) Decode

func (WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigCodec) Encode

func (WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigCodec) IsEmpty

type WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigDiskConfig

// WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigDiskConfig
// holds disk options for master instances: boot disk size/type and the
// number of attached local SSDs.
type WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigDiskConfig struct {
	// Optional. Size in GB of the boot disk (default is 500GB).
	// +optional
	BootDiskSizeGb *int64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb"`
	// Optional. Type of the boot disk (default is "pd-standard"). Valid values: "pd-balanced" (Persistent Disk Balanced Solid State Drive), "pd-ssd" (Persistent Disk Solid State Drive), or "pd-standard" (Persistent Disk Hard Disk Drive). See [Disk types](https://cloud.google.com/compute/docs/disks#disk-types).
	// +optional
	BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type"`
	// Optional. Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and [HDFS](https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries.
	// +optional
	NumLocalSsds *int64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds"`
}

func (*WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigDiskConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigDiskConfig.

func (*WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigDiskConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigDiskConfigCodec

type WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigDiskConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigDiskConfigCodec) Decode

func (WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigDiskConfigCodec) Encode

func (WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigDiskConfigCodec) IsEmpty

type WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigManagedGroupConfig

// WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigManagedGroupConfig
// carries output-only names of the Instance Group Manager and Instance
// Template backing this managed group.
type WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigManagedGroupConfig struct {
	// Output only. The name of the Instance Group Manager for this group.
	// +optional
	InstanceGroupManagerName *string `json:"instanceGroupManagerName,omitempty" tf:"instance_group_manager_name"`
	// Output only. The name of the Instance Template used for the Managed Instance Group.
	// +optional
	InstanceTemplateName *string `json:"instanceTemplateName,omitempty" tf:"instance_template_name"`
}

func (*WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigManagedGroupConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigManagedGroupConfig.

func (*WorkflowTemplateSpecPlacementManagedClusterConfigMasterConfigManagedGroupConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfig

// WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfig
// configures the secondary (typically preemptible) worker instance group of
// the managed cluster: machine/image selection, accelerators, disks,
// instance count, and preemptibility, plus output-only instance names and
// managed-group info.
type WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfig struct {
	// Optional. The Compute Engine accelerator configuration for these instances.
	// +optional
	Accelerators []WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigAccelerators `json:"accelerators,omitempty" tf:"accelerators"`
	// Optional. Disk option config settings.
	// +optional
	DiskConfig *WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigDiskConfig `json:"diskConfig,omitempty" tf:"disk_config"`
	// Optional. The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * `https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/[image-id]` * `projects/[project_id]/global/images/[image-id]` * `image-id` Image family examples. Dataproc will use the most recent image from the family: * `https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/family/[custom-image-family-name]` * `projects/[project_id]/global/images/family/[custom-image-family-name]` If the URI is unspecified, it will be inferred from `SoftwareConfig.image_version` or the system default.
	// +optional
	Image *string `json:"image,omitempty" tf:"image"`
	// Output only. The list of instance names. Dataproc derives the names from `cluster_name`, `num_instances`, and the instance group.
	// +optional
	InstanceNames []string `json:"instanceNames,omitempty" tf:"instance_names"`
	// Output only. Specifies that this instance group contains preemptible instances.
	// +optional
	IsPreemptible *bool `json:"isPreemptible,omitempty" tf:"is_preemptible"`
	// Optional. The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` * `projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` * `n1-standard-2` **Auto Zone Exception**: If you are using the Dataproc [Auto Zone Placement](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the machine type resource, for example, `n1-standard-2`.
	// +optional
	MachineType *string `json:"machineType,omitempty" tf:"machine_type"`
	// Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups.
	// +optional
	ManagedGroupConfig []WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfig `json:"managedGroupConfig,omitempty" tf:"managed_group_config"`
	// Optional. Specifies the minimum cpu platform for the Instance Group. See [Dataproc -> Minimum CPU Platform](https://cloud.google.com/dataproc/docs/concepts/compute/dataproc-min-cpu).
	// +optional
	MinCPUPlatform *string `json:"minCPUPlatform,omitempty" tf:"min_cpu_platform"`
	// NOTE(review): the master_config-specific wording below looks copied from
	// the master group docs; for secondary workers the count constraint likely
	// differs — confirm against the Dataproc WorkflowTemplates API reference.
	// Optional. The number of VM instances in the instance group. For [HA cluster](/dataproc/docs/concepts/configuring-clusters/high-availability) [master_config](#FIELDS.master_config) groups, **must be set to 3**. For standard cluster [master_config](#FIELDS.master_config) groups, **must be set to 1**.
	// +optional
	NumInstances *int64 `json:"numInstances,omitempty" tf:"num_instances"`
	// Optional. Specifies the preemptibility of the instance group. The default value for master and worker groups is `NON_PREEMPTIBLE`. This default cannot be changed. The default value for secondary instances is `PREEMPTIBLE`. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE
	// +optional
	Preemptibility *string `json:"preemptibility,omitempty" tf:"preemptibility"`
}

func (*WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfig.

func (*WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigAccelerators

// WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigAccelerators
// describes one Compute Engine accelerator (e.g. GPU) attachment for
// secondary worker instances: the card count and the accelerator type
// resource.
type WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigAccelerators struct {
	// The number of the accelerator cards of this type exposed to this instance.
	// +optional
	AcceleratorCount *int64 `json:"acceleratorCount,omitempty" tf:"accelerator_count"`
	// Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. See [Compute Engine AcceleratorTypes](https://cloud.google.com/compute/docs/reference/beta/acceleratorTypes). Examples: * `https://www.googleapis.com/compute/beta/projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` * `projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` * `nvidia-tesla-k80` **Auto Zone Exception**: If you are using the Dataproc [Auto Zone Placement](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, `nvidia-tesla-k80`.
	// +optional
	AcceleratorType *string `json:"acceleratorType,omitempty" tf:"accelerator_type"`
}

func (*WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigAccelerators) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigAccelerators.

func (*WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigAccelerators) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigCodec

// WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigCodec
// is a stateless marker type providing Decode/Encode/IsEmpty methods (listed
// in the package index) for custom jsoniter (de)serialization — presumably
// registered via GetDecoder/GetEncoder; confirm in the package source.
type WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigCodec) Decode

func (WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigCodec) Encode

func (WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigCodec) IsEmpty

type WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigDiskConfig

// WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigDiskConfig
// holds disk options for secondary worker instances: boot disk size/type and
// the number of attached local SSDs.
type WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigDiskConfig struct {
	// Optional. Size in GB of the boot disk (default is 500GB).
	// +optional
	BootDiskSizeGb *int64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb"`
	// Optional. Type of the boot disk (default is "pd-standard"). Valid values: "pd-balanced" (Persistent Disk Balanced Solid State Drive), "pd-ssd" (Persistent Disk Solid State Drive), or "pd-standard" (Persistent Disk Hard Disk Drive). See [Disk types](https://cloud.google.com/compute/docs/disks#disk-types).
	// +optional
	BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type"`
	// Optional. Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and [HDFS](https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries.
	// +optional
	NumLocalSsds *int64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds"`
}

func (*WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigDiskConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigDiskConfig.

func (*WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigDiskConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigDiskConfigCodec

// WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigDiskConfigCodec
// is a stateless marker type providing Decode/Encode/IsEmpty methods (listed
// in the package index) for custom jsoniter (de)serialization — presumably
// registered via GetDecoder/GetEncoder; confirm in the package source.
type WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigDiskConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigDiskConfigCodec) Decode

func (WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigDiskConfigCodec) Encode

func (WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigDiskConfigCodec) IsEmpty

type WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfig

// WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfig
// carries output-only names of the Instance Group Manager and Instance
// Template backing this managed group.
type WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfig struct {
	// Output only. The name of the Instance Group Manager for this group.
	// +optional
	InstanceGroupManagerName *string `json:"instanceGroupManagerName,omitempty" tf:"instance_group_manager_name"`
	// Output only. The name of the Instance Template used for the Managed Instance Group.
	// +optional
	InstanceTemplateName *string `json:"instanceTemplateName,omitempty" tf:"instance_template_name"`
}

func (*WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfig.

func (*WorkflowTemplateSpecPlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecPlacementManagedClusterConfigSecurityConfig

// WorkflowTemplateSpecPlacementManagedClusterConfigSecurityConfig wraps the
// cluster's security settings; currently only the optional Kerberos
// configuration.
type WorkflowTemplateSpecPlacementManagedClusterConfigSecurityConfig struct {
	// Optional. Kerberos related configuration.
	// +optional
	KerberosConfig *WorkflowTemplateSpecPlacementManagedClusterConfigSecurityConfigKerberosConfig `json:"kerberosConfig,omitempty" tf:"kerberos_config"`
}

func (*WorkflowTemplateSpecPlacementManagedClusterConfigSecurityConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacementManagedClusterConfigSecurityConfig.

func (*WorkflowTemplateSpecPlacementManagedClusterConfigSecurityConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecPlacementManagedClusterConfigSecurityConfigCodec

// WorkflowTemplateSpecPlacementManagedClusterConfigSecurityConfigCodec is a
// stateless marker type providing Decode/Encode/IsEmpty methods (listed in
// the package index) for custom jsoniter (de)serialization — presumably
// registered via GetDecoder/GetEncoder; confirm in the package source.
type WorkflowTemplateSpecPlacementManagedClusterConfigSecurityConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecPlacementManagedClusterConfigSecurityConfigCodec) Decode

func (WorkflowTemplateSpecPlacementManagedClusterConfigSecurityConfigCodec) Encode

func (WorkflowTemplateSpecPlacementManagedClusterConfigSecurityConfigCodec) IsEmpty

type WorkflowTemplateSpecPlacementManagedClusterConfigSecurityConfigKerberosConfig

// WorkflowTemplateSpecPlacementManagedClusterConfigSecurityConfigKerberosConfig
// holds the on-cluster Kerberos settings: the enable flag, realm naming,
// KMS-encrypted secret URIs (passwords, keystore/truststore, KDC DB key),
// cross-realm trust parameters, and the ticket-granting-ticket lifetime.
type WorkflowTemplateSpecPlacementManagedClusterConfigSecurityConfigKerberosConfig struct {
	// Optional. The admin server (IP or hostname) for the remote trusted realm in a cross realm trust relationship.
	// +optional
	CrossRealmTrustAdminServer *string `json:"crossRealmTrustAdminServer,omitempty" tf:"cross_realm_trust_admin_server"`
	// Optional. The KDC (IP or hostname) for the remote trusted realm in a cross realm trust relationship.
	// +optional
	CrossRealmTrustKdc *string `json:"crossRealmTrustKdc,omitempty" tf:"cross_realm_trust_kdc"`
	// Optional. The remote realm the Dataproc on-cluster KDC will trust, should the user enable cross realm trust.
	// +optional
	CrossRealmTrustRealm *string `json:"crossRealmTrustRealm,omitempty" tf:"cross_realm_trust_realm"`
	// Optional. The Cloud Storage URI of a KMS encrypted file containing the shared password between the on-cluster Kerberos realm and the remote trusted realm, in a cross realm trust relationship.
	// +optional
	CrossRealmTrustSharedPassword *string `json:"crossRealmTrustSharedPassword,omitempty" tf:"cross_realm_trust_shared_password"`
	// Optional. Flag to indicate whether to Kerberize the cluster (default: false). Set this field to true to enable Kerberos on a cluster.
	// +optional
	EnableKerberos *bool `json:"enableKerberos,omitempty" tf:"enable_kerberos"`
	// Optional. The Cloud Storage URI of a KMS encrypted file containing the master key of the KDC database.
	// +optional
	KdcDbKey *string `json:"kdcDbKey,omitempty" tf:"kdc_db_key"`
	// Optional. The Cloud Storage URI of a KMS encrypted file containing the password to the user provided key. For the self-signed certificate, this password is generated by Dataproc.
	// +optional
	KeyPassword *string `json:"keyPassword,omitempty" tf:"key_password"`
	// Optional. The Cloud Storage URI of the keystore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
	// +optional
	Keystore *string `json:"keystore,omitempty" tf:"keystore"`
	// Optional. The Cloud Storage URI of a KMS encrypted file containing the password to the user provided keystore. For the self-signed certificate, this password is generated by Dataproc.
	// +optional
	KeystorePassword *string `json:"keystorePassword,omitempty" tf:"keystore_password"`
	// Optional. The uri of the KMS key used to encrypt various sensitive files.
	// +optional
	KmsKey *string `json:"kmsKey,omitempty" tf:"kms_key"`
	// Optional. The name of the on-cluster Kerberos realm. If not specified, the uppercased domain of hostnames will be the realm.
	// +optional
	Realm *string `json:"realm,omitempty" tf:"realm"`
	// Optional. The Cloud Storage URI of a KMS encrypted file containing the root principal password.
	// +optional
	RootPrincipalPassword *string `json:"rootPrincipalPassword,omitempty" tf:"root_principal_password"`
	// Optional. The lifetime of the ticket granting ticket, in hours. If not specified, or user specifies 0, then default value 10 will be used.
	// +optional
	TgtLifetimeHours *int64 `json:"tgtLifetimeHours,omitempty" tf:"tgt_lifetime_hours"`
	// Optional. The Cloud Storage URI of the truststore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
	// +optional
	Truststore *string `json:"truststore,omitempty" tf:"truststore"`
	// Optional. The Cloud Storage URI of a KMS encrypted file containing the password to the user provided truststore. For the self-signed certificate, this password is generated by Dataproc.
	// +optional
	TruststorePassword *string `json:"truststorePassword,omitempty" tf:"truststore_password"`
}

func (*WorkflowTemplateSpecPlacementManagedClusterConfigSecurityConfigKerberosConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacementManagedClusterConfigSecurityConfigKerberosConfig.

func (*WorkflowTemplateSpecPlacementManagedClusterConfigSecurityConfigKerberosConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecPlacementManagedClusterConfigSecurityConfigKerberosConfigCodec

// WorkflowTemplateSpecPlacementManagedClusterConfigSecurityConfigKerberosConfigCodec
// is a stateless marker type providing Decode/Encode/IsEmpty methods (listed
// in the package index) for custom jsoniter (de)serialization — presumably
// registered via GetDecoder/GetEncoder; confirm in the package source.
type WorkflowTemplateSpecPlacementManagedClusterConfigSecurityConfigKerberosConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecPlacementManagedClusterConfigSecurityConfigKerberosConfigCodec) Decode

func (WorkflowTemplateSpecPlacementManagedClusterConfigSecurityConfigKerberosConfigCodec) Encode

func (WorkflowTemplateSpecPlacementManagedClusterConfigSecurityConfigKerberosConfigCodec) IsEmpty

type WorkflowTemplateSpecPlacementManagedClusterConfigSoftwareConfig

// WorkflowTemplateSpecPlacementManagedClusterConfigSoftwareConfig selects the
// cluster's software stack: the Dataproc image version, optional components
// to activate, and daemon config-file property overrides.
type WorkflowTemplateSpecPlacementManagedClusterConfigSoftwareConfig struct {
	// Optional. The version of software inside the cluster. It must be one of the supported [Dataproc Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_dataproc_versions), such as "1.2" (including a subminor version, such as "1.2.29"), or the ["preview" version](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#other_versions). If unspecified, it defaults to the latest Debian version.
	// +optional
	ImageVersion *string `json:"imageVersion,omitempty" tf:"image_version"`
	// Optional. The set of components to activate on the cluster.
	// +optional
	OptionalComponents []string `json:"optionalComponents,omitempty" tf:"optional_components"`
	// Optional. The properties to set on daemon config files. Property keys are specified in `prefix:property` format, for example `core:hadoop.tmp.dir`. The following are supported prefixes and their mappings: * capacity-scheduler: `capacity-scheduler.xml` * core: `core-site.xml` * distcp: `distcp-default.xml` * hdfs: `hdfs-site.xml` * hive: `hive-site.xml` * mapred: `mapred-site.xml` * pig: `pig.properties` * spark: `spark-defaults.conf` * yarn: `yarn-site.xml` For more information, see [Cluster properties](https://cloud.google.com/dataproc/docs/concepts/cluster-properties).
	// +optional
	Properties *map[string]string `json:"properties,omitempty" tf:"properties"`
}

func (*WorkflowTemplateSpecPlacementManagedClusterConfigSoftwareConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacementManagedClusterConfigSoftwareConfig.

func (*WorkflowTemplateSpecPlacementManagedClusterConfigSoftwareConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecPlacementManagedClusterConfigSoftwareConfigCodec

// WorkflowTemplateSpecPlacementManagedClusterConfigSoftwareConfigCodec is a
// stateless marker type providing Decode/Encode/IsEmpty methods (listed in
// the package index) for custom jsoniter (de)serialization — presumably
// registered via GetDecoder/GetEncoder; confirm in the package source.
type WorkflowTemplateSpecPlacementManagedClusterConfigSoftwareConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecPlacementManagedClusterConfigSoftwareConfigCodec) Decode

func (WorkflowTemplateSpecPlacementManagedClusterConfigSoftwareConfigCodec) Encode

func (WorkflowTemplateSpecPlacementManagedClusterConfigSoftwareConfigCodec) IsEmpty

type WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfig

// WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfig configures
// the primary worker instance group of the managed cluster: machine/image
// selection, accelerators, disks, instance count, and preemptibility, plus
// output-only instance names and managed-group info.
type WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfig struct {
	// Optional. The Compute Engine accelerator configuration for these instances.
	// +optional
	Accelerators []WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigAccelerators `json:"accelerators,omitempty" tf:"accelerators"`
	// Optional. Disk option config settings.
	// +optional
	DiskConfig *WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigDiskConfig `json:"diskConfig,omitempty" tf:"disk_config"`
	// Optional. The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * `https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/[image-id]` * `projects/[project_id]/global/images/[image-id]` * `image-id` Image family examples. Dataproc will use the most recent image from the family: * `https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/family/[custom-image-family-name]` * `projects/[project_id]/global/images/family/[custom-image-family-name]` If the URI is unspecified, it will be inferred from `SoftwareConfig.image_version` or the system default.
	// +optional
	Image *string `json:"image,omitempty" tf:"image"`
	// Output only. The list of instance names. Dataproc derives the names from `cluster_name`, `num_instances`, and the instance group.
	// +optional
	InstanceNames []string `json:"instanceNames,omitempty" tf:"instance_names"`
	// Output only. Specifies that this instance group contains preemptible instances.
	// +optional
	IsPreemptible *bool `json:"isPreemptible,omitempty" tf:"is_preemptible"`
	// Optional. The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` * `projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` * `n1-standard-2` **Auto Zone Exception**: If you are using the Dataproc [Auto Zone Placement](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the machine type resource, for example, `n1-standard-2`.
	// +optional
	MachineType *string `json:"machineType,omitempty" tf:"machine_type"`
	// Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups.
	// +optional
	ManagedGroupConfig []WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigManagedGroupConfig `json:"managedGroupConfig,omitempty" tf:"managed_group_config"`
	// Optional. Specifies the minimum cpu platform for the Instance Group. See [Dataproc -> Minimum CPU Platform](https://cloud.google.com/dataproc/docs/concepts/compute/dataproc-min-cpu).
	// +optional
	MinCPUPlatform *string `json:"minCPUPlatform,omitempty" tf:"min_cpu_platform"`
	// NOTE(review): the master_config-specific wording below looks copied from
	// the master group docs; for workers the count constraint likely differs —
	// confirm against the Dataproc WorkflowTemplates API reference.
	// Optional. The number of VM instances in the instance group. For [HA cluster](/dataproc/docs/concepts/configuring-clusters/high-availability) [master_config](#FIELDS.master_config) groups, **must be set to 3**. For standard cluster [master_config](#FIELDS.master_config) groups, **must be set to 1**.
	// +optional
	NumInstances *int64 `json:"numInstances,omitempty" tf:"num_instances"`
	// Optional. Specifies the preemptibility of the instance group. The default value for master and worker groups is `NON_PREEMPTIBLE`. This default cannot be changed. The default value for secondary instances is `PREEMPTIBLE`. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE
	// +optional
	Preemptibility *string `json:"preemptibility,omitempty" tf:"preemptibility"`
}

func (*WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfig.

func (*WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigAccelerators

// WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigAccelerators
// describes one Compute Engine accelerator (e.g. GPU) attachment for worker
// instances: the card count and the accelerator type resource.
type WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigAccelerators struct {
	// The number of the accelerator cards of this type exposed to this instance.
	// +optional
	AcceleratorCount *int64 `json:"acceleratorCount,omitempty" tf:"accelerator_count"`
	// Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. See [Compute Engine AcceleratorTypes](https://cloud.google.com/compute/docs/reference/beta/acceleratorTypes). Examples: * `https://www.googleapis.com/compute/beta/projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` * `projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` * `nvidia-tesla-k80` **Auto Zone Exception**: If you are using the Dataproc [Auto Zone Placement](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, `nvidia-tesla-k80`.
	// +optional
	AcceleratorType *string `json:"acceleratorType,omitempty" tf:"accelerator_type"`
}

func (*WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigAccelerators) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigAccelerators.

func (*WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigAccelerators) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigCodec

// WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigCodec is a
// stateless marker type providing Decode/Encode/IsEmpty methods (listed in
// the package index) for custom jsoniter (de)serialization — presumably
// registered via GetDecoder/GetEncoder; confirm in the package source.
type WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigCodec) Decode

func (WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigCodec) Encode

func (WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigCodec) IsEmpty

type WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigDiskConfig

// WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigDiskConfig
// holds disk options for worker instances: boot disk size/type and the
// number of attached local SSDs.
type WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigDiskConfig struct {
	// Optional. Size in GB of the boot disk (default is 500GB).
	// +optional
	BootDiskSizeGb *int64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb"`
	// Optional. Type of the boot disk (default is "pd-standard"). Valid values: "pd-balanced" (Persistent Disk Balanced Solid State Drive), "pd-ssd" (Persistent Disk Solid State Drive), or "pd-standard" (Persistent Disk Hard Disk Drive). See [Disk types](https://cloud.google.com/compute/docs/disks#disk-types).
	// +optional
	BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type"`
	// Optional. Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and [HDFS](https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries.
	// +optional
	NumLocalSsds *int64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds"`
}

func (*WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigDiskConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigDiskConfig.

func (*WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigDiskConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigDiskConfigCodec

// WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigDiskConfigCodec
// is a stateless marker type providing Decode/Encode/IsEmpty methods (listed
// in the package index) for custom jsoniter (de)serialization — presumably
// registered via GetDecoder/GetEncoder; confirm in the package source.
type WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigDiskConfigCodec struct {
}

+k8s:deepcopy-gen=false

func (WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigDiskConfigCodec) Decode

func (WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigDiskConfigCodec) Encode

func (WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigDiskConfigCodec) IsEmpty

type WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigManagedGroupConfig

// WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigManagedGroupConfig
// carries output-only names of the Instance Group Manager and Instance
// Template backing this managed group.
type WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigManagedGroupConfig struct {
	// Output only. The name of the Instance Group Manager for this group.
	// +optional
	InstanceGroupManagerName *string `json:"instanceGroupManagerName,omitempty" tf:"instance_group_manager_name"`
	// Output only. The name of the Instance Template used for the Managed Instance Group.
	// +optional
	InstanceTemplateName *string `json:"instanceTemplateName,omitempty" tf:"instance_template_name"`
}

func (*WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigManagedGroupConfig) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigManagedGroupConfig.

func (*WorkflowTemplateSpecPlacementManagedClusterConfigWorkerConfigManagedGroupConfig) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateSpecResource

// WorkflowTemplateSpecResource mirrors the Terraform resource schema (see the
// tf struct tags) for a Dataproc workflow template. Fields marked "Output only"
// are set by the server; fields without +optional (Jobs, Location, Name,
// Placement) are required on creation.
type WorkflowTemplateSpecResource struct {
	// Terraform operation timeouts (create/update/delete) for this resource.
	Timeouts *base.ResourceTimeout `json:"timeouts,omitempty" tf:"timeouts"`

	// Terraform resource ID; assigned after the resource is provisioned.
	ID string `json:"id,omitempty" tf:"id,omitempty"`

	// Output only. The time template was created.
	// +optional
	CreateTime *string `json:"createTime,omitempty" tf:"create_time"`
	// Optional. Timeout duration for the DAG of jobs, expressed in seconds (see [JSON representation of duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). The timeout duration must be from 10 minutes ("600s") to 24 hours ("86400s"). The timer begins when the first job is submitted. If the workflow is running at the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running on a [managed cluster](/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster), the cluster is deleted.
	// +optional
	DagTimeout *string `json:"dagTimeout,omitempty" tf:"dag_timeout"`
	// Required. The Directed Acyclic Graph of Jobs to submit.
	Jobs []WorkflowTemplateSpecJobs `json:"jobs" tf:"jobs"`
	// Optional. The labels to associate with this template. These labels will be propagated to all jobs and clusters created by the workflow instance. Label **keys** must contain 1 to 63 characters, and must conform to [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). Label **values** may be empty, but, if present, must contain 1 to 63 characters, and must conform to [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be associated with a template.
	// +optional
	Labels *map[string]string `json:"labels,omitempty" tf:"labels"`
	// The location for the resource
	Location *string `json:"location" tf:"location"`
	// Output only. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names. * For `projects.regions.workflowTemplates`, the resource name of the template has the following format: `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` * For `projects.locations.workflowTemplates`, the resource name of the template has the following format: `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
	Name *string `json:"name" tf:"name"`
	// Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated.
	// +optional
	Parameters []WorkflowTemplateSpecParameters `json:"parameters,omitempty" tf:"parameters"`
	// Required. WorkflowTemplate scheduling information.
	Placement *WorkflowTemplateSpecPlacement `json:"placement" tf:"placement"`
	// The project for the resource
	// +optional
	Project *string `json:"project,omitempty" tf:"project"`
	// Output only. The time template was last updated.
	// +optional
	UpdateTime *string `json:"updateTime,omitempty" tf:"update_time"`
	// Output only. The current version of this workflow template.
	// +optional
	// Deprecated
	Version *int64 `json:"version,omitempty" tf:"version"`
}

func (*WorkflowTemplateSpecResource) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpecResource.

func (*WorkflowTemplateSpecResource) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type WorkflowTemplateStatus

// WorkflowTemplateStatus is the observed state of a WorkflowTemplate object,
// following the standard Kubernetes status shape (observed generation, phase,
// and conditions).
type WorkflowTemplateStatus struct {
	// Resource generation, which is updated on mutation by the API Server.
	// +optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty"`
	// Current reconciliation phase of the resource.
	// +optional
	Phase status.Status `json:"phase,omitempty"`
	// Latest available observations of the resource's state.
	// +optional
	Conditions []kmapi.Condition `json:"conditions,omitempty"`
}

func (*WorkflowTemplateStatus) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateStatus.

func (*WorkflowTemplateStatus) DeepCopyInto

func (in *WorkflowTemplateStatus) DeepCopyInto(out *WorkflowTemplateStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL