package v1alpha1 (v0.5.0)
Published: May 8, 2022 License: Apache-2.0 Imports: 16 Imported by: 0

Documentation

Overview

+groupName=bigquery.google.kubeform.com

Index

Constants

This section is empty.

Variables

var (
	// TODO: move SchemeBuilder with zz_generated.deepcopy.go to k8s.io/api.
	// localSchemeBuilder and AddToScheme will stay in k8s.io/kubernetes.
	SchemeBuilder runtime.SchemeBuilder

	AddToScheme = localSchemeBuilder.AddToScheme
)
var SchemeGroupVersion = schema.GroupVersion{Group: bigquery.GroupName, Version: "v1alpha1"}

Functions

func GetDecoder

func GetDecoder() map[string]jsoniter.ValDecoder

func GetEncoder

func GetEncoder() map[string]jsoniter.ValEncoder

func Kind

func Kind(kind string) schema.GroupKind

Kind takes an unqualified kind and returns a Group qualified GroupKind

func Resource

func Resource(resource string) schema.GroupResource

Resource takes an unqualified resource and returns a Group qualified GroupResource
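
A minimal sketch of how these helpers fit together with AddToScheme and SchemeGroupVersion when registering the API types with a runtime.Scheme. The import path used for this package is an assumption; substitute the actual module path.

package main

import (
	"fmt"

	"k8s.io/apimachinery/pkg/runtime"

	// Assumed import path for this package; replace with the real module path.
	bigqueryv1alpha1 "kubeform.dev/provider-google-api/apis/bigquery/v1alpha1"
)

func main() {
	// Register all v1alpha1 types of this group with a scheme so that
	// clients and codecs can recognize them.
	scheme := runtime.NewScheme()
	if err := bigqueryv1alpha1.AddToScheme(scheme); err != nil {
		panic(err)
	}

	// Kind and Resource qualify bare names with the package's group.
	fmt.Println(bigqueryv1alpha1.SchemeGroupVersion)              // bigquery.google.kubeform.com/v1alpha1
	fmt.Println(bigqueryv1alpha1.Kind("DataTransferConfig"))      // DataTransferConfig.bigquery.google.kubeform.com
	fmt.Println(bigqueryv1alpha1.Resource("datatransferconfigs")) // datatransferconfigs.bigquery.google.kubeform.com
}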

Types

type DataTransferConfig

type DataTransferConfig struct {
	metav1.TypeMeta   `json:",inline,omitempty"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	Spec              DataTransferConfigSpec   `json:"spec,omitempty"`
	Status            DataTransferConfigStatus `json:"status,omitempty"`
}

func (*DataTransferConfig) DeepCopy

func (in *DataTransferConfig) DeepCopy() *DataTransferConfig

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataTransferConfig.

func (*DataTransferConfig) DeepCopyInto

func (in *DataTransferConfig) DeepCopyInto(out *DataTransferConfig)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataTransferConfig) DeepCopyObject

func (in *DataTransferConfig) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DataTransferConfig) SetupWebhookWithManager

func (r *DataTransferConfig) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*DataTransferConfig) ValidateCreate

func (r *DataTransferConfig) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*DataTransferConfig) ValidateDelete

func (r *DataTransferConfig) ValidateDelete() error

ValidateDelete implements webhook.Validator so a webhook will be registered for the type

func (*DataTransferConfig) ValidateUpdate

func (r *DataTransferConfig) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type
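
SetupWebhookWithManager and the Validate* methods plug into controller-runtime's webhook machinery. Below is a hedged sketch of wiring them into a manager; the import path and provider layout are assumptions, not the package's documented setup.

package main

import (
	"k8s.io/apimachinery/pkg/runtime"
	ctrl "sigs.k8s.io/controller-runtime"

	// Assumed import path for this package; replace with the real module path.
	bigqueryv1alpha1 "kubeform.dev/provider-google-api/apis/bigquery/v1alpha1"
)

func main() {
	// The manager's scheme must know about the v1alpha1 types.
	scheme := runtime.NewScheme()
	if err := bigqueryv1alpha1.AddToScheme(scheme); err != nil {
		panic(err)
	}

	mgr, err := ctrl.NewManager(ctrl.GetConfigOrDie(), ctrl.Options{Scheme: scheme})
	if err != nil {
		panic(err)
	}

	// Registers ValidateCreate/ValidateUpdate/ValidateDelete for
	// DataTransferConfig with the manager's webhook server.
	if err := (&bigqueryv1alpha1.DataTransferConfig{}).SetupWebhookWithManager(mgr); err != nil {
		panic(err)
	}

	if err := mgr.Start(ctrl.SetupSignalHandler()); err != nil {
		panic(err)
	}
}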

type DataTransferConfigList

type DataTransferConfigList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	// Items is a list of DataTransferConfig CRD objects
	Items []DataTransferConfig `json:"items,omitempty"`
}

DataTransferConfigList is a list of DataTransferConfigs

func (*DataTransferConfigList) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataTransferConfigList.

func (*DataTransferConfigList) DeepCopyInto

func (in *DataTransferConfigList) DeepCopyInto(out *DataTransferConfigList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataTransferConfigList) DeepCopyObject

func (in *DataTransferConfigList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

type DataTransferConfigSpec

type DataTransferConfigSpec struct {
	State *DataTransferConfigSpecResource `json:"state,omitempty" tf:"-"`

	Resource DataTransferConfigSpecResource `json:"resource" tf:"resource"`

	UpdatePolicy base.UpdatePolicy `json:"updatePolicy,omitempty" tf:"-"`

	TerminationPolicy base.TerminationPolicy `json:"terminationPolicy,omitempty" tf:"-"`

	ProviderRef core.LocalObjectReference `json:"providerRef" tf:"-"`

	SecretRef *core.LocalObjectReference `json:"secretRef,omitempty" tf:"-"`

	BackendRef *core.LocalObjectReference `json:"backendRef,omitempty" tf:"-"`
}

func (*DataTransferConfigSpec) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataTransferConfigSpec.

func (*DataTransferConfigSpec) DeepCopyInto

func (in *DataTransferConfigSpec) DeepCopyInto(out *DataTransferConfigSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DataTransferConfigSpecEmailPreferences

type DataTransferConfigSpecEmailPreferences struct {
	// If true, email notifications will be sent on transfer run failures.
	EnableFailureEmail *bool `json:"enableFailureEmail" tf:"enable_failure_email"`
}

func (*DataTransferConfigSpecEmailPreferences) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataTransferConfigSpecEmailPreferences.

func (*DataTransferConfigSpecEmailPreferences) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DataTransferConfigSpecEmailPreferencesCodec

type DataTransferConfigSpecEmailPreferencesCodec struct {
}

+k8s:deepcopy-gen=false

func (DataTransferConfigSpecEmailPreferencesCodec) Decode

func (DataTransferConfigSpecEmailPreferencesCodec) Encode

func (DataTransferConfigSpecEmailPreferencesCodec) IsEmpty

type DataTransferConfigSpecResource

type DataTransferConfigSpecResource struct {
	Timeouts *base.ResourceTimeout `json:"timeouts,omitempty" tf:"timeouts"`

	ID string `json:"id,omitempty" tf:"id,omitempty"`

	// The number of days to look back to automatically refresh the data.
	// For example, if dataRefreshWindowDays = 10, then every day BigQuery
	// reingests data for [today-10, today-1], rather than ingesting data for
	// just [today-1]. Only valid if the data source supports the feature.
	// Set the value to 0 to use the default value.
	// +optional
	DataRefreshWindowDays *int64 `json:"dataRefreshWindowDays,omitempty" tf:"data_refresh_window_days"`
	// The data source id. Cannot be changed once the transfer config is created.
	DataSourceID *string `json:"dataSourceID" tf:"data_source_id"`
	// The BigQuery target dataset id.
	// +optional
	DestinationDatasetID *string `json:"destinationDatasetID,omitempty" tf:"destination_dataset_id"`
	// When set to true, no runs are scheduled for a given transfer.
	// +optional
	Disabled *bool `json:"disabled,omitempty" tf:"disabled"`
	// The user specified display name for the transfer config.
	DisplayName *string `json:"displayName" tf:"display_name"`
	// Email notifications will be sent according to these preferences to the
	// email address of the user who owns this transfer config.
	// +optional
	EmailPreferences *DataTransferConfigSpecEmailPreferences `json:"emailPreferences,omitempty" tf:"email_preferences"`
	// The geographic location where the transfer config should reside.
	// Examples: US, EU, asia-northeast1. The default value is US.
	// +optional
	Location *string `json:"location,omitempty" tf:"location"`
	// The resource name of the transfer config. Transfer config names have the
	// form projects/{projectId}/locations/{location}/transferConfigs/{configId},
	// where configId is usually a UUID, but this is not required.
	// The name is ignored when creating a transfer config.
	// +optional
	Name *string `json:"name,omitempty" tf:"name"`
	// Pub/Sub topic where notifications will be sent after transfer runs
	// associated with this transfer config finish.
	// +optional
	NotificationPubsubTopic *string `json:"notificationPubsubTopic,omitempty" tf:"notification_pubsub_topic"`
	// Parameters specific to each data source. For more information, see the bq tab in the 'Setting up a data transfer'
	// section for each data source. For example, the parameters for Cloud Storage transfers are listed here:
	// https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq
	Params *map[string]string `json:"params" tf:"params"`
	// +optional
	Project *string `json:"project,omitempty" tf:"project"`
	// Data transfer schedule. If the data source does not support a custom
	// schedule, this should be empty. If it is empty, the default value for
	// the data source will be used. The specified times are in UTC. Examples
	// of valid format: 1st,3rd monday of month 15:30, every wed,fri of jan,
	// jun 13:15, and first sunday of quarter 00:00. See more explanation
	// about the format here:
	// https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
	// NOTE: the granularity should be at least 8 hours, or less frequent.
	// +optional
	Schedule *string `json:"schedule,omitempty" tf:"schedule"`
	// Options customizing the data transfer schedule.
	// +optional
	ScheduleOptions *DataTransferConfigSpecScheduleOptions `json:"scheduleOptions,omitempty" tf:"schedule_options"`
	// Different parameters are configured primarily using the 'params' field on this
	// resource. This block contains the parameters which contain secrets or passwords so that they can be marked
	// sensitive and hidden from plan output. The name of the field, e.g. secret_access_key, will be the key
	// in the 'params' map in the API request.
	//
	// Credentials may not be specified in both locations and will cause an error. Changing from one location
	// to a different credential configuration in the config will require an apply to update state.
	// +optional
	SensitiveParams *DataTransferConfigSpecSensitiveParams `json:"sensitiveParams,omitempty" tf:"sensitive_params"`
	// Service account email. If this field is set, the transfer config will
	// be created with this service account's credentials. The requesting user
	// calling this API must have permission to act as this service account.
	// +optional
	ServiceAccountName *string `json:"serviceAccountName,omitempty" tf:"service_account_name"`
}
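
A hedged sketch of constructing a DataTransferConfig in Go with the required resource fields (dataSourceID, displayName, params). The import path, the provider object name, and the scheduled_query parameter values are illustrative assumptions, as is treating ProviderRef as a core/v1 LocalObjectReference.

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

	// Assumed import path for this package; replace with the real module path.
	bigqueryv1alpha1 "kubeform.dev/provider-google-api/apis/bigquery/v1alpha1"
)

// strPtr is a small helper for the many *string fields in the spec.
func strPtr(s string) *string { return &s }

func main() {
	// Illustrative parameters for the scheduled_query data source.
	params := map[string]string{
		"query":                            "SELECT 1",
		"destination_table_name_template":  "my_table",
		"write_disposition":                "WRITE_APPEND",
	}

	cfg := &bigqueryv1alpha1.DataTransferConfig{
		ObjectMeta: metav1.ObjectMeta{Name: "nightly-report", Namespace: "default"},
		Spec: bigqueryv1alpha1.DataTransferConfigSpec{
			// Assumption: ProviderRef names the provider credential object.
			ProviderRef: corev1.LocalObjectReference{Name: "google-provider"},
			Resource: bigqueryv1alpha1.DataTransferConfigSpecResource{
				DataSourceID:         strPtr("scheduled_query"),
				DisplayName:          strPtr("nightly-report"),
				DestinationDatasetID: strPtr("reports"),
				Schedule:             strPtr("every 24 hours"),
				Params:               &params,
			},
		},
	}
	fmt.Println(cfg.Name)
}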

func (*DataTransferConfigSpecResource) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataTransferConfigSpecResource.

func (*DataTransferConfigSpecResource) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DataTransferConfigSpecScheduleOptions

type DataTransferConfigSpecScheduleOptions struct {
	// If true, automatic scheduling of data transfer runs for this
	// configuration will be disabled. The runs can be started on ad-hoc
	// basis using transferConfigs.startManualRuns API. When automatic
	// scheduling is disabled, the TransferConfig.schedule field will
	// be ignored.
	// +optional
	DisableAutoScheduling *bool `json:"disableAutoScheduling,omitempty" tf:"disable_auto_scheduling"`
	// Defines time to stop scheduling transfer runs. A transfer run cannot be
	// scheduled at or after the end time. The end time can be changed at any
	// moment. The time when a data transfer can be triggered manually is not
	// limited by this option.
	// +optional
	EndTime *string `json:"endTime,omitempty" tf:"end_time"`
	// Specifies time to start scheduling transfer runs. The first run will be
	// scheduled at or after the start time according to a recurrence pattern
	// defined in the schedule string. The start time can be changed at any
	// moment. The time when a data transfer can be triggered manually is not
	// limited by this option.
	// +optional
	StartTime *string `json:"startTime,omitempty" tf:"start_time"`
}

func (*DataTransferConfigSpecScheduleOptions) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataTransferConfigSpecScheduleOptions.

func (*DataTransferConfigSpecScheduleOptions) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DataTransferConfigSpecScheduleOptionsCodec

type DataTransferConfigSpecScheduleOptionsCodec struct {
}

+k8s:deepcopy-gen=false

func (DataTransferConfigSpecScheduleOptionsCodec) Decode

func (DataTransferConfigSpecScheduleOptionsCodec) Encode

func (DataTransferConfigSpecScheduleOptionsCodec) IsEmpty

type DataTransferConfigSpecSensitiveParams

type DataTransferConfigSpecSensitiveParams struct {
	// The Secret Access Key of the AWS account transferring data from.
	SecretAccessKey *string `json:"-" sensitive:"true" tf:"secret_access_key"`
}

func (*DataTransferConfigSpecSensitiveParams) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataTransferConfigSpecSensitiveParams.

func (*DataTransferConfigSpecSensitiveParams) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DataTransferConfigSpecSensitiveParamsCodec

type DataTransferConfigSpecSensitiveParamsCodec struct {
}

+k8s:deepcopy-gen=false

func (DataTransferConfigSpecSensitiveParamsCodec) Decode

func (DataTransferConfigSpecSensitiveParamsCodec) Encode

func (DataTransferConfigSpecSensitiveParamsCodec) IsEmpty

type DataTransferConfigStatus

type DataTransferConfigStatus struct {
	// Resource generation, which is updated on mutation by the API Server.
	// +optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty"`
	// +optional
	Phase status.Status `json:"phase,omitempty"`
	// +optional
	Conditions []kmapi.Condition `json:"conditions,omitempty"`
}

func (*DataTransferConfigStatus) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataTransferConfigStatus.

func (*DataTransferConfigStatus) DeepCopyInto

func (in *DataTransferConfigStatus) DeepCopyInto(out *DataTransferConfigStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type Dataset

type Dataset struct {
	metav1.TypeMeta   `json:",inline,omitempty"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	Spec              DatasetSpec   `json:"spec,omitempty"`
	Status            DatasetStatus `json:"status,omitempty"`
}

func (*Dataset) DeepCopy

func (in *Dataset) DeepCopy() *Dataset

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Dataset.

func (*Dataset) DeepCopyInto

func (in *Dataset) DeepCopyInto(out *Dataset)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*Dataset) DeepCopyObject

func (in *Dataset) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*Dataset) SetupWebhookWithManager

func (r *Dataset) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*Dataset) ValidateCreate

func (r *Dataset) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*Dataset) ValidateDelete

func (r *Dataset) ValidateDelete() error

ValidateDelete implements webhook.Validator so a webhook will be registered for the type

func (*Dataset) ValidateUpdate

func (r *Dataset) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

type DatasetAccess

type DatasetAccess struct {
	metav1.TypeMeta   `json:",inline,omitempty"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	Spec              DatasetAccessSpec   `json:"spec,omitempty"`
	Status            DatasetAccessStatus `json:"status,omitempty"`
}

func (*DatasetAccess) DeepCopy

func (in *DatasetAccess) DeepCopy() *DatasetAccess

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccess.

func (*DatasetAccess) DeepCopyInto

func (in *DatasetAccess) DeepCopyInto(out *DatasetAccess)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DatasetAccess) DeepCopyObject

func (in *DatasetAccess) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DatasetAccess) SetupWebhookWithManager

func (r *DatasetAccess) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*DatasetAccess) ValidateCreate

func (r *DatasetAccess) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*DatasetAccess) ValidateDelete

func (r *DatasetAccess) ValidateDelete() error

ValidateDelete implements webhook.Validator so a webhook will be registered for the type

func (*DatasetAccess) ValidateUpdate

func (r *DatasetAccess) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

type DatasetAccessList

type DatasetAccessList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	// Items is a list of DatasetAccess CRD objects
	Items []DatasetAccess `json:"items,omitempty"`
}

DatasetAccessList is a list of DatasetAccess objects

func (*DatasetAccessList) DeepCopy

func (in *DatasetAccessList) DeepCopy() *DatasetAccessList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessList.

func (*DatasetAccessList) DeepCopyInto

func (in *DatasetAccessList) DeepCopyInto(out *DatasetAccessList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DatasetAccessList) DeepCopyObject

func (in *DatasetAccessList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

type DatasetAccessSpec

type DatasetAccessSpec struct {
	State *DatasetAccessSpecResource `json:"state,omitempty" tf:"-"`

	Resource DatasetAccessSpecResource `json:"resource" tf:"resource"`

	UpdatePolicy base.UpdatePolicy `json:"updatePolicy,omitempty" tf:"-"`

	TerminationPolicy base.TerminationPolicy `json:"terminationPolicy,omitempty" tf:"-"`

	ProviderRef core.LocalObjectReference `json:"providerRef" tf:"-"`

	BackendRef *core.LocalObjectReference `json:"backendRef,omitempty" tf:"-"`
}

func (*DatasetAccessSpec) DeepCopy

func (in *DatasetAccessSpec) DeepCopy() *DatasetAccessSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessSpec.

func (*DatasetAccessSpec) DeepCopyInto

func (in *DatasetAccessSpec) DeepCopyInto(out *DatasetAccessSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DatasetAccessSpecDataset added in v0.5.0

type DatasetAccessSpecDataset struct {
	// The dataset this entry applies to
	Dataset *DatasetAccessSpecDatasetDataset `json:"dataset" tf:"dataset"`
	// Which resources in the dataset this entry applies to. Currently, only views are supported,
	// but additional target types may be added in the future. Possible values: VIEWS
	TargetTypes []string `json:"targetTypes" tf:"target_types"`
}

func (*DatasetAccessSpecDataset) DeepCopy added in v0.5.0

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessSpecDataset.

func (*DatasetAccessSpecDataset) DeepCopyInto added in v0.5.0

func (in *DatasetAccessSpecDataset) DeepCopyInto(out *DatasetAccessSpecDataset)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DatasetAccessSpecDatasetCodec added in v0.5.0

type DatasetAccessSpecDatasetCodec struct {
}

+k8s:deepcopy-gen=false

func (DatasetAccessSpecDatasetCodec) Decode added in v0.5.0

func (DatasetAccessSpecDatasetCodec) Encode added in v0.5.0

func (DatasetAccessSpecDatasetCodec) IsEmpty added in v0.5.0

type DatasetAccessSpecDatasetDataset added in v0.5.0

type DatasetAccessSpecDatasetDataset struct {
	// The ID of the dataset containing this table.
	DatasetID *string `json:"datasetID" tf:"dataset_id"`
	// The ID of the project containing this table.
	ProjectID *string `json:"projectID" tf:"project_id"`
}

func (*DatasetAccessSpecDatasetDataset) DeepCopy added in v0.5.0

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessSpecDatasetDataset.

func (*DatasetAccessSpecDatasetDataset) DeepCopyInto added in v0.5.0

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DatasetAccessSpecDatasetDatasetCodec added in v0.5.0

type DatasetAccessSpecDatasetDatasetCodec struct {
}

+k8s:deepcopy-gen=false

func (DatasetAccessSpecDatasetDatasetCodec) Decode added in v0.5.0

func (DatasetAccessSpecDatasetDatasetCodec) Encode added in v0.5.0

func (DatasetAccessSpecDatasetDatasetCodec) IsEmpty added in v0.5.0

type DatasetAccessSpecResource

type DatasetAccessSpecResource struct {
	Timeouts *base.ResourceTimeout `json:"timeouts,omitempty" tf:"timeouts"`

	ID string `json:"id,omitempty" tf:"id,omitempty"`

	// If true, indicates that the iam_member in the config was translated to a different member type by the API, and is stored in state as that member type.
	// +optional
	ApiUpdatedMember *bool `json:"apiUpdatedMember,omitempty" tf:"api_updated_member"`
	// Grants all resources of particular types in a particular dataset read access to the current dataset.
	// +optional
	Dataset *DatasetAccessSpecDataset `json:"dataset,omitempty" tf:"dataset"`
	// A unique ID for this dataset, without the project name. The ID
	// must contain only letters (a-z, A-Z), numbers (0-9), or
	// underscores (_). The maximum length is 1,024 characters.
	DatasetID *string `json:"datasetID" tf:"dataset_id"`
	// A domain to grant access to. Any user signed in with the
	// specified domain will be granted the specified access.
	// +optional
	Domain *string `json:"domain,omitempty" tf:"domain"`
	// An email address of a Google Group to grant access to.
	// +optional
	GroupByEmail *string `json:"groupByEmail,omitempty" tf:"group_by_email"`
	// Some other type of member that appears in the IAM Policy but isn't a user,
	// group, domain, or special group. For example: 'allUsers'
	// +optional
	IamMember *string `json:"iamMember,omitempty" tf:"iam_member"`
	// +optional
	Project *string `json:"project,omitempty" tf:"project"`
	// Describes the rights granted to the user specified by the other
	// member of the access object. Basic, predefined, and custom roles are
	// supported. Predefined roles that have equivalent basic roles are
	// swapped by the API to their basic counterparts, and will show a diff
	// post-create. See
	// [official docs](https://cloud.google.com/bigquery/docs/access-control).
	// +optional
	Role *string `json:"role,omitempty" tf:"role"`
	// A special group to grant access to. Possible values include:
	//
	//
	// * 'projectOwners': Owners of the enclosing project.
	//
	//
	// * 'projectReaders': Readers of the enclosing project.
	//
	//
	// * 'projectWriters': Writers of the enclosing project.
	//
	//
	// * 'allAuthenticatedUsers': All authenticated BigQuery users.
	// +optional
	SpecialGroup *string `json:"specialGroup,omitempty" tf:"special_group"`
	// An email address of a user to grant access to. For example:
	// fred@example.com
	// +optional
	UserByEmail *string `json:"userByEmail,omitempty" tf:"user_by_email"`
	// A view from a different dataset to grant access to. Queries
	// executed against that view will have read access to tables in
	// this dataset. The role field is not required when this field is
	// set. If that view is updated by any user, access to the view
	// needs to be granted again via an update operation.
	// +optional
	View *DatasetAccessSpecView `json:"view,omitempty" tf:"view"`
}
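
Continuing the hedged DataTransferConfig sketch above (same assumed imports and strPtr helper, inside the same function), a DatasetAccess entry granting a single user read access to a dataset might look like this; the dataset ID, role string, and email are placeholders.

	access := &bigqueryv1alpha1.DatasetAccess{
		ObjectMeta: metav1.ObjectMeta{Name: "grant-analyst", Namespace: "default"},
		Spec: bigqueryv1alpha1.DatasetAccessSpec{
			ProviderRef: corev1.LocalObjectReference{Name: "google-provider"},
			Resource: bigqueryv1alpha1.DatasetAccessSpecResource{
				// Typically exactly one grantee (userByEmail, groupByEmail, domain,
				// specialGroup, iamMember, view, or dataset) is set.
				DatasetID:   strPtr("reports"),
				Role:        strPtr("roles/bigquery.dataViewer"),
				UserByEmail: strPtr("fred@example.com"),
			},
		},
	}
	_ = access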

func (*DatasetAccessSpecResource) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessSpecResource.

func (*DatasetAccessSpecResource) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DatasetAccessSpecView

type DatasetAccessSpecView struct {
	// The ID of the dataset containing this table.
	DatasetID *string `json:"datasetID" tf:"dataset_id"`
	// The ID of the project containing this table.
	ProjectID *string `json:"projectID" tf:"project_id"`
	// The ID of the table. The ID must contain only letters (a-z,
	// A-Z), numbers (0-9), or underscores (_). The maximum length
	// is 1,024 characters.
	TableID *string `json:"tableID" tf:"table_id"`
}

func (*DatasetAccessSpecView) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessSpecView.

func (*DatasetAccessSpecView) DeepCopyInto

func (in *DatasetAccessSpecView) DeepCopyInto(out *DatasetAccessSpecView)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DatasetAccessSpecViewCodec

type DatasetAccessSpecViewCodec struct {
}

+k8s:deepcopy-gen=false

func (DatasetAccessSpecViewCodec) Decode

func (DatasetAccessSpecViewCodec) Encode

func (DatasetAccessSpecViewCodec) IsEmpty

type DatasetAccessStatus

type DatasetAccessStatus struct {
	// Resource generation, which is updated on mutation by the API Server.
	// +optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty"`
	// +optional
	Phase status.Status `json:"phase,omitempty"`
	// +optional
	Conditions []kmapi.Condition `json:"conditions,omitempty"`
}

func (*DatasetAccessStatus) DeepCopy

func (in *DatasetAccessStatus) DeepCopy() *DatasetAccessStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessStatus.

func (*DatasetAccessStatus) DeepCopyInto

func (in *DatasetAccessStatus) DeepCopyInto(out *DatasetAccessStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DatasetIamBinding

type DatasetIamBinding struct {
	metav1.TypeMeta   `json:",inline,omitempty"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	Spec              DatasetIamBindingSpec   `json:"spec,omitempty"`
	Status            DatasetIamBindingStatus `json:"status,omitempty"`
}

func (*DatasetIamBinding) DeepCopy

func (in *DatasetIamBinding) DeepCopy() *DatasetIamBinding

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIamBinding.

func (*DatasetIamBinding) DeepCopyInto

func (in *DatasetIamBinding) DeepCopyInto(out *DatasetIamBinding)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DatasetIamBinding) DeepCopyObject

func (in *DatasetIamBinding) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DatasetIamBinding) SetupWebhookWithManager

func (r *DatasetIamBinding) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*DatasetIamBinding) ValidateCreate

func (r *DatasetIamBinding) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*DatasetIamBinding) ValidateDelete

func (r *DatasetIamBinding) ValidateDelete() error

ValidateDelete implements webhook.Validator so a webhook will be registered for the type

func (*DatasetIamBinding) ValidateUpdate

func (r *DatasetIamBinding) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

type DatasetIamBindingList

type DatasetIamBindingList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	// Items is a list of DatasetIamBinding CRD objects
	Items []DatasetIamBinding `json:"items,omitempty"`
}

DatasetIamBindingList is a list of DatasetIamBindings

func (*DatasetIamBindingList) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIamBindingList.

func (*DatasetIamBindingList) DeepCopyInto

func (in *DatasetIamBindingList) DeepCopyInto(out *DatasetIamBindingList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DatasetIamBindingList) DeepCopyObject

func (in *DatasetIamBindingList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

type DatasetIamBindingSpec

type DatasetIamBindingSpec struct {
	State *DatasetIamBindingSpecResource `json:"state,omitempty" tf:"-"`

	Resource DatasetIamBindingSpecResource `json:"resource" tf:"resource"`

	UpdatePolicy base.UpdatePolicy `json:"updatePolicy,omitempty" tf:"-"`

	TerminationPolicy base.TerminationPolicy `json:"terminationPolicy,omitempty" tf:"-"`

	ProviderRef core.LocalObjectReference `json:"providerRef" tf:"-"`

	BackendRef *core.LocalObjectReference `json:"backendRef,omitempty" tf:"-"`
}

func (*DatasetIamBindingSpec) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIamBindingSpec.

func (*DatasetIamBindingSpec) DeepCopyInto

func (in *DatasetIamBindingSpec) DeepCopyInto(out *DatasetIamBindingSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DatasetIamBindingSpecCondition

type DatasetIamBindingSpecCondition struct {
	// +optional
	Description *string `json:"description,omitempty" tf:"description"`
	Expression  *string `json:"expression" tf:"expression"`
	Title       *string `json:"title" tf:"title"`
}

func (*DatasetIamBindingSpecCondition) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIamBindingSpecCondition.

func (*DatasetIamBindingSpecCondition) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DatasetIamBindingSpecConditionCodec

type DatasetIamBindingSpecConditionCodec struct {
}

+k8s:deepcopy-gen=false

func (DatasetIamBindingSpecConditionCodec) Decode

func (DatasetIamBindingSpecConditionCodec) Encode

func (DatasetIamBindingSpecConditionCodec) IsEmpty

type DatasetIamBindingSpecResource

type DatasetIamBindingSpecResource struct {
	ID string `json:"id,omitempty" tf:"id,omitempty"`

	// +optional
	Condition *DatasetIamBindingSpecCondition `json:"condition,omitempty" tf:"condition"`
	DatasetID *string                         `json:"datasetID" tf:"dataset_id"`
	// +optional
	Etag    *string  `json:"etag,omitempty" tf:"etag"`
	Members []string `json:"members" tf:"members"`
	// +optional
	Project *string `json:"project,omitempty" tf:"project"`
	Role    *string `json:"role" tf:"role"`
}
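
In the same hedged style (same assumed imports and helpers), a DatasetIamBinding assigns one role to a list of members on a dataset; following the underlying Terraform resource's semantics it is typically authoritative for that role. Names and members below are placeholders.

	binding := &bigqueryv1alpha1.DatasetIamBinding{
		ObjectMeta: metav1.ObjectMeta{Name: "reports-editors", Namespace: "default"},
		Spec: bigqueryv1alpha1.DatasetIamBindingSpec{
			ProviderRef: corev1.LocalObjectReference{Name: "google-provider"},
			Resource: bigqueryv1alpha1.DatasetIamBindingSpecResource{
				DatasetID: strPtr("reports"),
				Role:      strPtr("roles/bigquery.dataEditor"),
				Members:   []string{"user:fred@example.com", "group:analysts@example.com"},
			},
		},
	}
	_ = binding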

func (*DatasetIamBindingSpecResource) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIamBindingSpecResource.

func (*DatasetIamBindingSpecResource) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DatasetIamBindingStatus

type DatasetIamBindingStatus struct {
	// Resource generation, which is updated on mutation by the API Server.
	// +optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty"`
	// +optional
	Phase status.Status `json:"phase,omitempty"`
	// +optional
	Conditions []kmapi.Condition `json:"conditions,omitempty"`
}

func (*DatasetIamBindingStatus) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIamBindingStatus.

func (*DatasetIamBindingStatus) DeepCopyInto

func (in *DatasetIamBindingStatus) DeepCopyInto(out *DatasetIamBindingStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DatasetIamMember

type DatasetIamMember struct {
	metav1.TypeMeta   `json:",inline,omitempty"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	Spec              DatasetIamMemberSpec   `json:"spec,omitempty"`
	Status            DatasetIamMemberStatus `json:"status,omitempty"`
}

func (*DatasetIamMember) DeepCopy

func (in *DatasetIamMember) DeepCopy() *DatasetIamMember

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIamMember.

func (*DatasetIamMember) DeepCopyInto

func (in *DatasetIamMember) DeepCopyInto(out *DatasetIamMember)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DatasetIamMember) DeepCopyObject

func (in *DatasetIamMember) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DatasetIamMember) SetupWebhookWithManager

func (r *DatasetIamMember) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*DatasetIamMember) ValidateCreate

func (r *DatasetIamMember) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*DatasetIamMember) ValidateDelete

func (r *DatasetIamMember) ValidateDelete() error

ValidateDelete implements webhook.Validator so a webhook will be registered for the type

func (*DatasetIamMember) ValidateUpdate

func (r *DatasetIamMember) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

type DatasetIamMemberList

type DatasetIamMemberList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	// Items is a list of DatasetIamMember CRD objects
	Items []DatasetIamMember `json:"items,omitempty"`
}

DatasetIamMemberList is a list of DatasetIamMembers

func (*DatasetIamMemberList) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIamMemberList.

func (*DatasetIamMemberList) DeepCopyInto

func (in *DatasetIamMemberList) DeepCopyInto(out *DatasetIamMemberList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DatasetIamMemberList) DeepCopyObject

func (in *DatasetIamMemberList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

type DatasetIamMemberSpec

type DatasetIamMemberSpec struct {
	State *DatasetIamMemberSpecResource `json:"state,omitempty" tf:"-"`

	Resource DatasetIamMemberSpecResource `json:"resource" tf:"resource"`

	UpdatePolicy base.UpdatePolicy `json:"updatePolicy,omitempty" tf:"-"`

	TerminationPolicy base.TerminationPolicy `json:"terminationPolicy,omitempty" tf:"-"`

	ProviderRef core.LocalObjectReference `json:"providerRef" tf:"-"`

	BackendRef *core.LocalObjectReference `json:"backendRef,omitempty" tf:"-"`
}

func (*DatasetIamMemberSpec) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIamMemberSpec.

func (*DatasetIamMemberSpec) DeepCopyInto

func (in *DatasetIamMemberSpec) DeepCopyInto(out *DatasetIamMemberSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DatasetIamMemberSpecCondition

type DatasetIamMemberSpecCondition struct {
	// +optional
	Description *string `json:"description,omitempty" tf:"description"`
	Expression  *string `json:"expression" tf:"expression"`
	Title       *string `json:"title" tf:"title"`
}

func (*DatasetIamMemberSpecCondition) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIamMemberSpecCondition.

func (*DatasetIamMemberSpecCondition) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DatasetIamMemberSpecConditionCodec

type DatasetIamMemberSpecConditionCodec struct {
}

+k8s:deepcopy-gen=false

func (DatasetIamMemberSpecConditionCodec) Decode

func (DatasetIamMemberSpecConditionCodec) Encode

func (DatasetIamMemberSpecConditionCodec) IsEmpty

type DatasetIamMemberSpecResource

type DatasetIamMemberSpecResource struct {
	ID string `json:"id,omitempty" tf:"id,omitempty"`

	// +optional
	Condition *DatasetIamMemberSpecCondition `json:"condition,omitempty" tf:"condition"`
	DatasetID *string                        `json:"datasetID" tf:"dataset_id"`
	// +optional
	Etag   *string `json:"etag,omitempty" tf:"etag"`
	Member *string `json:"member" tf:"member"`
	// +optional
	Project *string `json:"project,omitempty" tf:"project"`
	Role    *string `json:"role" tf:"role"`
}

func (*DatasetIamMemberSpecResource) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIamMemberSpecResource.

func (*DatasetIamMemberSpecResource) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DatasetIamMemberStatus

type DatasetIamMemberStatus struct {
	// Resource generation, which is updated on mutation by the API Server.
	// +optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty"`
	// +optional
	Phase status.Status `json:"phase,omitempty"`
	// +optional
	Conditions []kmapi.Condition `json:"conditions,omitempty"`
}

func (*DatasetIamMemberStatus) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIamMemberStatus.

func (*DatasetIamMemberStatus) DeepCopyInto

func (in *DatasetIamMemberStatus) DeepCopyInto(out *DatasetIamMemberStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DatasetIamPolicy

type DatasetIamPolicy struct {
	metav1.TypeMeta   `json:",inline,omitempty"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	Spec              DatasetIamPolicySpec   `json:"spec,omitempty"`
	Status            DatasetIamPolicyStatus `json:"status,omitempty"`
}

func (*DatasetIamPolicy) DeepCopy

func (in *DatasetIamPolicy) DeepCopy() *DatasetIamPolicy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIamPolicy.

func (*DatasetIamPolicy) DeepCopyInto

func (in *DatasetIamPolicy) DeepCopyInto(out *DatasetIamPolicy)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DatasetIamPolicy) DeepCopyObject

func (in *DatasetIamPolicy) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DatasetIamPolicy) SetupWebhookWithManager

func (r *DatasetIamPolicy) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*DatasetIamPolicy) ValidateCreate

func (r *DatasetIamPolicy) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*DatasetIamPolicy) ValidateDelete

func (r *DatasetIamPolicy) ValidateDelete() error

ValidateDelete implements webhook.Validator so a webhook will be registered for the type

func (*DatasetIamPolicy) ValidateUpdate

func (r *DatasetIamPolicy) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

type DatasetIamPolicyList

type DatasetIamPolicyList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	// Items is a list of DatasetIamPolicy CRD objects
	Items []DatasetIamPolicy `json:"items,omitempty"`
}

DatasetIamPolicyList is a list of DatasetIamPolicy objects

func (*DatasetIamPolicyList) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIamPolicyList.

func (*DatasetIamPolicyList) DeepCopyInto

func (in *DatasetIamPolicyList) DeepCopyInto(out *DatasetIamPolicyList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DatasetIamPolicyList) DeepCopyObject

func (in *DatasetIamPolicyList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

type DatasetIamPolicySpec

type DatasetIamPolicySpec struct {
	State *DatasetIamPolicySpecResource `json:"state,omitempty" tf:"-"`

	Resource DatasetIamPolicySpecResource `json:"resource" tf:"resource"`

	UpdatePolicy base.UpdatePolicy `json:"updatePolicy,omitempty" tf:"-"`

	TerminationPolicy base.TerminationPolicy `json:"terminationPolicy,omitempty" tf:"-"`

	ProviderRef core.LocalObjectReference `json:"providerRef" tf:"-"`

	BackendRef *core.LocalObjectReference `json:"backendRef,omitempty" tf:"-"`
}

func (*DatasetIamPolicySpec) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIamPolicySpec.

func (*DatasetIamPolicySpec) DeepCopyInto

func (in *DatasetIamPolicySpec) DeepCopyInto(out *DatasetIamPolicySpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DatasetIamPolicySpecResource

type DatasetIamPolicySpecResource struct {
	ID string `json:"id,omitempty" tf:"id,omitempty"`

	DatasetID *string `json:"datasetID" tf:"dataset_id"`
	// +optional
	Etag       *string `json:"etag,omitempty" tf:"etag"`
	PolicyData *string `json:"policyData" tf:"policy_data"`
	// +optional
	Project *string `json:"project,omitempty" tf:"project"`
}
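
PolicyData carries the full IAM policy as a JSON string (in the underlying Terraform resource it is usually produced by the google_iam_policy data source), so a hedged DatasetIamPolicy sketch, reusing the assumed imports and helpers above, simply embeds that document. The policy contents are placeholders.

	policyJSON := `{"bindings":[{"role":"roles/bigquery.dataViewer","members":["user:fred@example.com"]}]}`

	policy := &bigqueryv1alpha1.DatasetIamPolicy{
		ObjectMeta: metav1.ObjectMeta{Name: "reports-policy", Namespace: "default"},
		Spec: bigqueryv1alpha1.DatasetIamPolicySpec{
			ProviderRef: corev1.LocalObjectReference{Name: "google-provider"},
			Resource: bigqueryv1alpha1.DatasetIamPolicySpecResource{
				DatasetID:  strPtr("reports"),
				PolicyData: strPtr(policyJSON),
			},
		},
	}
	_ = policy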

func (*DatasetIamPolicySpecResource) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIamPolicySpecResource.

func (*DatasetIamPolicySpecResource) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DatasetIamPolicyStatus

type DatasetIamPolicyStatus struct {
	// Resource generation, which is updated on mutation by the API Server.
	// +optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty"`
	// +optional
	Phase status.Status `json:"phase,omitempty"`
	// +optional
	Conditions []kmapi.Condition `json:"conditions,omitempty"`
}

func (*DatasetIamPolicyStatus) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIamPolicyStatus.

func (*DatasetIamPolicyStatus) DeepCopyInto

func (in *DatasetIamPolicyStatus) DeepCopyInto(out *DatasetIamPolicyStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DatasetList

type DatasetList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	// Items is a list of Dataset CRD objects
	Items []Dataset `json:"items,omitempty"`
}

DatasetList is a list of Datasets

func (*DatasetList) DeepCopy

func (in *DatasetList) DeepCopy() *DatasetList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetList.

func (*DatasetList) DeepCopyInto

func (in *DatasetList) DeepCopyInto(out *DatasetList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DatasetList) DeepCopyObject

func (in *DatasetList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

type DatasetSpec

type DatasetSpec struct {
	State *DatasetSpecResource `json:"state,omitempty" tf:"-"`

	Resource DatasetSpecResource `json:"resource" tf:"resource"`

	UpdatePolicy base.UpdatePolicy `json:"updatePolicy,omitempty" tf:"-"`

	TerminationPolicy base.TerminationPolicy `json:"terminationPolicy,omitempty" tf:"-"`

	ProviderRef core.LocalObjectReference `json:"providerRef" tf:"-"`

	BackendRef *core.LocalObjectReference `json:"backendRef,omitempty" tf:"-"`
}

func (*DatasetSpec) DeepCopy

func (in *DatasetSpec) DeepCopy() *DatasetSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetSpec.

func (*DatasetSpec) DeepCopyInto

func (in *DatasetSpec) DeepCopyInto(out *DatasetSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DatasetSpecAccess

type DatasetSpecAccess struct {
	// Grants all resources of particular types in a particular dataset read access to the current dataset.
	// +optional
	Dataset *DatasetSpecAccessDataset `json:"dataset,omitempty" tf:"dataset"`
	// A domain to grant access to. Any user signed in with the
	// specified domain will be granted the specified access.
	// +optional
	Domain *string `json:"domain,omitempty" tf:"domain"`
	// An email address of a Google Group to grant access to.
	// +optional
	GroupByEmail *string `json:"groupByEmail,omitempty" tf:"group_by_email"`
	// Describes the rights granted to the user specified by the other
	// member of the access object. Basic, predefined, and custom roles
	// are supported. Predefined roles that have equivalent basic roles
	// are swapped by the API to their basic counterparts. See
	// [official docs](https://cloud.google.com/bigquery/docs/access-control).
	// +optional
	Role *string `json:"role,omitempty" tf:"role"`
	// A special group to grant access to. Possible values include:
	//
	//
	// * 'projectOwners': Owners of the enclosing project.
	//
	//
	// * 'projectReaders': Readers of the enclosing project.
	//
	//
	// * 'projectWriters': Writers of the enclosing project.
	//
	//
	// * 'allAuthenticatedUsers': All authenticated BigQuery users.
	// +optional
	SpecialGroup *string `json:"specialGroup,omitempty" tf:"special_group"`
	// An email address of a user to grant access to. For example:
	// fred@example.com
	// +optional
	UserByEmail *string `json:"userByEmail,omitempty" tf:"user_by_email"`
	// A view from a different dataset to grant access to. Queries
	// executed against that view will have read access to tables in
	// this dataset. The role field is not required when this field is
	// set. If that view is updated by any user, access to the view
	// needs to be granted again via an update operation.
	// +optional
	View *DatasetSpecAccessView `json:"view,omitempty" tf:"view"`
}

func (*DatasetSpecAccess) DeepCopy

func (in *DatasetSpecAccess) DeepCopy() *DatasetSpecAccess

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetSpecAccess.

func (*DatasetSpecAccess) DeepCopyInto

func (in *DatasetSpecAccess) DeepCopyInto(out *DatasetSpecAccess)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DatasetSpecAccessDataset added in v0.5.0

type DatasetSpecAccessDataset struct {
	// The dataset this entry applies to
	Dataset *DatasetSpecAccessDatasetDataset `json:"dataset" tf:"dataset"`
	// Which resources in the dataset this entry applies to. Currently, only views are supported,
	// but additional target types may be added in the future. Possible values: VIEWS
	TargetTypes []string `json:"targetTypes" tf:"target_types"`
}

func (*DatasetSpecAccessDataset) DeepCopy added in v0.5.0

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetSpecAccessDataset.

func (*DatasetSpecAccessDataset) DeepCopyInto added in v0.5.0

func (in *DatasetSpecAccessDataset) DeepCopyInto(out *DatasetSpecAccessDataset)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DatasetSpecAccessDatasetCodec added in v0.5.0

type DatasetSpecAccessDatasetCodec struct {
}

+k8s:deepcopy-gen=false

func (DatasetSpecAccessDatasetCodec) Decode added in v0.5.0

func (DatasetSpecAccessDatasetCodec) Encode added in v0.5.0

func (DatasetSpecAccessDatasetCodec) IsEmpty added in v0.5.0

type DatasetSpecAccessDatasetDataset added in v0.5.0

type DatasetSpecAccessDatasetDataset struct {
	// The ID of the dataset containing this table.
	DatasetID *string `json:"datasetID" tf:"dataset_id"`
	// The ID of the project containing this table.
	ProjectID *string `json:"projectID" tf:"project_id"`
}

func (*DatasetSpecAccessDatasetDataset) DeepCopy added in v0.5.0

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetSpecAccessDatasetDataset.

func (*DatasetSpecAccessDatasetDataset) DeepCopyInto added in v0.5.0

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DatasetSpecAccessDatasetDatasetCodec added in v0.5.0

type DatasetSpecAccessDatasetDatasetCodec struct {
}

+k8s:deepcopy-gen=false

func (DatasetSpecAccessDatasetDatasetCodec) Decode added in v0.5.0

func (DatasetSpecAccessDatasetDatasetCodec) Encode added in v0.5.0

func (DatasetSpecAccessDatasetDatasetCodec) IsEmpty added in v0.5.0

type DatasetSpecAccessView

type DatasetSpecAccessView struct {
	// The ID of the dataset containing this table.
	DatasetID *string `json:"datasetID" tf:"dataset_id"`
	// The ID of the project containing this table.
	ProjectID *string `json:"projectID" tf:"project_id"`
	// The ID of the table. The ID must contain only letters (a-z,
	// A-Z), numbers (0-9), or underscores (_). The maximum length
	// is 1,024 characters.
	TableID *string `json:"tableID" tf:"table_id"`
}

func (*DatasetSpecAccessView) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetSpecAccessView.

func (*DatasetSpecAccessView) DeepCopyInto

func (in *DatasetSpecAccessView) DeepCopyInto(out *DatasetSpecAccessView)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DatasetSpecAccessViewCodec

type DatasetSpecAccessViewCodec struct {
}

+k8s:deepcopy-gen=false

func (DatasetSpecAccessViewCodec) Decode

func (DatasetSpecAccessViewCodec) Encode

func (DatasetSpecAccessViewCodec) IsEmpty

type DatasetSpecDefaultEncryptionConfiguration

type DatasetSpecDefaultEncryptionConfiguration struct {
	// Describes the Cloud KMS encryption key that will be used to protect the destination
	// BigQuery table. The BigQuery Service Account associated with your project requires
	// access to this encryption key.
	KmsKeyName *string `json:"kmsKeyName" tf:"kms_key_name"`
}

func (*DatasetSpecDefaultEncryptionConfiguration) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetSpecDefaultEncryptionConfiguration.

func (*DatasetSpecDefaultEncryptionConfiguration) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DatasetSpecDefaultEncryptionConfigurationCodec

type DatasetSpecDefaultEncryptionConfigurationCodec struct {
}

+k8s:deepcopy-gen=false

func (DatasetSpecDefaultEncryptionConfigurationCodec) Decode

func (DatasetSpecDefaultEncryptionConfigurationCodec) Encode

func (DatasetSpecDefaultEncryptionConfigurationCodec) IsEmpty

type DatasetSpecResource

type DatasetSpecResource struct {
	Timeouts *base.ResourceTimeout `json:"timeouts,omitempty" tf:"timeouts"`

	ID string `json:"id,omitempty" tf:"id,omitempty"`

	// An array of objects that define dataset access for one or more entities.
	// +optional
	Access []DatasetSpecAccess `json:"access,omitempty" tf:"access"`
	// The time when this dataset was created, in milliseconds since the
	// epoch.
	// +optional
	CreationTime *int64 `json:"creationTime,omitempty" tf:"creation_time"`
	// A unique ID for this dataset, without the project name. The ID
	// must contain only letters (a-z, A-Z), numbers (0-9), or
	// underscores (_). The maximum length is 1,024 characters.
	DatasetID *string `json:"datasetID" tf:"dataset_id"`
	// The default encryption key for all tables in the dataset. Once this property is set,
	// all newly-created partitioned tables in the dataset will have their encryption key set to
	// this value, unless the table creation request (or query) overrides the key.
	// +optional
	DefaultEncryptionConfiguration *DatasetSpecDefaultEncryptionConfiguration `json:"defaultEncryptionConfiguration,omitempty" tf:"default_encryption_configuration"`
	// The default partition expiration for all partitioned tables in
	// the dataset, in milliseconds.
	//
	//
	// Once this property is set, all newly-created partitioned tables in
	// the dataset will have an 'expirationMs' property in the 'timePartitioning'
	// settings set to this value, and changing the value will only
	// affect new tables, not existing ones. The storage in a partition will
	// have an expiration time of its partition time plus this value.
	// Setting this property overrides the use of 'defaultTableExpirationMs'
	// for partitioned tables: only one of 'defaultTableExpirationMs' and
	// 'defaultPartitionExpirationMs' will be used for any new partitioned
	// table. If you provide an explicit 'timePartitioning.expirationMs' when
	// creating or updating a partitioned table, that value takes precedence
	// over the default partition expiration time indicated by this property.
	// +optional
	DefaultPartitionExpirationMs *int64 `json:"defaultPartitionExpirationMs,omitempty" tf:"default_partition_expiration_ms"`
	// The default lifetime of all tables in the dataset, in milliseconds.
	// The minimum value is 3600000 milliseconds (one hour).
	//
	//
	// Once this property is set, all newly-created tables in the dataset
	// will have an 'expirationTime' property set to the creation time plus
	// the value in this property, and changing the value will only affect
	// new tables, not existing ones. When the 'expirationTime' for a given
	// table is reached, that table will be deleted automatically.
	// If a table's 'expirationTime' is modified or removed before the
	// table expires, or if you provide an explicit 'expirationTime' when
	// creating a table, that value takes precedence over the default
	// expiration time indicated by this property.
	// +optional
	DefaultTableExpirationMs *int64 `json:"defaultTableExpirationMs,omitempty" tf:"default_table_expiration_ms"`
	// +optional
	DeleteContentsOnDestroy *bool `json:"deleteContentsOnDestroy,omitempty" tf:"delete_contents_on_destroy"`
	// A user-friendly description of the dataset
	// +optional
	Description *string `json:"description,omitempty" tf:"description"`
	// A hash of the resource.
	// +optional
	Etag *string `json:"etag,omitempty" tf:"etag"`
	// A descriptive name for the dataset
	// +optional
	FriendlyName *string `json:"friendlyName,omitempty" tf:"friendly_name"`
	// The labels associated with this dataset. You can use these to
	// organize and group your datasets
	// +optional
	Labels *map[string]string `json:"labels,omitempty" tf:"labels"`
	// The date when this dataset or any of its tables was last modified, in
	// milliseconds since the epoch.
	// +optional
	LastModifiedTime *int64 `json:"lastModifiedTime,omitempty" tf:"last_modified_time"`
	// The geographic location where the dataset should reside.
	// See [official docs](https://cloud.google.com/bigquery/docs/dataset-locations).
	//
	//
	// There are two types of locations, regional or multi-regional. A regional
	// location is a specific geographic place, such as Tokyo, and a multi-regional
	// location is a large geographic area, such as the United States, that
	// contains at least two geographic places.
	//
	//
	// The default value is multi-regional location 'US'.
	// Changing this forces a new resource to be created.
	// +optional
	Location *string `json:"location,omitempty" tf:"location"`
	// +optional
	Project *string `json:"project,omitempty" tf:"project"`
	// +optional
	SelfLink *string `json:"selfLink,omitempty" tf:"self_link"`
}
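
Finally, a hedged Dataset sketch using a few of the optional fields above, again reusing the assumed imports and strPtr helper; the IDs, labels, and expiration value are placeholders (the documented minimum for defaultTableExpirationMs is 3600000 ms, i.e. one hour).

	int64Ptr := func(i int64) *int64 { return &i }
	labels := map[string]string{"team": "analytics"}

	dataset := &bigqueryv1alpha1.Dataset{
		ObjectMeta: metav1.ObjectMeta{Name: "reports", Namespace: "default"},
		Spec: bigqueryv1alpha1.DatasetSpec{
			ProviderRef: corev1.LocalObjectReference{Name: "google-provider"},
			Resource: bigqueryv1alpha1.DatasetSpecResource{
				DatasetID:                strPtr("reports"),
				Location:                 strPtr("US"),
				Description:              strPtr("Nightly reporting tables"),
				DefaultTableExpirationMs: int64Ptr(30 * 24 * 3600 * 1000), // 30 days, above the 1-hour minimum
				Labels:                   &labels,
			},
		},
	}
	_ = dataset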

func (*DatasetSpecResource) DeepCopy

func (in *DatasetSpecResource) DeepCopy() *DatasetSpecResource

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetSpecResource.

func (*DatasetSpecResource) DeepCopyInto

func (in *DatasetSpecResource) DeepCopyInto(out *DatasetSpecResource)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DatasetStatus

type DatasetStatus struct {
	// Resource generation, which is updated on mutation by the API Server.
	// +optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty"`
	// +optional
	Phase status.Status `json:"phase,omitempty"`
	// +optional
	Conditions []kmapi.Condition `json:"conditions,omitempty"`
}

func (*DatasetStatus) DeepCopy

func (in *DatasetStatus) DeepCopy() *DatasetStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetStatus.

func (*DatasetStatus) DeepCopyInto

func (in *DatasetStatus) DeepCopyInto(out *DatasetStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type Job

type Job struct {
	metav1.TypeMeta   `json:",inline,omitempty"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	Spec              JobSpec   `json:"spec,omitempty"`
	Status            JobStatus `json:"status,omitempty"`
}

func (*Job) DeepCopy

func (in *Job) DeepCopy() *Job

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Job.

func (*Job) DeepCopyInto

func (in *Job) DeepCopyInto(out *Job)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*Job) DeepCopyObject

func (in *Job) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*Job) SetupWebhookWithManager

func (r *Job) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*Job) ValidateCreate

func (r *Job) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*Job) ValidateDelete

func (r *Job) ValidateDelete() error

ValidateDelete implements webhook.Validator so a webhook will be registered for the type

func (*Job) ValidateUpdate

func (r *Job) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type
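
As a usage sketch (assuming a controller-runtime manager has already been constructed and is available as mgr), the Job webhook is registered the same way as any kubebuilder-style webhook:

func setupJobWebhook(mgr ctrl.Manager) error {
	// Registers the validating webhook backed by ValidateCreate,
	// ValidateUpdate and ValidateDelete with the manager's webhook server.
	return (&Job{}).SetupWebhookWithManager(mgr)
}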

type JobList

type JobList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	// Items is a list of Job CRD objects
	Items []Job `json:"items,omitempty"`
}

JobList is a list of Jobs

func (*JobList) DeepCopy

func (in *JobList) DeepCopy() *JobList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobList.

func (*JobList) DeepCopyInto

func (in *JobList) DeepCopyInto(out *JobList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*JobList) DeepCopyObject

func (in *JobList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

type JobSpec

type JobSpec struct {
	State *JobSpecResource `json:"state,omitempty" tf:"-"`

	Resource JobSpecResource `json:"resource" tf:"resource"`

	UpdatePolicy base.UpdatePolicy `json:"updatePolicy,omitempty" tf:"-"`

	TerminationPolicy base.TerminationPolicy `json:"terminationPolicy,omitempty" tf:"-"`

	ProviderRef core.LocalObjectReference `json:"providerRef" tf:"-"`

	BackendRef *core.LocalObjectReference `json:"backendRef,omitempty" tf:"-"`
}

func (*JobSpec) DeepCopy

func (in *JobSpec) DeepCopy() *JobSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpec.

func (*JobSpec) DeepCopyInto

func (in *JobSpec) DeepCopyInto(out *JobSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
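
A minimal sketch of assembling a JobSpec (assuming core refers to k8s.io/api/core/v1; the provider reference name "gcp-provider" is a placeholder, and UpdatePolicy and TerminationPolicy are left at their defaults):

func exampleJobSpec(resource JobSpecResource) JobSpec {
	return JobSpec{
		Resource: resource,
		// ProviderRef names the provider configuration object in the same
		// namespace; "gcp-provider" is illustrative only.
		ProviderRef: core.LocalObjectReference{Name: "gcp-provider"},
	}
}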

type JobSpecCopy

type JobSpecCopy struct {
	// Specifies whether the job is allowed to create new tables. The following values are supported:
	// CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table.
	// CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result.
	// Creation, truncation and append actions occur as one atomic update upon job completion. Default value: "CREATE_IF_NEEDED" Possible values: ["CREATE_IF_NEEDED", "CREATE_NEVER"]
	// +optional
	CreateDisposition *string `json:"createDisposition,omitempty" tf:"create_disposition"`
	// Custom encryption configuration (e.g., Cloud KMS keys)
	// +optional
	DestinationEncryptionConfiguration *JobSpecCopyDestinationEncryptionConfiguration `json:"destinationEncryptionConfiguration,omitempty" tf:"destination_encryption_configuration"`
	// The destination table.
	// +optional
	DestinationTable *JobSpecCopyDestinationTable `json:"destinationTable,omitempty" tf:"destination_table"`
	// Source tables to copy.
	SourceTables []JobSpecCopySourceTables `json:"sourceTables" tf:"source_tables"`
	// Specifies the action that occurs if the destination table already exists. The following values are supported:
	// WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data and uses the schema from the query result.
	// WRITE_APPEND: If the table already exists, BigQuery appends the data to the table.
	// WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result.
	// Each action is atomic and only occurs if BigQuery is able to complete the job successfully.
	// Creation, truncation and append actions occur as one atomic update upon job completion. Default value: "WRITE_EMPTY" Possible values: ["WRITE_TRUNCATE", "WRITE_APPEND", "WRITE_EMPTY"]
	// +optional
	WriteDisposition *string `json:"writeDisposition,omitempty" tf:"write_disposition"`
}

func (*JobSpecCopy) DeepCopy

func (in *JobSpecCopy) DeepCopy() *JobSpecCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecCopy.

func (*JobSpecCopy) DeepCopyInto

func (in *JobSpecCopy) DeepCopyInto(out *JobSpecCopy)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
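
For illustration (a sketch, not generated code; the project, dataset and table IDs are placeholders), a copy configuration that copies one source table into a destination table and lets BigQuery create the destination if needed:

func exampleCopy() *JobSpecCopy {
	strp := func(s string) *string { return &s }

	return &JobSpecCopy{
		CreateDisposition: strp("CREATE_IF_NEEDED"),
		SourceTables: []JobSpecCopySourceTables{
			{
				ProjectID: strp("my-project"),
				DatasetID: strp("src_dataset"),
				TableID:   strp("src_table"),
			},
		},
		DestinationTable: &JobSpecCopyDestinationTable{
			ProjectID: strp("my-project"),
			DatasetID: strp("dst_dataset"),
			TableID:   strp("dst_table"),
		},
		// Fail the job if the destination table already contains data.
		WriteDisposition: strp("WRITE_EMPTY"),
	}
}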

type JobSpecCopyCodec

type JobSpecCopyCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecCopyCodec) Decode

func (JobSpecCopyCodec) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator)

func (JobSpecCopyCodec) Encode

func (JobSpecCopyCodec) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream)

func (JobSpecCopyCodec) IsEmpty

func (JobSpecCopyCodec) IsEmpty(ptr unsafe.Pointer) bool

type JobSpecCopyDestinationEncryptionConfiguration

type JobSpecCopyDestinationEncryptionConfiguration struct {
	// Describes the Cloud KMS encryption key that will be used to protect the destination BigQuery table.
	// The BigQuery Service Account associated with your project requires access to this encryption key.
	KmsKeyName *string `json:"kmsKeyName" tf:"kms_key_name"`
	// Describes the Cloud KMS encryption key version used to protect the destination BigQuery table.
	// +optional
	KmsKeyVersion *string `json:"kmsKeyVersion,omitempty" tf:"kms_key_version"`
}

func (*JobSpecCopyDestinationEncryptionConfiguration) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecCopyDestinationEncryptionConfiguration.

func (*JobSpecCopyDestinationEncryptionConfiguration) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecCopyDestinationEncryptionConfigurationCodec

type JobSpecCopyDestinationEncryptionConfigurationCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecCopyDestinationEncryptionConfigurationCodec) Decode

func (JobSpecCopyDestinationEncryptionConfigurationCodec) Encode

func (JobSpecCopyDestinationEncryptionConfigurationCodec) IsEmpty

type JobSpecCopyDestinationTable

type JobSpecCopyDestinationTable struct {
	// The ID of the dataset containing this table.
	// +optional
	DatasetID *string `json:"datasetID,omitempty" tf:"dataset_id"`
	// The ID of the project containing this table.
	// +optional
	ProjectID *string `json:"projectID,omitempty" tf:"project_id"`
	// The table. Can be specified as '{{table_id}}' if 'project_id' and 'dataset_id' are also set,
	// or of the form 'projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}' if not.
	TableID *string `json:"tableID" tf:"table_id"`
}

func (*JobSpecCopyDestinationTable) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecCopyDestinationTable.

func (*JobSpecCopyDestinationTable) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecCopyDestinationTableCodec

type JobSpecCopyDestinationTableCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecCopyDestinationTableCodec) Decode

func (JobSpecCopyDestinationTableCodec) Encode

func (JobSpecCopyDestinationTableCodec) IsEmpty

type JobSpecCopySourceTables

type JobSpecCopySourceTables struct {
	// The ID of the dataset containing this table.
	// +optional
	DatasetID *string `json:"datasetID,omitempty" tf:"dataset_id"`
	// The ID of the project containing this table.
	// +optional
	ProjectID *string `json:"projectID,omitempty" tf:"project_id"`
	// The table. Can be specified as '{{table_id}}' if 'project_id' and 'dataset_id' are also set,
	// or of the form 'projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}' if not.
	TableID *string `json:"tableID" tf:"table_id"`
}

func (*JobSpecCopySourceTables) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecCopySourceTables.

func (*JobSpecCopySourceTables) DeepCopyInto

func (in *JobSpecCopySourceTables) DeepCopyInto(out *JobSpecCopySourceTables)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecExtract

type JobSpecExtract struct {
	// The compression type to use for exported files. Possible values include GZIP, DEFLATE, SNAPPY, and NONE.
	// The default value is NONE. DEFLATE and SNAPPY are only supported for Avro.
	// +optional
	Compression *string `json:"compression,omitempty" tf:"compression"`
	// The exported file format. Possible values include CSV, NEWLINE_DELIMITED_JSON and AVRO for tables and SAVED_MODEL for models.
	// The default value for tables is CSV. Tables with nested or repeated fields cannot be exported as CSV.
	// The default value for models is SAVED_MODEL.
	// +optional
	DestinationFormat *string `json:"destinationFormat,omitempty" tf:"destination_format"`
	// A list of fully-qualified Google Cloud Storage URIs where the extracted table should be written.
	DestinationUris []string `json:"destinationUris" tf:"destination_uris"`
	// When extracting data in CSV format, this defines the delimiter to use between fields in the exported data.
	// Default is ','
	// +optional
	FieldDelimiter *string `json:"fieldDelimiter,omitempty" tf:"field_delimiter"`
	// Whether to print out a header row in the results. Default is true.
	// +optional
	PrintHeader *bool `json:"printHeader,omitempty" tf:"print_header"`
	// A reference to the model being exported.
	// +optional
	SourceModel *JobSpecExtractSourceModel `json:"sourceModel,omitempty" tf:"source_model"`
	// A reference to the table being exported.
	// +optional
	SourceTable *JobSpecExtractSourceTable `json:"sourceTable,omitempty" tf:"source_table"`
	// Whether to use logical types when extracting to AVRO format.
	// +optional
	UseAvroLogicalTypes *bool `json:"useAvroLogicalTypes,omitempty" tf:"use_avro_logical_types"`
}

func (*JobSpecExtract) DeepCopy

func (in *JobSpecExtract) DeepCopy() *JobSpecExtract

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecExtract.

func (*JobSpecExtract) DeepCopyInto

func (in *JobSpecExtract) DeepCopyInto(out *JobSpecExtract)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
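
A sketch of an extract configuration that exports a table to Cloud Storage as gzip-compressed CSV (bucket, project, dataset and table names are placeholders):

func exampleExtract() *JobSpecExtract {
	strp := func(s string) *string { return &s }
	boolp := func(b bool) *bool { return &b }

	return &JobSpecExtract{
		SourceTable: &JobSpecExtractSourceTable{
			ProjectID: strp("my-project"),
			DatasetID: strp("my_dataset"),
			TableID:   strp("events"),
		},
		DestinationUris:   []string{"gs://my-bucket/exports/events-*.csv.gz"},
		DestinationFormat: strp("CSV"),
		Compression:       strp("GZIP"),
		FieldDelimiter:    strp(","),
		PrintHeader:       boolp(true),
	}
}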

type JobSpecExtractCodec

type JobSpecExtractCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecExtractCodec) Decode

func (JobSpecExtractCodec) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator)

func (JobSpecExtractCodec) Encode

func (JobSpecExtractCodec) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream)

func (JobSpecExtractCodec) IsEmpty

func (JobSpecExtractCodec) IsEmpty(ptr unsafe.Pointer) bool

type JobSpecExtractSourceModel

type JobSpecExtractSourceModel struct {
	// The ID of the dataset containing this model.
	DatasetID *string `json:"datasetID" tf:"dataset_id"`
	// The ID of the model.
	ModelID *string `json:"modelID" tf:"model_id"`
	// The ID of the project containing this model.
	ProjectID *string `json:"projectID" tf:"project_id"`
}

func (*JobSpecExtractSourceModel) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecExtractSourceModel.

func (*JobSpecExtractSourceModel) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecExtractSourceModelCodec

type JobSpecExtractSourceModelCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecExtractSourceModelCodec) Decode

func (JobSpecExtractSourceModelCodec) Encode

func (JobSpecExtractSourceModelCodec) IsEmpty

type JobSpecExtractSourceTable

type JobSpecExtractSourceTable struct {
	// The ID of the dataset containing this table.
	// +optional
	DatasetID *string `json:"datasetID,omitempty" tf:"dataset_id"`
	// The ID of the project containing this table.
	// +optional
	ProjectID *string `json:"projectID,omitempty" tf:"project_id"`
	// The table. Can be specified as '{{table_id}}' if 'project_id' and 'dataset_id' are also set,
	// or of the form 'projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}' if not.
	TableID *string `json:"tableID" tf:"table_id"`
}

func (*JobSpecExtractSourceTable) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecExtractSourceTable.

func (*JobSpecExtractSourceTable) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecExtractSourceTableCodec

type JobSpecExtractSourceTableCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecExtractSourceTableCodec) Decode

func (JobSpecExtractSourceTableCodec) Encode

func (JobSpecExtractSourceTableCodec) IsEmpty

type JobSpecLoad

type JobSpecLoad struct {
	// Accept rows that are missing trailing optional columns. The missing values are treated as nulls.
	// If false, records with missing trailing columns are treated as bad records, and if there are too many bad records,
	// an invalid error is returned in the job result. The default value is false. Only applicable to CSV, ignored for other formats.
	// +optional
	AllowJaggedRows *bool `json:"allowJaggedRows,omitempty" tf:"allow_jagged_rows"`
	// Indicates if BigQuery should allow quoted data sections that contain newline characters in a CSV file.
	// The default value is false.
	// +optional
	AllowQuotedNewlines *bool `json:"allowQuotedNewlines,omitempty" tf:"allow_quoted_newlines"`
	// Indicates if we should automatically infer the options and schema for CSV and JSON sources.
	// +optional
	Autodetect *bool `json:"autodetect,omitempty" tf:"autodetect"`
	// Specifies whether the job is allowed to create new tables. The following values are supported:
	// CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table.
	// CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result.
	// Creation, truncation and append actions occur as one atomic update upon job completion. Default value: "CREATE_IF_NEEDED" Possible values: ["CREATE_IF_NEEDED", "CREATE_NEVER"]
	// +optional
	CreateDisposition *string `json:"createDisposition,omitempty" tf:"create_disposition"`
	// Custom encryption configuration (e.g., Cloud KMS keys)
	// +optional
	DestinationEncryptionConfiguration *JobSpecLoadDestinationEncryptionConfiguration `json:"destinationEncryptionConfiguration,omitempty" tf:"destination_encryption_configuration"`
	// The destination table to load the data into.
	DestinationTable *JobSpecLoadDestinationTable `json:"destinationTable" tf:"destination_table"`
	// The character encoding of the data. The supported values are UTF-8 or ISO-8859-1.
	// The default value is UTF-8. BigQuery decodes the data after the raw, binary data
	// has been split using the values of the quote and fieldDelimiter properties.
	// +optional
	Encoding *string `json:"encoding,omitempty" tf:"encoding"`
	// The separator for fields in a CSV file. The separator can be any ISO-8859-1 single-byte character.
	// To use a character in the range 128-255, you must encode the character as UTF8. BigQuery converts
	// the string to ISO-8859-1 encoding, and then uses the first byte of the encoded string to split the
	// data in its raw, binary state. BigQuery also supports the escape sequence "\t" to specify a tab separator.
	// The default value is a comma (',').
	// +optional
	FieldDelimiter *string `json:"fieldDelimiter,omitempty" tf:"field_delimiter"`
	// Indicates if BigQuery should allow extra values that are not represented in the table schema.
	// If true, the extra values are ignored. If false, records with extra columns are treated as bad records,
	// and if there are too many bad records, an invalid error is returned in the job result.
	// The default value is false. The sourceFormat property determines what BigQuery treats as an extra value:
	// CSV: Trailing columns
	// JSON: Named values that don't match any column names
	// +optional
	IgnoreUnknownValues *bool `json:"ignoreUnknownValues,omitempty" tf:"ignore_unknown_values"`
	// The maximum number of bad records that BigQuery can ignore when running the job. If the number of bad records exceeds this value,
	// an invalid error is returned in the job result. The default value is 0, which requires that all records are valid.
	// +optional
	MaxBadRecords *int64 `json:"maxBadRecords,omitempty" tf:"max_bad_records"`
	// Specifies a string that represents a null value in a CSV file. For example, if you specify "\\N", BigQuery interprets "\\N" as a null value
	// when loading a CSV file. The default value is the empty string. If you set this property to a custom value, BigQuery throws an error if an
	// empty string is present for all data types except for STRING and BYTE. For STRING and BYTE columns, BigQuery interprets the empty string as
	// an empty value.
	// +optional
	NullMarker *string `json:"nullMarker,omitempty" tf:"null_marker"`
	// If sourceFormat is set to "DATASTORE_BACKUP", indicates which entity properties to load into BigQuery from a Cloud Datastore backup.
	// Property names are case sensitive and must be top-level properties. If no properties are specified, BigQuery loads all properties.
	// If any named property isn't found in the Cloud Datastore backup, an invalid error is returned in the job result.
	// +optional
	ProjectionFields []string `json:"projectionFields,omitempty" tf:"projection_fields"`
	// The value that is used to quote data sections in a CSV file. BigQuery converts the string to ISO-8859-1 encoding,
	// and then uses the first byte of the encoded string to split the data in its raw, binary state.
	// The default value is a double-quote ('"'). If your data does not contain quoted sections, set the property value to an empty string.
	// If your data contains quoted newline characters, you must also set the allowQuotedNewlines property to true.
	// +optional
	Quote *string `json:"quote,omitempty" tf:"quote"`
	// Allows the schema of the destination table to be updated as a side effect of the load job if a schema is autodetected or
	// supplied in the job configuration. Schema update options are supported in two cases: when writeDisposition is WRITE_APPEND;
	// when writeDisposition is WRITE_TRUNCATE and the destination table is a partition of a table, specified by partition decorators.
	// For normal tables, WRITE_TRUNCATE will always overwrite the schema. One or more of the following values are specified:
	// ALLOW_FIELD_ADDITION: allow adding a nullable field to the schema.
	// ALLOW_FIELD_RELAXATION: allow relaxing a required field in the original schema to nullable.
	// +optional
	SchemaUpdateOptions []string `json:"schemaUpdateOptions,omitempty" tf:"schema_update_options"`
	// The number of rows at the top of a CSV file that BigQuery will skip when loading the data.
	// The default value is 0. This property is useful if you have header rows in the file that should be skipped.
	// When autodetect is on, the behavior is the following:
	// skipLeadingRows unspecified - Autodetect tries to detect headers in the first row. If they are not detected,
	// the row is read as data. Otherwise data is read starting from the second row.
	// skipLeadingRows is 0 - Instructs autodetect that there are no headers and data should be read starting from the first row.
	// skipLeadingRows = N > 0 - Autodetect skips N-1 rows and tries to detect headers in row N. If headers are not detected,
	// row N is just skipped. Otherwise row N is used to extract column names for the detected schema.
	// +optional
	SkipLeadingRows *int64 `json:"skipLeadingRows,omitempty" tf:"skip_leading_rows"`
	// The format of the data files. For CSV files, specify "CSV". For datastore backups, specify "DATASTORE_BACKUP".
	// For newline-delimited JSON, specify "NEWLINE_DELIMITED_JSON". For Avro, specify "AVRO". For parquet, specify "PARQUET".
	// For orc, specify "ORC". [Beta] For Bigtable, specify "BIGTABLE".
	// The default value is CSV.
	// +optional
	SourceFormat *string `json:"sourceFormat,omitempty" tf:"source_format"`
	// The fully-qualified URIs that point to your data in Google Cloud.
	// For Google Cloud Storage URIs: Each URI can contain one '*' wildcard character
	// and it must come after the 'bucket' name. Size limits related to load jobs apply
	// to external data sources. For Google Cloud Bigtable URIs: Exactly one URI can be
	// specified and it has to be a fully specified and valid HTTPS URL for a Google Cloud Bigtable table.
	// For Google Cloud Datastore backups: Exactly one URI can be specified. Also, the '*' wildcard character is not allowed.
	SourceUris []string `json:"sourceUris" tf:"source_uris"`
	// Time-based partitioning specification for the destination table.
	// +optional
	TimePartitioning *JobSpecLoadTimePartitioning `json:"timePartitioning,omitempty" tf:"time_partitioning"`
	// Specifies the action that occurs if the destination table already exists. The following values are supported:
	// WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data and uses the schema from the query result.
	// WRITE_APPEND: If the table already exists, BigQuery appends the data to the table.
	// WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result.
	// Each action is atomic and only occurs if BigQuery is able to complete the job successfully.
	// Creation, truncation and append actions occur as one atomic update upon job completion. Default value: "WRITE_EMPTY" Possible values: ["WRITE_TRUNCATE", "WRITE_APPEND", "WRITE_EMPTY"]
	// +optional
	WriteDisposition *string `json:"writeDisposition,omitempty" tf:"write_disposition"`
}

func (*JobSpecLoad) DeepCopy

func (in *JobSpecLoad) DeepCopy() *JobSpecLoad

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecLoad.

func (*JobSpecLoad) DeepCopyInto

func (in *JobSpecLoad) DeepCopyInto(out *JobSpecLoad)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
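
A sketch of a load configuration that appends CSV files from Cloud Storage to a destination table, skipping one header row per file (URIs and IDs are placeholders):

func exampleLoad() *JobSpecLoad {
	strp := func(s string) *string { return &s }
	i64p := func(i int64) *int64 { return &i }

	return &JobSpecLoad{
		SourceUris:   []string{"gs://my-bucket/data/*.csv"},
		SourceFormat: strp("CSV"),
		DestinationTable: &JobSpecLoadDestinationTable{
			ProjectID: strp("my-project"),
			DatasetID: strp("my_dataset"),
			TableID:   strp("events"),
		},
		// One header row at the top of each source file.
		SkipLeadingRows:  i64p(1),
		WriteDisposition: strp("WRITE_APPEND"),
	}
}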

type JobSpecLoadCodec

type JobSpecLoadCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecLoadCodec) Decode

func (JobSpecLoadCodec) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator)

func (JobSpecLoadCodec) Encode

func (JobSpecLoadCodec) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream)

func (JobSpecLoadCodec) IsEmpty

func (JobSpecLoadCodec) IsEmpty(ptr unsafe.Pointer) bool

type JobSpecLoadDestinationEncryptionConfiguration

type JobSpecLoadDestinationEncryptionConfiguration struct {
	// Describes the Cloud KMS encryption key that will be used to protect the destination BigQuery table.
	// The BigQuery Service Account associated with your project requires access to this encryption key.
	KmsKeyName *string `json:"kmsKeyName" tf:"kms_key_name"`
	// Describes the Cloud KMS encryption key version used to protect the destination BigQuery table.
	// +optional
	KmsKeyVersion *string `json:"kmsKeyVersion,omitempty" tf:"kms_key_version"`
}

func (*JobSpecLoadDestinationEncryptionConfiguration) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecLoadDestinationEncryptionConfiguration.

func (*JobSpecLoadDestinationEncryptionConfiguration) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecLoadDestinationEncryptionConfigurationCodec

type JobSpecLoadDestinationEncryptionConfigurationCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecLoadDestinationEncryptionConfigurationCodec) Decode

func (JobSpecLoadDestinationEncryptionConfigurationCodec) Encode

func (JobSpecLoadDestinationEncryptionConfigurationCodec) IsEmpty

type JobSpecLoadDestinationTable

type JobSpecLoadDestinationTable struct {
	// The ID of the dataset containing this table.
	// +optional
	DatasetID *string `json:"datasetID,omitempty" tf:"dataset_id"`
	// The ID of the project containing this table.
	// +optional
	ProjectID *string `json:"projectID,omitempty" tf:"project_id"`
	// The table. Can be specified as '{{table_id}}' if 'project_id' and 'dataset_id' are also set,
	// or of the form 'projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}' if not.
	TableID *string `json:"tableID" tf:"table_id"`
}

func (*JobSpecLoadDestinationTable) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecLoadDestinationTable.

func (*JobSpecLoadDestinationTable) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecLoadDestinationTableCodec

type JobSpecLoadDestinationTableCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecLoadDestinationTableCodec) Decode

func (JobSpecLoadDestinationTableCodec) Encode

func (JobSpecLoadDestinationTableCodec) IsEmpty

type JobSpecLoadTimePartitioning

type JobSpecLoadTimePartitioning struct {
	// Number of milliseconds for which to keep the storage for a partition. A wrapper is used here because 0 is an invalid value.
	// +optional
	ExpirationMs *string `json:"expirationMs,omitempty" tf:"expiration_ms"`
	// If not set, the table is partitioned by pseudo column '_PARTITIONTIME'; if set, the table is partitioned by this field.
	// The field must be a top-level TIMESTAMP or DATE field. Its mode must be NULLABLE or REQUIRED.
	// A wrapper is used here because an empty string is an invalid value.
	// +optional
	Field *string `json:"field,omitempty" tf:"field"`
	// The only type supported is DAY, which will generate one partition per day. Providing an empty string used to cause an error,
	// but in OnePlatform the field will be treated as unset.
	Type *string `json:"type" tf:"type"`
}

func (*JobSpecLoadTimePartitioning) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecLoadTimePartitioning.

func (*JobSpecLoadTimePartitioning) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecLoadTimePartitioningCodec

type JobSpecLoadTimePartitioningCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecLoadTimePartitioningCodec) Decode

func (JobSpecLoadTimePartitioningCodec) Encode

func (JobSpecLoadTimePartitioningCodec) IsEmpty

type JobSpecQuery

type JobSpecQuery struct {
	// If true and query uses legacy SQL dialect, allows the query to produce arbitrarily large result tables at a slight cost in performance.
	// Requires destinationTable to be set. For standard SQL queries, this flag is ignored and large results are always allowed.
	// However, you must still set destinationTable when result size exceeds the allowed maximum response size.
	// +optional
	AllowLargeResults *bool `json:"allowLargeResults,omitempty" tf:"allow_large_results"`
	// Specifies whether the job is allowed to create new tables. The following values are supported:
	// CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table.
	// CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result.
	// Creation, truncation and append actions occur as one atomic update upon job completion. Default value: "CREATE_IF_NEEDED" Possible values: ["CREATE_IF_NEEDED", "CREATE_NEVER"]
	// +optional
	CreateDisposition *string `json:"createDisposition,omitempty" tf:"create_disposition"`
	// Specifies the default dataset to use for unqualified table names in the query. Note that this does not alter behavior of unqualified dataset names.
	// +optional
	DefaultDataset *JobSpecQueryDefaultDataset `json:"defaultDataset,omitempty" tf:"default_dataset"`
	// Custom encryption configuration (e.g., Cloud KMS keys)
	// +optional
	DestinationEncryptionConfiguration *JobSpecQueryDestinationEncryptionConfiguration `json:"destinationEncryptionConfiguration,omitempty" tf:"destination_encryption_configuration"`
	// Describes the table where the query results should be stored.
	// This property must be set for large results that exceed the maximum response size.
	// For queries that produce anonymous (cached) results, this field will be populated by BigQuery.
	// +optional
	DestinationTable *JobSpecQueryDestinationTable `json:"destinationTable,omitempty" tf:"destination_table"`
	// If true and query uses legacy SQL dialect, flattens all nested and repeated fields in the query results.
	// allowLargeResults must be true if this is set to false. For standard SQL queries, this flag is ignored and results are never flattened.
	// +optional
	FlattenResults *bool `json:"flattenResults,omitempty" tf:"flatten_results"`
	// Limits the billing tier for this job. Queries that have resource usage beyond this tier will fail (without incurring a charge).
	// If unspecified, this will be set to your project default.
	// +optional
	MaximumBillingTier *int64 `json:"maximumBillingTier,omitempty" tf:"maximum_billing_tier"`
	// Limits the bytes billed for this job. Queries that will have bytes billed beyond this limit will fail (without incurring a charge).
	// If unspecified, this will be set to your project default.
	// +optional
	MaximumBytesBilled *string `json:"maximumBytesBilled,omitempty" tf:"maximum_bytes_billed"`
	// Standard SQL only. Set to POSITIONAL to use positional (?) query parameters or to NAMED to use named (@myparam) query parameters in this query.
	// +optional
	ParameterMode *string `json:"parameterMode,omitempty" tf:"parameter_mode"`
	// Specifies a priority for the query. Default value: "INTERACTIVE" Possible values: ["INTERACTIVE", "BATCH"]
	// +optional
	Priority *string `json:"priority,omitempty" tf:"priority"`
	// SQL query text to execute. The useLegacySql field can be used to indicate whether the query uses legacy SQL or standard SQL.
	// *NOTE*: queries containing [DML language](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-manipulation-language)
	// ('DELETE', 'UPDATE', 'MERGE', 'INSERT') must specify 'create_disposition = ""' and 'write_disposition = ""'.
	Query *string `json:"query" tf:"query"`
	// Allows the schema of the destination table to be updated as a side effect of the query job.
	// Schema update options are supported in two cases: when writeDisposition is WRITE_APPEND;
	// when writeDisposition is WRITE_TRUNCATE and the destination table is a partition of a table,
	// specified by partition decorators. For normal tables, WRITE_TRUNCATE will always overwrite the schema.
	// One or more of the following values are specified:
	// ALLOW_FIELD_ADDITION: allow adding a nullable field to the schema.
	// ALLOW_FIELD_RELAXATION: allow relaxing a required field in the original schema to nullable.
	// +optional
	SchemaUpdateOptions []string `json:"schemaUpdateOptions,omitempty" tf:"schema_update_options"`
	// Options controlling the execution of scripts.
	// +optional
	ScriptOptions *JobSpecQueryScriptOptions `json:"scriptOptions,omitempty" tf:"script_options"`
	// Specifies whether to use BigQuery's legacy SQL dialect for this query. The default value is true.
	// If set to false, the query will use BigQuery's standard SQL.
	// +optional
	UseLegacySQL *bool `json:"useLegacySQL,omitempty" tf:"use_legacy_sql"`
	// Whether to look for the result in the query cache. The query cache is a best-effort cache that will be flushed whenever
	// tables in the query are modified. Moreover, the query cache is only available when a query does not have a destination table specified.
	// The default value is true.
	// +optional
	UseQueryCache *bool `json:"useQueryCache,omitempty" tf:"use_query_cache"`
	// Describes user-defined function resources used in the query.
	// +optional
	UserDefinedFunctionResources []JobSpecQueryUserDefinedFunctionResources `json:"userDefinedFunctionResources,omitempty" tf:"user_defined_function_resources"`
	// Specifies the action that occurs if the destination table already exists. The following values are supported:
	// WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data and uses the schema from the query result.
	// WRITE_APPEND: If the table already exists, BigQuery appends the data to the table.
	// WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result.
	// Each action is atomic and only occurs if BigQuery is able to complete the job successfully.
	// Creation, truncation and append actions occur as one atomic update upon job completion. Default value: "WRITE_EMPTY" Possible values: ["WRITE_TRUNCATE", "WRITE_APPEND", "WRITE_EMPTY"]
	// +optional
	WriteDisposition *string `json:"writeDisposition,omitempty" tf:"write_disposition"`
}

func (*JobSpecQuery) DeepCopy

func (in *JobSpecQuery) DeepCopy() *JobSpecQuery

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecQuery.

func (*JobSpecQuery) DeepCopyInto

func (in *JobSpecQuery) DeepCopyInto(out *JobSpecQuery)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
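
A sketch of a standard-SQL query configuration that writes its result into a destination table (all IDs are placeholders). UseLegacySQL is set to false explicitly because the API default is true:

func exampleQuery() *JobSpecQuery {
	strp := func(s string) *string { return &s }
	boolp := func(b bool) *bool { return &b }

	return &JobSpecQuery{
		Query:        strp("SELECT country, COUNT(*) AS n FROM `my-project.my_dataset.events` GROUP BY country"),
		UseLegacySQL: boolp(false),
		DestinationTable: &JobSpecQueryDestinationTable{
			ProjectID: strp("my-project"),
			DatasetID: strp("my_dataset"),
			TableID:   strp("events_by_country"),
		},
		WriteDisposition: strp("WRITE_TRUNCATE"),
		Priority:         strp("BATCH"),
	}
}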

type JobSpecQueryCodec

type JobSpecQueryCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecQueryCodec) Decode

func (JobSpecQueryCodec) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator)

func (JobSpecQueryCodec) Encode

func (JobSpecQueryCodec) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream)

func (JobSpecQueryCodec) IsEmpty

func (JobSpecQueryCodec) IsEmpty(ptr unsafe.Pointer) bool

type JobSpecQueryDefaultDataset

type JobSpecQueryDefaultDataset struct {
	// The dataset. Can be specified as '{{dataset_id}}' if 'project_id' is also set,
	// or of the form 'projects/{{project}}/datasets/{{dataset_id}}' if not.
	DatasetID *string `json:"datasetID" tf:"dataset_id"`
	// The ID of the project containing this table.
	// +optional
	ProjectID *string `json:"projectID,omitempty" tf:"project_id"`
}

func (*JobSpecQueryDefaultDataset) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecQueryDefaultDataset.

func (*JobSpecQueryDefaultDataset) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecQueryDefaultDatasetCodec

type JobSpecQueryDefaultDatasetCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecQueryDefaultDatasetCodec) Decode

func (JobSpecQueryDefaultDatasetCodec) Encode

func (JobSpecQueryDefaultDatasetCodec) IsEmpty

type JobSpecQueryDestinationEncryptionConfiguration

type JobSpecQueryDestinationEncryptionConfiguration struct {
	// Describes the Cloud KMS encryption key that will be used to protect the destination BigQuery table.
	// The BigQuery Service Account associated with your project requires access to this encryption key.
	KmsKeyName *string `json:"kmsKeyName" tf:"kms_key_name"`
	// Describes the Cloud KMS encryption key version used to protect the destination BigQuery table.
	// +optional
	KmsKeyVersion *string `json:"kmsKeyVersion,omitempty" tf:"kms_key_version"`
}

func (*JobSpecQueryDestinationEncryptionConfiguration) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecQueryDestinationEncryptionConfiguration.

func (*JobSpecQueryDestinationEncryptionConfiguration) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecQueryDestinationEncryptionConfigurationCodec

type JobSpecQueryDestinationEncryptionConfigurationCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecQueryDestinationEncryptionConfigurationCodec) Decode

func (JobSpecQueryDestinationEncryptionConfigurationCodec) Encode

func (JobSpecQueryDestinationEncryptionConfigurationCodec) IsEmpty

type JobSpecQueryDestinationTable

type JobSpecQueryDestinationTable struct {
	// The ID of the dataset containing this table.
	// +optional
	DatasetID *string `json:"datasetID,omitempty" tf:"dataset_id"`
	// The ID of the project containing this table.
	// +optional
	ProjectID *string `json:"projectID,omitempty" tf:"project_id"`
	// The table. Can be specified as '{{table_id}}' if 'project_id' and 'dataset_id' are also set,
	// or of the form 'projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}' if not.
	TableID *string `json:"tableID" tf:"table_id"`
}

func (*JobSpecQueryDestinationTable) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecQueryDestinationTable.

func (*JobSpecQueryDestinationTable) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecQueryDestinationTableCodec

type JobSpecQueryDestinationTableCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecQueryDestinationTableCodec) Decode

func (JobSpecQueryDestinationTableCodec) Encode

func (JobSpecQueryDestinationTableCodec) IsEmpty

type JobSpecQueryScriptOptions

type JobSpecQueryScriptOptions struct {
	// Determines which statement in the script represents the "key result",
	// used to populate the schema and query results of the script job. Possible values: ["LAST", "FIRST_SELECT"]
	// +optional
	KeyResultStatement *string `json:"keyResultStatement,omitempty" tf:"key_result_statement"`
	// Limit on the number of bytes billed per statement. Exceeding this budget results in an error.
	// +optional
	StatementByteBudget *string `json:"statementByteBudget,omitempty" tf:"statement_byte_budget"`
	// Timeout period for each statement in a script.
	// +optional
	StatementTimeoutMs *string `json:"statementTimeoutMs,omitempty" tf:"statement_timeout_ms"`
}

func (*JobSpecQueryScriptOptions) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecQueryScriptOptions.

func (*JobSpecQueryScriptOptions) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecQueryScriptOptionsCodec

type JobSpecQueryScriptOptionsCodec struct {
}

+k8s:deepcopy-gen=false

func (JobSpecQueryScriptOptionsCodec) Decode

func (JobSpecQueryScriptOptionsCodec) Encode

func (JobSpecQueryScriptOptionsCodec) IsEmpty

type JobSpecQueryUserDefinedFunctionResources

type JobSpecQueryUserDefinedFunctionResources struct {
	// An inline resource that contains code for a user-defined function (UDF).
	// Providing an inline code resource is equivalent to providing a URI for a file containing the same code.
	// +optional
	InlineCode *string `json:"inlineCode,omitempty" tf:"inline_code"`
	// A code resource to load from a Google Cloud Storage URI (gs://bucket/path).
	// +optional
	ResourceURI *string `json:"resourceURI,omitempty" tf:"resource_uri"`
}

func (*JobSpecQueryUserDefinedFunctionResources) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecQueryUserDefinedFunctionResources.

func (*JobSpecQueryUserDefinedFunctionResources) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecResource

type JobSpecResource struct {
	Timeouts *base.ResourceTimeout `json:"timeouts,omitempty" tf:"timeouts"`

	ID string `json:"id,omitempty" tf:"id,omitempty"`

	// Copies a table.
	// +optional
	Copy *JobSpecCopy `json:"copy,omitempty" tf:"copy"`
	// Configures an extract job.
	// +optional
	Extract *JobSpecExtract `json:"extract,omitempty" tf:"extract"`
	// The ID of the job. The ID must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), or dashes (-). The maximum length is 1,024 characters.
	JobID *string `json:"jobID" tf:"job_id"`
	// Job timeout in milliseconds. If this time limit is exceeded, BigQuery may attempt to terminate the job.
	// +optional
	JobTimeoutMs *string `json:"jobTimeoutMs,omitempty" tf:"job_timeout_ms"`
	// The type of the job.
	// +optional
	JobType *string `json:"jobType,omitempty" tf:"job_type"`
	// The labels associated with this job. You can use these to organize and group your jobs.
	// +optional
	Labels *map[string]string `json:"labels,omitempty" tf:"labels"`
	// Configures a load job.
	// +optional
	Load *JobSpecLoad `json:"load,omitempty" tf:"load"`
	// The geographic location of the job. The default value is US.
	// +optional
	Location *string `json:"location,omitempty" tf:"location"`
	// +optional
	Project *string `json:"project,omitempty" tf:"project"`
	// Configures a query job.
	// +optional
	Query *JobSpecQuery `json:"query,omitempty" tf:"query"`
	// The status of this job. Examine this value when polling an asynchronous job to see if the job is complete.
	// +optional
	Status []JobSpecStatus `json:"status,omitempty" tf:"status"`
	// Email address of the user who ran the job.
	// +optional
	UserEmail *string `json:"userEmail,omitempty" tf:"user_email"`
}

func (*JobSpecResource) DeepCopy

func (in *JobSpecResource) DeepCopy() *JobSpecResource

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecResource.

func (*JobSpecResource) DeepCopyInto

func (in *JobSpecResource) DeepCopyInto(out *JobSpecResource)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
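
Putting the pieces together, a hedged sketch of a JobSpecResource for a query job; a job normally configures exactly one of copy, extract, load or query, and the job ID here is a placeholder (exampleQuery refers to the JobSpecQuery sketch above):

func exampleJobResource() JobSpecResource {
	strp := func(s string) *string { return &s }

	return JobSpecResource{
		JobID:    strp("example-query-job-001"),
		Location: strp("US"),
		Query:    exampleQuery(),
	}
}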

type JobSpecStatus

type JobSpecStatus struct {
	// Final error result of the job. If present, indicates that the job has completed and was unsuccessful.
	// +optional
	ErrorResult []JobSpecStatusErrorResult `json:"errorResult,omitempty" tf:"error_result"`
	// The first errors encountered during the running of the job. The final message
	// includes the number of errors that caused the process to stop. Errors here do
	// not necessarily mean that the job has not completed or was unsuccessful.
	// +optional
	Errors []JobSpecStatusErrors `json:"errors,omitempty" tf:"errors"`
	// Running state of the job. Valid states include 'PENDING', 'RUNNING', and 'DONE'.
	// +optional
	State *string `json:"state,omitempty" tf:"state"`
}

func (*JobSpecStatus) DeepCopy

func (in *JobSpecStatus) DeepCopy() *JobSpecStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecStatus.

func (*JobSpecStatus) DeepCopyInto

func (in *JobSpecStatus) DeepCopyInto(out *JobSpecStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecStatusErrorResult

type JobSpecStatusErrorResult struct {
	// Specifies where the error occurred, if present.
	// +optional
	Location *string `json:"location,omitempty" tf:"location"`
	// A human-readable description of the error.
	// +optional
	Message *string `json:"message,omitempty" tf:"message"`
	// A short error code that summarizes the error.
	// +optional
	Reason *string `json:"reason,omitempty" tf:"reason"`
}

func (*JobSpecStatusErrorResult) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecStatusErrorResult.

func (*JobSpecStatusErrorResult) DeepCopyInto

func (in *JobSpecStatusErrorResult) DeepCopyInto(out *JobSpecStatusErrorResult)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobSpecStatusErrors

type JobSpecStatusErrors struct {
	// Specifies where the error occurred, if present.
	// +optional
	Location *string `json:"location,omitempty" tf:"location"`
	// A human-readable description of the error.
	// +optional
	Message *string `json:"message,omitempty" tf:"message"`
	// A short error code that summarizes the error.
	// +optional
	Reason *string `json:"reason,omitempty" tf:"reason"`
}

func (*JobSpecStatusErrors) DeepCopy

func (in *JobSpecStatusErrors) DeepCopy() *JobSpecStatusErrors

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpecStatusErrors.

func (*JobSpecStatusErrors) DeepCopyInto

func (in *JobSpecStatusErrors) DeepCopyInto(out *JobSpecStatusErrors)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type JobStatus

type JobStatus struct {
	// Resource generation, which is updated on mutation by the API Server.
	// +optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty"`
	// +optional
	Phase status.Status `json:"phase,omitempty"`
	// +optional
	Conditions []kmapi.Condition `json:"conditions,omitempty"`
}

func (*JobStatus) DeepCopy

func (in *JobStatus) DeepCopy() *JobStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobStatus.

func (*JobStatus) DeepCopyInto

func (in *JobStatus) DeepCopyInto(out *JobStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type Reservation

type Reservation struct {
	metav1.TypeMeta   `json:",inline,omitempty"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	Spec              ReservationSpec   `json:"spec,omitempty"`
	Status            ReservationStatus `json:"status,omitempty"`
}

func (*Reservation) DeepCopy

func (in *Reservation) DeepCopy() *Reservation

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Reservation.

func (*Reservation) DeepCopyInto

func (in *Reservation) DeepCopyInto(out *Reservation)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*Reservation) DeepCopyObject

func (in *Reservation) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*Reservation) SetupWebhookWithManager

func (r *Reservation) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*Reservation) ValidateCreate

func (r *Reservation) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*Reservation) ValidateDelete

func (r *Reservation) ValidateDelete() error

ValidateDelete implements webhook.Validator so a webhook will be registered for the type

func (*Reservation) ValidateUpdate

func (r *Reservation) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

type ReservationList

type ReservationList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	// Items is a list of Reservation CRD objects
	Items []Reservation `json:"items,omitempty"`
}

ReservationList is a list of Reservations

func (*ReservationList) DeepCopy

func (in *ReservationList) DeepCopy() *ReservationList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ReservationList.

func (*ReservationList) DeepCopyInto

func (in *ReservationList) DeepCopyInto(out *ReservationList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ReservationList) DeepCopyObject

func (in *ReservationList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

type ReservationSpec

type ReservationSpec struct {
	State *ReservationSpecResource `json:"state,omitempty" tf:"-"`

	Resource ReservationSpecResource `json:"resource" tf:"resource"`

	UpdatePolicy base.UpdatePolicy `json:"updatePolicy,omitempty" tf:"-"`

	TerminationPolicy base.TerminationPolicy `json:"terminationPolicy,omitempty" tf:"-"`

	ProviderRef core.LocalObjectReference `json:"providerRef" tf:"-"`

	BackendRef *core.LocalObjectReference `json:"backendRef,omitempty" tf:"-"`
}

func (*ReservationSpec) DeepCopy

func (in *ReservationSpec) DeepCopy() *ReservationSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ReservationSpec.

func (*ReservationSpec) DeepCopyInto

func (in *ReservationSpec) DeepCopyInto(out *ReservationSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ReservationSpecResource

type ReservationSpecResource struct {
	Timeouts *base.ResourceTimeout `json:"timeouts,omitempty" tf:"timeouts"`

	ID string `json:"id,omitempty" tf:"id,omitempty"`

	// If false, any query using this reservation will use idle slots from other reservations within
	// the same admin project. If true, a query using this reservation will execute with at most
	// the slot capacity specified above.
	// +optional
	IgnoreIdleSlots *bool `json:"ignoreIdleSlots,omitempty" tf:"ignore_idle_slots"`
	// The geographic location where the reservation should reside.
	// Examples: US, EU, asia-northeast1. The default value is US.
	// +optional
	Location *string `json:"location,omitempty" tf:"location"`
	// The name of the reservation. This field must only contain alphanumeric characters or dash.
	Name *string `json:"name" tf:"name"`
	// +optional
	Project *string `json:"project,omitempty" tf:"project"`
	// Minimum slots available to this reservation. A slot is a unit of computational power in BigQuery, and serves as the
	// unit of parallelism. Queries using this reservation might use more slots during runtime if ignoreIdleSlots is set to false.
	SlotCapacity *int64 `json:"slotCapacity" tf:"slot_capacity"`
}

func (*ReservationSpecResource) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ReservationSpecResource.

func (*ReservationSpecResource) DeepCopyInto

func (in *ReservationSpecResource) DeepCopyInto(out *ReservationSpecResource)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
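
A sketch of a reservation with 100 dedicated slots that does not borrow idle slots from other reservations (name, location and capacity are placeholders):

func exampleReservation() ReservationSpecResource {
	strp := func(s string) *string { return &s }
	boolp := func(b bool) *bool { return &b }
	i64p := func(i int64) *int64 { return &i }

	return ReservationSpecResource{
		Name:            strp("analytics-reservation"),
		Location:        strp("US"),
		SlotCapacity:    i64p(100),
		IgnoreIdleSlots: boolp(true),
	}
}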

type ReservationStatus

type ReservationStatus struct {
	// Resource generation, which is updated on mutation by the API Server.
	// +optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty"`
	// +optional
	Phase status.Status `json:"phase,omitempty"`
	// +optional
	Conditions []kmapi.Condition `json:"conditions,omitempty"`
}

func (*ReservationStatus) DeepCopy

func (in *ReservationStatus) DeepCopy() *ReservationStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ReservationStatus.

func (*ReservationStatus) DeepCopyInto

func (in *ReservationStatus) DeepCopyInto(out *ReservationStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type Routine

type Routine struct {
	metav1.TypeMeta   `json:",inline,omitempty"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	Spec              RoutineSpec   `json:"spec,omitempty"`
	Status            RoutineStatus `json:"status,omitempty"`
}

func (*Routine) DeepCopy

func (in *Routine) DeepCopy() *Routine

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Routine.

func (*Routine) DeepCopyInto

func (in *Routine) DeepCopyInto(out *Routine)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*Routine) DeepCopyObject

func (in *Routine) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*Routine) SetupWebhookWithManager

func (r *Routine) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*Routine) ValidateCreate

func (r *Routine) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*Routine) ValidateDelete

func (r *Routine) ValidateDelete() error

ValidateDelete implements webhook.Validator so a webhook will be registered for the type

func (*Routine) ValidateUpdate

func (r *Routine) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

type RoutineList

type RoutineList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	// Items is a list of Routine CRD objects
	Items []Routine `json:"items,omitempty"`
}

RoutineList is a list of Routines

func (*RoutineList) DeepCopy

func (in *RoutineList) DeepCopy() *RoutineList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RoutineList.

func (*RoutineList) DeepCopyInto

func (in *RoutineList) DeepCopyInto(out *RoutineList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RoutineList) DeepCopyObject

func (in *RoutineList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

type RoutineSpec

type RoutineSpec struct {
	State *RoutineSpecResource `json:"state,omitempty" tf:"-"`

	Resource RoutineSpecResource `json:"resource" tf:"resource"`

	UpdatePolicy base.UpdatePolicy `json:"updatePolicy,omitempty" tf:"-"`

	TerminationPolicy base.TerminationPolicy `json:"terminationPolicy,omitempty" tf:"-"`

	ProviderRef core.LocalObjectReference `json:"providerRef" tf:"-"`

	BackendRef *core.LocalObjectReference `json:"backendRef,omitempty" tf:"-"`
}

func (*RoutineSpec) DeepCopy

func (in *RoutineSpec) DeepCopy() *RoutineSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RoutineSpec.

func (*RoutineSpec) DeepCopyInto

func (in *RoutineSpec) DeepCopyInto(out *RoutineSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type RoutineSpecArguments

type RoutineSpecArguments struct {
	// Defaults to FIXED_TYPE. Default value: "FIXED_TYPE" Possible values: ["FIXED_TYPE", "ANY_TYPE"]
	// +optional
	ArgumentKind *string `json:"argumentKind,omitempty" tf:"argument_kind"`
	// A JSON schema for the data type. Required unless argumentKind = ANY_TYPE.
	// ~>**NOTE**: Because this field expects a JSON string, any changes to the string
	// will create a diff, even if the JSON itself hasn't changed. If the API returns
	// a different value for the same schema, e.g. it switched the order of values
	// or replaced STRUCT field type with RECORD field type, we currently cannot
	// suppress the recurring diff this causes. As a workaround, we recommend using
	// the schema as returned by the API.
	// +optional
	DataType *string `json:"dataType,omitempty" tf:"data_type"`
	// Specifies whether the argument is input or output. Can be set for procedures only. Possible values: ["IN", "OUT", "INOUT"]
	// +optional
	Mode *string `json:"mode,omitempty" tf:"mode"`
	// The name of this argument. Can be absent for function return argument.
	// +optional
	Name *string `json:"name,omitempty" tf:"name"`
}

func (*RoutineSpecArguments) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RoutineSpecArguments.

func (*RoutineSpecArguments) DeepCopyInto

func (in *RoutineSpecArguments) DeepCopyInto(out *RoutineSpecArguments)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type RoutineSpecResource

type RoutineSpecResource struct {
	Timeouts *base.ResourceTimeout `json:"timeouts,omitempty" tf:"timeouts"`

	ID string `json:"id,omitempty" tf:"id,omitempty"`

	// Input/output argument of a function or a stored procedure.
	// +optional
	Arguments []RoutineSpecArguments `json:"arguments,omitempty" tf:"arguments"`
	// The time when this routine was created, in milliseconds since the
	// epoch.
	// +optional
	CreationTime *int64 `json:"creationTime,omitempty" tf:"creation_time"`
	// The ID of the dataset containing this routine
	DatasetID *string `json:"datasetID" tf:"dataset_id"`
	// The body of the routine. For functions, this is the expression in the AS clause.
	// If language=SQL, it is the substring inside (but excluding) the parentheses.
	DefinitionBody *string `json:"definitionBody" tf:"definition_body"`
	// The description of the routine if defined.
	// +optional
	Description *string `json:"description,omitempty" tf:"description"`
	// The determinism level of the JavaScript UDF if defined. Possible values: ["DETERMINISM_LEVEL_UNSPECIFIED", "DETERMINISTIC", "NOT_DETERMINISTIC"]
	// +optional
	DeterminismLevel *string `json:"determinismLevel,omitempty" tf:"determinism_level"`
	// Optional. If language = "JAVASCRIPT", this field stores the path of the
	// imported JAVASCRIPT libraries.
	// +optional
	ImportedLibraries []string `json:"importedLibraries,omitempty" tf:"imported_libraries"`
	// The language of the routine. Possible values: ["SQL", "JAVASCRIPT"]
	// +optional
	Language *string `json:"language,omitempty" tf:"language"`
	// The time when this routine was modified, in milliseconds since the
	// epoch.
	// +optional
	LastModifiedTime *int64 `json:"lastModifiedTime,omitempty" tf:"last_modified_time"`
	// +optional
	Project *string `json:"project,omitempty" tf:"project"`
	// Optional. Can be set only if routineType = "TABLE_VALUED_FUNCTION".
	//
	// If absent, the return table type is inferred from definitionBody at query time in each query
	// that references this routine. If present, then the columns in the evaluated table result will
	// be cast to match the column types specified in the return table type, at query time.
	// +optional
	ReturnTableType *string `json:"returnTableType,omitempty" tf:"return_table_type"`
	// A JSON schema for the return type. Optional if language = "SQL"; required otherwise.
	// If absent, the return type is inferred from definitionBody at query time in each query
	// that references this routine. If present, then the evaluated result will be cast to
	// the specified returned type at query time. ~>**NOTE**: Because this field expects a JSON
	// string, any changes to the string will create a diff, even if the JSON itself hasn't
	// changed. If the API returns a different value for the same schema, e.g. it switched
	// the order of values or replaced STRUCT field type with RECORD field type, we currently
	// cannot suppress the recurring diff this causes. As a workaround, we recommend using
	// the schema as returned by the API.
	// +optional
	ReturnType *string `json:"returnType,omitempty" tf:"return_type"`
	// The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.
	RoutineID *string `json:"routineID" tf:"routine_id"`
	// The type of routine. Possible values: ["SCALAR_FUNCTION", "PROCEDURE", "TABLE_VALUED_FUNCTION"]
	// +optional
	RoutineType *string `json:"routineType,omitempty" tf:"routine_type"`
}
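
A hedged sketch of a minimal SQL scalar function, written as if inside this package. Only fields documented above are used; the dataset, routine ID, and expression are hypothetical, and ptr is a local illustrative helper.

func ptr[T any](v T) *T { return &v } // illustrative helper, not part of this package

// exampleRoutine describes a SQL scalar UDF. Per the field comments above,
// definitionBody carries only the expression from the AS clause.
func exampleRoutine() RoutineSpecResource {
	return RoutineSpecResource{
		DatasetID:      ptr("my_dataset"), // hypothetical dataset
		RoutineID:      ptr("add_one"),    // letters, digits, and underscores only
		RoutineType:    ptr("SCALAR_FUNCTION"),
		Language:       ptr("SQL"),
		DefinitionBody: ptr("x + 1"),
		Arguments: []RoutineSpecArguments{{
			Name:     ptr("x"),
			DataType: ptr(`{"typeKind": "INT64"}`),
		}},
	}
}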

func (*RoutineSpecResource) DeepCopy

func (in *RoutineSpecResource) DeepCopy() *RoutineSpecResource

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RoutineSpecResource.

func (*RoutineSpecResource) DeepCopyInto

func (in *RoutineSpecResource) DeepCopyInto(out *RoutineSpecResource)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type RoutineStatus

type RoutineStatus struct {
	// Resource generation, which is updated on mutation by the API Server.
	// +optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty"`
	// +optional
	Phase status.Status `json:"phase,omitempty"`
	// +optional
	Conditions []kmapi.Condition `json:"conditions,omitempty"`
}

func (*RoutineStatus) DeepCopy

func (in *RoutineStatus) DeepCopy() *RoutineStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RoutineStatus.

func (*RoutineStatus) DeepCopyInto

func (in *RoutineStatus) DeepCopyInto(out *RoutineStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type Table

type Table struct {
	metav1.TypeMeta   `json:",inline,omitempty"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	Spec              TableSpec   `json:"spec,omitempty"`
	Status            TableStatus `json:"status,omitempty"`
}

func (*Table) DeepCopy

func (in *Table) DeepCopy() *Table

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Table.

func (*Table) DeepCopyInto

func (in *Table) DeepCopyInto(out *Table)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*Table) DeepCopyObject

func (in *Table) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*Table) SetupWebhookWithManager

func (r *Table) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*Table) ValidateCreate

func (r *Table) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*Table) ValidateDelete

func (r *Table) ValidateDelete() error

ValidateDelete implements webhook.Validator so a webhook will be registered for the type

func (*Table) ValidateUpdate

func (r *Table) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

type TableIamBinding

type TableIamBinding struct {
	metav1.TypeMeta   `json:",inline,omitempty"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	Spec              TableIamBindingSpec   `json:"spec,omitempty"`
	Status            TableIamBindingStatus `json:"status,omitempty"`
}

func (*TableIamBinding) DeepCopy

func (in *TableIamBinding) DeepCopy() *TableIamBinding

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIamBinding.

func (*TableIamBinding) DeepCopyInto

func (in *TableIamBinding) DeepCopyInto(out *TableIamBinding)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*TableIamBinding) DeepCopyObject

func (in *TableIamBinding) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*TableIamBinding) SetupWebhookWithManager

func (r *TableIamBinding) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*TableIamBinding) ValidateCreate

func (r *TableIamBinding) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*TableIamBinding) ValidateDelete

func (r *TableIamBinding) ValidateDelete() error

ValidateDelete implements webhook.Validator so a webhook will be registered for the type

func (*TableIamBinding) ValidateUpdate

func (r *TableIamBinding) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

type TableIamBindingList

type TableIamBindingList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	// Items is a list of TableIamBinding CRD objects
	Items []TableIamBinding `json:"items,omitempty"`
}

TableIamBindingList is a list of TableIamBindings

func (*TableIamBindingList) DeepCopy

func (in *TableIamBindingList) DeepCopy() *TableIamBindingList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIamBindingList.

func (*TableIamBindingList) DeepCopyInto

func (in *TableIamBindingList) DeepCopyInto(out *TableIamBindingList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*TableIamBindingList) DeepCopyObject

func (in *TableIamBindingList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

type TableIamBindingSpec

type TableIamBindingSpec struct {
	State *TableIamBindingSpecResource `json:"state,omitempty" tf:"-"`

	Resource TableIamBindingSpecResource `json:"resource" tf:"resource"`

	UpdatePolicy base.UpdatePolicy `json:"updatePolicy,omitempty" tf:"-"`

	TerminationPolicy base.TerminationPolicy `json:"terminationPolicy,omitempty" tf:"-"`

	ProviderRef core.LocalObjectReference `json:"providerRef" tf:"-"`

	BackendRef *core.LocalObjectReference `json:"backendRef,omitempty" tf:"-"`
}

func (*TableIamBindingSpec) DeepCopy

func (in *TableIamBindingSpec) DeepCopy() *TableIamBindingSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIamBindingSpec.

func (*TableIamBindingSpec) DeepCopyInto

func (in *TableIamBindingSpec) DeepCopyInto(out *TableIamBindingSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type TableIamBindingSpecCondition

type TableIamBindingSpecCondition struct {
	// +optional
	Description *string `json:"description,omitempty" tf:"description"`
	Expression  *string `json:"expression" tf:"expression"`
	Title       *string `json:"title" tf:"title"`
}

func (*TableIamBindingSpecCondition) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIamBindingSpecCondition.

func (*TableIamBindingSpecCondition) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type TableIamBindingSpecConditionCodec

type TableIamBindingSpecConditionCodec struct {
}

+k8s:deepcopy-gen=false

func (TableIamBindingSpecConditionCodec) Decode

func (TableIamBindingSpecConditionCodec) Encode

func (TableIamBindingSpecConditionCodec) IsEmpty

type TableIamBindingSpecResource

type TableIamBindingSpecResource struct {
	ID string `json:"id,omitempty" tf:"id,omitempty"`

	// +optional
	Condition *TableIamBindingSpecCondition `json:"condition,omitempty" tf:"condition"`
	DatasetID *string                       `json:"datasetID" tf:"dataset_id"`
	// +optional
	Etag    *string  `json:"etag,omitempty" tf:"etag"`
	Members []string `json:"members" tf:"members"`
	// +optional
	Project *string `json:"project,omitempty" tf:"project"`
	Role    *string `json:"role" tf:"role"`
	TableID *string `json:"tableID" tf:"table_id"`
}
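
A hedged sketch granting one role on a single table to two principals, written as if inside this package. The dataset, table, and member identities are hypothetical; ptr is a local illustrative helper.

func ptr[T any](v T) *T { return &v } // illustrative helper, not part of this package

// exampleBinding grants roles/bigquery.dataViewer on one table. Member strings
// follow the usual IAM formats (user:, serviceAccount:, group:, ...).
func exampleBinding() TableIamBindingSpecResource {
	return TableIamBindingSpecResource{
		DatasetID: ptr("my_dataset"), // hypothetical
		TableID:   ptr("my_table"),   // hypothetical
		Role:      ptr("roles/bigquery.dataViewer"),
		Members: []string{
			"user:alice@example.com",
			"serviceAccount:reader@my-project.iam.gserviceaccount.com",
		},
	}
}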

func (*TableIamBindingSpecResource) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIamBindingSpecResource.

func (*TableIamBindingSpecResource) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type TableIamBindingStatus

type TableIamBindingStatus struct {
	// Resource generation, which is updated on mutation by the API Server.
	// +optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty"`
	// +optional
	Phase status.Status `json:"phase,omitempty"`
	// +optional
	Conditions []kmapi.Condition `json:"conditions,omitempty"`
}

func (*TableIamBindingStatus) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIamBindingStatus.

func (*TableIamBindingStatus) DeepCopyInto

func (in *TableIamBindingStatus) DeepCopyInto(out *TableIamBindingStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type TableIamMember

type TableIamMember struct {
	metav1.TypeMeta   `json:",inline,omitempty"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	Spec              TableIamMemberSpec   `json:"spec,omitempty"`
	Status            TableIamMemberStatus `json:"status,omitempty"`
}

func (*TableIamMember) DeepCopy

func (in *TableIamMember) DeepCopy() *TableIamMember

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIamMember.

func (*TableIamMember) DeepCopyInto

func (in *TableIamMember) DeepCopyInto(out *TableIamMember)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*TableIamMember) DeepCopyObject

func (in *TableIamMember) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*TableIamMember) SetupWebhookWithManager

func (r *TableIamMember) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*TableIamMember) ValidateCreate

func (r *TableIamMember) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*TableIamMember) ValidateDelete

func (r *TableIamMember) ValidateDelete() error

ValidateDelete implements webhook.Validator so a webhook will be registered for the type

func (*TableIamMember) ValidateUpdate

func (r *TableIamMember) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

type TableIamMemberList

type TableIamMemberList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	// Items is a list of TableIamMember CRD objects
	Items []TableIamMember `json:"items,omitempty"`
}

TableIamMemberList is a list of TableIamMembers

func (*TableIamMemberList) DeepCopy

func (in *TableIamMemberList) DeepCopy() *TableIamMemberList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIamMemberList.

func (*TableIamMemberList) DeepCopyInto

func (in *TableIamMemberList) DeepCopyInto(out *TableIamMemberList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*TableIamMemberList) DeepCopyObject

func (in *TableIamMemberList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

type TableIamMemberSpec

type TableIamMemberSpec struct {
	State *TableIamMemberSpecResource `json:"state,omitempty" tf:"-"`

	Resource TableIamMemberSpecResource `json:"resource" tf:"resource"`

	UpdatePolicy base.UpdatePolicy `json:"updatePolicy,omitempty" tf:"-"`

	TerminationPolicy base.TerminationPolicy `json:"terminationPolicy,omitempty" tf:"-"`

	ProviderRef core.LocalObjectReference `json:"providerRef" tf:"-"`

	BackendRef *core.LocalObjectReference `json:"backendRef,omitempty" tf:"-"`
}

func (*TableIamMemberSpec) DeepCopy

func (in *TableIamMemberSpec) DeepCopy() *TableIamMemberSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIamMemberSpec.

func (*TableIamMemberSpec) DeepCopyInto

func (in *TableIamMemberSpec) DeepCopyInto(out *TableIamMemberSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type TableIamMemberSpecCondition

type TableIamMemberSpecCondition struct {
	// +optional
	Description *string `json:"description,omitempty" tf:"description"`
	Expression  *string `json:"expression" tf:"expression"`
	Title       *string `json:"title" tf:"title"`
}

func (*TableIamMemberSpecCondition) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIamMemberSpecCondition.

func (*TableIamMemberSpecCondition) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type TableIamMemberSpecConditionCodec

type TableIamMemberSpecConditionCodec struct {
}

+k8s:deepcopy-gen=false

func (TableIamMemberSpecConditionCodec) Decode

func (TableIamMemberSpecConditionCodec) Encode

func (TableIamMemberSpecConditionCodec) IsEmpty

type TableIamMemberSpecResource

type TableIamMemberSpecResource struct {
	ID string `json:"id,omitempty" tf:"id,omitempty"`

	// +optional
	Condition *TableIamMemberSpecCondition `json:"condition,omitempty" tf:"condition"`
	DatasetID *string                      `json:"datasetID" tf:"dataset_id"`
	// +optional
	Etag   *string `json:"etag,omitempty" tf:"etag"`
	Member *string `json:"member" tf:"member"`
	// +optional
	Project *string `json:"project,omitempty" tf:"project"`
	Role    *string `json:"role" tf:"role"`
	TableID *string `json:"tableID" tf:"table_id"`
}

func (*TableIamMemberSpecResource) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIamMemberSpecResource.

func (*TableIamMemberSpecResource) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type TableIamMemberStatus

type TableIamMemberStatus struct {
	// Resource generation, which is updated on mutation by the API Server.
	// +optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty"`
	// +optional
	Phase status.Status `json:"phase,omitempty"`
	// +optional
	Conditions []kmapi.Condition `json:"conditions,omitempty"`
}

func (*TableIamMemberStatus) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIamMemberStatus.

func (*TableIamMemberStatus) DeepCopyInto

func (in *TableIamMemberStatus) DeepCopyInto(out *TableIamMemberStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type TableIamPolicy

type TableIamPolicy struct {
	metav1.TypeMeta   `json:",inline,omitempty"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	Spec              TableIamPolicySpec   `json:"spec,omitempty"`
	Status            TableIamPolicyStatus `json:"status,omitempty"`
}

func (*TableIamPolicy) DeepCopy

func (in *TableIamPolicy) DeepCopy() *TableIamPolicy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIamPolicy.

func (*TableIamPolicy) DeepCopyInto

func (in *TableIamPolicy) DeepCopyInto(out *TableIamPolicy)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*TableIamPolicy) DeepCopyObject

func (in *TableIamPolicy) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*TableIamPolicy) SetupWebhookWithManager

func (r *TableIamPolicy) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*TableIamPolicy) ValidateCreate

func (r *TableIamPolicy) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*TableIamPolicy) ValidateDelete

func (r *TableIamPolicy) ValidateDelete() error

ValidateDelete implements webhook.Validator so a webhook will be registered for the type

func (*TableIamPolicy) ValidateUpdate

func (r *TableIamPolicy) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

type TableIamPolicyList

type TableIamPolicyList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	// Items is a list of TableIamPolicy CRD objects
	Items []TableIamPolicy `json:"items,omitempty"`
}

TableIamPolicyList is a list of TableIamPolicies

func (*TableIamPolicyList) DeepCopy

func (in *TableIamPolicyList) DeepCopy() *TableIamPolicyList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIamPolicyList.

func (*TableIamPolicyList) DeepCopyInto

func (in *TableIamPolicyList) DeepCopyInto(out *TableIamPolicyList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*TableIamPolicyList) DeepCopyObject

func (in *TableIamPolicyList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

type TableIamPolicySpec

type TableIamPolicySpec struct {
	State *TableIamPolicySpecResource `json:"state,omitempty" tf:"-"`

	Resource TableIamPolicySpecResource `json:"resource" tf:"resource"`

	UpdatePolicy base.UpdatePolicy `json:"updatePolicy,omitempty" tf:"-"`

	TerminationPolicy base.TerminationPolicy `json:"terminationPolicy,omitempty" tf:"-"`

	ProviderRef core.LocalObjectReference `json:"providerRef" tf:"-"`

	BackendRef *core.LocalObjectReference `json:"backendRef,omitempty" tf:"-"`
}

func (*TableIamPolicySpec) DeepCopy

func (in *TableIamPolicySpec) DeepCopy() *TableIamPolicySpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIamPolicySpec.

func (*TableIamPolicySpec) DeepCopyInto

func (in *TableIamPolicySpec) DeepCopyInto(out *TableIamPolicySpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type TableIamPolicySpecResource

type TableIamPolicySpecResource struct {
	ID string `json:"id,omitempty" tf:"id,omitempty"`

	DatasetID *string `json:"datasetID" tf:"dataset_id"`
	// +optional
	Etag       *string `json:"etag,omitempty" tf:"etag"`
	PolicyData *string `json:"policyData" tf:"policy_data"`
	// +optional
	Project *string `json:"project,omitempty" tf:"project"`
	TableID *string `json:"tableID" tf:"table_id"`
}
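
Unlike the binding and member variants, policyData replaces the table's entire IAM policy with a JSON policy document. A hedged sketch, written as if inside this package; the policy content is illustrative and ptr is a local helper.

func ptr[T any](v T) *T { return &v } // illustrative helper, not part of this package

// examplePolicy sets the full IAM policy for one table from a JSON string.
func examplePolicy() TableIamPolicySpecResource {
	return TableIamPolicySpecResource{
		DatasetID:  ptr("my_dataset"), // hypothetical
		TableID:    ptr("my_table"),   // hypothetical
		PolicyData: ptr(`{"bindings":[{"role":"roles/bigquery.dataViewer","members":["user:alice@example.com"]}]}`),
	}
}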

func (*TableIamPolicySpecResource) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIamPolicySpecResource.

func (*TableIamPolicySpecResource) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type TableIamPolicyStatus

type TableIamPolicyStatus struct {
	// Resource generation, which is updated on mutation by the API Server.
	// +optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty"`
	// +optional
	Phase status.Status `json:"phase,omitempty"`
	// +optional
	Conditions []kmapi.Condition `json:"conditions,omitempty"`
}

func (*TableIamPolicyStatus) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIamPolicyStatus.

func (*TableIamPolicyStatus) DeepCopyInto

func (in *TableIamPolicyStatus) DeepCopyInto(out *TableIamPolicyStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type TableList

type TableList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	// Items is a list of Table CRD objects
	Items []Table `json:"items,omitempty"`
}

TableList is a list of Tables

func (*TableList) DeepCopy

func (in *TableList) DeepCopy() *TableList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableList.

func (*TableList) DeepCopyInto

func (in *TableList) DeepCopyInto(out *TableList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*TableList) DeepCopyObject

func (in *TableList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

type TableSpec

type TableSpec struct {
	State *TableSpecResource `json:"state,omitempty" tf:"-"`

	Resource TableSpecResource `json:"resource" tf:"resource"`

	UpdatePolicy base.UpdatePolicy `json:"updatePolicy,omitempty" tf:"-"`

	TerminationPolicy base.TerminationPolicy `json:"terminationPolicy,omitempty" tf:"-"`

	ProviderRef core.LocalObjectReference `json:"providerRef" tf:"-"`

	BackendRef *core.LocalObjectReference `json:"backendRef,omitempty" tf:"-"`
}

func (*TableSpec) DeepCopy

func (in *TableSpec) DeepCopy() *TableSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableSpec.

func (*TableSpec) DeepCopyInto

func (in *TableSpec) DeepCopyInto(out *TableSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type TableSpecEncryptionConfiguration

type TableSpecEncryptionConfiguration struct {
	// The self link or full name of a key which should be used to encrypt this table. Note that the default bigquery service account will need to have encrypt/decrypt permissions on this key - you may want to see the google_bigquery_default_service_account datasource and the google_kms_crypto_key_iam_binding resource.
	KmsKeyName *string `json:"kmsKeyName" tf:"kms_key_name"`
	// The self link or full name of the kms key version used to encrypt this table.
	// +optional
	KmsKeyVersion *string `json:"kmsKeyVersion,omitempty" tf:"kms_key_version"`
}

func (*TableSpecEncryptionConfiguration) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableSpecEncryptionConfiguration.

func (*TableSpecEncryptionConfiguration) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type TableSpecEncryptionConfigurationCodec

type TableSpecEncryptionConfigurationCodec struct {
}

+k8s:deepcopy-gen=false

func (TableSpecEncryptionConfigurationCodec) Decode

func (TableSpecEncryptionConfigurationCodec) Encode

func (TableSpecEncryptionConfigurationCodec) IsEmpty

type TableSpecExternalDataConfiguration

type TableSpecExternalDataConfiguration struct {
	// Let BigQuery try to autodetect the schema and format of the table.
	Autodetect *bool `json:"autodetect" tf:"autodetect"`
	// The compression type of the data source. Valid values are "NONE" or "GZIP".
	// +optional
	Compression *string `json:"compression,omitempty" tf:"compression"`
	// Additional properties to set if source_format is set to "CSV".
	// +optional
	CsvOptions *TableSpecExternalDataConfigurationCsvOptions `json:"csvOptions,omitempty" tf:"csv_options"`
	// Additional options if source_format is set to "GOOGLE_SHEETS".
	// +optional
	GoogleSheetsOptions *TableSpecExternalDataConfigurationGoogleSheetsOptions `json:"googleSheetsOptions,omitempty" tf:"google_sheets_options"`
	// When set, configures hive partitioning support. Not all storage formats support hive partitioning -- requesting hive partitioning on an unsupported format will lead to an error, as will providing an invalid specification.
	// +optional
	HivePartitioningOptions *TableSpecExternalDataConfigurationHivePartitioningOptions `json:"hivePartitioningOptions,omitempty" tf:"hive_partitioning_options"`
	// Indicates if BigQuery should allow extra values that are not represented in the table schema. If true, the extra values are ignored. If false, records with extra columns are treated as bad records, and if there are too many bad records, an invalid error is returned in the job result. The default value is false.
	// +optional
	IgnoreUnknownValues *bool `json:"ignoreUnknownValues,omitempty" tf:"ignore_unknown_values"`
	// The maximum number of bad records that BigQuery can ignore when reading data.
	// +optional
	MaxBadRecords *int64 `json:"maxBadRecords,omitempty" tf:"max_bad_records"`
	// A JSON schema for the external table. Schema is required for CSV and JSON formats and is disallowed for Google Cloud Bigtable, Cloud Datastore backups, and Avro formats when using external tables.
	// +optional
	Schema *string `json:"schema,omitempty" tf:"schema"`
	// The data format. Supported values are: "CSV", "GOOGLE_SHEETS", "NEWLINE_DELIMITED_JSON", "AVRO", "PARQUET", "ORC" and "DATASTORE_BACKUP". To use "GOOGLE_SHEETS" the scopes must include "https://www.googleapis.com/auth/drive.readonly".
	SourceFormat *string `json:"sourceFormat" tf:"source_format"`
	// A list of the fully-qualified URIs that point to your data in Google Cloud.
	SourceUris []string `json:"sourceUris" tf:"source_uris"`
}
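
A hedged sketch of an external CSV table backed by Cloud Storage, written as if inside this package. The bucket path is hypothetical and ptr is a local helper; note that quote must be set explicitly for CSV sources (see csvOptions below).

func ptr[T any](v T) *T { return &v } // illustrative helper, not part of this package

// exampleExternalCSV autodetects the schema of CSV files in a bucket and skips
// a single header row.
func exampleExternalCSV() TableSpecExternalDataConfiguration {
	return TableSpecExternalDataConfiguration{
		Autodetect:   ptr(true),
		SourceFormat: ptr("CSV"),
		SourceUris:   []string{"gs://my-bucket/data/*.csv"}, // hypothetical bucket
		CsvOptions: &TableSpecExternalDataConfigurationCsvOptions{
			Quote:           ptr(`"`),
			SkipLeadingRows: ptr(int64(1)),
		},
	}
}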

func (*TableSpecExternalDataConfiguration) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableSpecExternalDataConfiguration.

func (*TableSpecExternalDataConfiguration) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type TableSpecExternalDataConfigurationCodec

type TableSpecExternalDataConfigurationCodec struct {
}

+k8s:deepcopy-gen=false

func (TableSpecExternalDataConfigurationCodec) Decode

func (TableSpecExternalDataConfigurationCodec) Encode

func (TableSpecExternalDataConfigurationCodec) IsEmpty

type TableSpecExternalDataConfigurationCsvOptions

type TableSpecExternalDataConfigurationCsvOptions struct {
	// Indicates if BigQuery should accept rows that are missing trailing optional columns.
	// +optional
	AllowJaggedRows *bool `json:"allowJaggedRows,omitempty" tf:"allow_jagged_rows"`
	// Indicates if BigQuery should allow quoted data sections that contain newline characters in a CSV file. The default value is false.
	// +optional
	AllowQuotedNewlines *bool `json:"allowQuotedNewlines,omitempty" tf:"allow_quoted_newlines"`
	// The character encoding of the data. The supported values are UTF-8 or ISO-8859-1.
	// +optional
	Encoding *string `json:"encoding,omitempty" tf:"encoding"`
	// The separator for fields in a CSV file.
	// +optional
	FieldDelimiter *string `json:"fieldDelimiter,omitempty" tf:"field_delimiter"`
	// The value that is used to quote data sections in a CSV file. If your data does not contain quoted sections, set the property value to an empty string. If your data contains quoted newline characters, you must also set the allow_quoted_newlines property to true. The API-side default is ", specified in Terraform escaped as \". Due to limitations with Terraform default values, this value is required to be explicitly set.
	Quote *string `json:"quote" tf:"quote"`
	// The number of rows at the top of a CSV file that BigQuery will skip when reading the data.
	// +optional
	SkipLeadingRows *int64 `json:"skipLeadingRows,omitempty" tf:"skip_leading_rows"`
}

func (*TableSpecExternalDataConfigurationCsvOptions) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableSpecExternalDataConfigurationCsvOptions.

func (*TableSpecExternalDataConfigurationCsvOptions) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type TableSpecExternalDataConfigurationCsvOptionsCodec

type TableSpecExternalDataConfigurationCsvOptionsCodec struct {
}

+k8s:deepcopy-gen=false

func (TableSpecExternalDataConfigurationCsvOptionsCodec) Decode

func (TableSpecExternalDataConfigurationCsvOptionsCodec) Encode

func (TableSpecExternalDataConfigurationCsvOptionsCodec) IsEmpty

type TableSpecExternalDataConfigurationGoogleSheetsOptions

type TableSpecExternalDataConfigurationGoogleSheetsOptions struct {
	// Range of a sheet to query from. Only used when non-empty. At least one of range or skip_leading_rows must be set. Typical format: "sheet_name!top_left_cell_id:bottom_right_cell_id" For example: "sheet1!A1:B20"
	// +optional
	Range *string `json:"range,omitempty" tf:"range"`
	// The number of rows at the top of the sheet that BigQuery will skip when reading the data. At least one of range or skip_leading_rows must be set.
	// +optional
	SkipLeadingRows *int64 `json:"skipLeadingRows,omitempty" tf:"skip_leading_rows"`
}

func (*TableSpecExternalDataConfigurationGoogleSheetsOptions) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableSpecExternalDataConfigurationGoogleSheetsOptions.

func (*TableSpecExternalDataConfigurationGoogleSheetsOptions) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type TableSpecExternalDataConfigurationGoogleSheetsOptionsCodec

type TableSpecExternalDataConfigurationGoogleSheetsOptionsCodec struct {
}

+k8s:deepcopy-gen=false

func (TableSpecExternalDataConfigurationGoogleSheetsOptionsCodec) Decode

func (TableSpecExternalDataConfigurationGoogleSheetsOptionsCodec) Encode

func (TableSpecExternalDataConfigurationGoogleSheetsOptionsCodec) IsEmpty

type TableSpecExternalDataConfigurationHivePartitioningOptions

type TableSpecExternalDataConfigurationHivePartitioningOptions struct {
	// When set, what mode of hive partitioning to use when reading data.
	// +optional
	Mode *string `json:"mode,omitempty" tf:"mode"`
	// If set to true, queries over this table require a partition filter that can be used for partition elimination to be specified.
	// +optional
	RequirePartitionFilter *bool `json:"requirePartitionFilter,omitempty" tf:"require_partition_filter"`
	// When hive partition detection is requested, a common prefix for all source URIs must be provided. The prefix must end immediately before the partition key encoding begins.
	// +optional
	SourceURIPrefix *string `json:"sourceURIPrefix,omitempty" tf:"source_uri_prefix"`
}

func (*TableSpecExternalDataConfigurationHivePartitioningOptions) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableSpecExternalDataConfigurationHivePartitioningOptions.

func (*TableSpecExternalDataConfigurationHivePartitioningOptions) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type TableSpecExternalDataConfigurationHivePartitioningOptionsCodec

type TableSpecExternalDataConfigurationHivePartitioningOptionsCodec struct {
}

+k8s:deepcopy-gen=false

func (TableSpecExternalDataConfigurationHivePartitioningOptionsCodec) Decode

func (TableSpecExternalDataConfigurationHivePartitioningOptionsCodec) Encode

func (TableSpecExternalDataConfigurationHivePartitioningOptionsCodec) IsEmpty

type TableSpecMaterializedView

type TableSpecMaterializedView struct {
	// Specifies if BigQuery should automatically refresh the materialized view when the base table is updated. The default is true.
	// +optional
	EnableRefresh *bool `json:"enableRefresh,omitempty" tf:"enable_refresh"`
	// A query whose result is persisted.
	Query *string `json:"query" tf:"query"`
	// Specifies the maximum frequency, in milliseconds, at which this materialized view will be refreshed. The default is 1800000 (30 minutes).
	// +optional
	RefreshIntervalMs *int64 `json:"refreshIntervalMs,omitempty" tf:"refresh_interval_ms"`
}
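
A hedged sketch of a materialized view definition, written as if inside this package. The query is hypothetical and ptr is a local helper; the refresh interval shown matches the documented default of 1800000 ms.

func ptr[T any](v T) *T { return &v } // illustrative helper, not part of this package

// exampleMaterializedView refreshes automatically, at most every 30 minutes.
func exampleMaterializedView() TableSpecMaterializedView {
	return TableSpecMaterializedView{
		Query:             ptr("SELECT word, COUNT(*) AS n FROM `my_dataset.my_table` GROUP BY word"),
		EnableRefresh:     ptr(true),
		RefreshIntervalMs: ptr(int64(1800000)),
	}
}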

func (*TableSpecMaterializedView) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableSpecMaterializedView.

func (*TableSpecMaterializedView) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type TableSpecMaterializedViewCodec

type TableSpecMaterializedViewCodec struct {
}

+k8s:deepcopy-gen=false

func (TableSpecMaterializedViewCodec) Decode

func (TableSpecMaterializedViewCodec) Encode

func (TableSpecMaterializedViewCodec) IsEmpty

type TableSpecRangePartitioning

type TableSpecRangePartitioning struct {
	// The field used to determine how to create a range-based partition.
	Field *string `json:"field" tf:"field"`
	// Information required to partition based on ranges. Structure is documented below.
	Range *TableSpecRangePartitioningRange `json:"range" tf:"range"`
}

func (*TableSpecRangePartitioning) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableSpecRangePartitioning.

func (*TableSpecRangePartitioning) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type TableSpecRangePartitioningCodec

type TableSpecRangePartitioningCodec struct {
}

+k8s:deepcopy-gen=false

func (TableSpecRangePartitioningCodec) Decode

func (TableSpecRangePartitioningCodec) Encode

func (TableSpecRangePartitioningCodec) IsEmpty

type TableSpecRangePartitioningRange

type TableSpecRangePartitioningRange struct {
	// End of the range partitioning, exclusive.
	End *int64 `json:"end" tf:"end"`
	// The width of each range within the partition.
	Interval *int64 `json:"interval" tf:"interval"`
	// Start of the range partitioning, inclusive.
	Start *int64 `json:"start" tf:"start"`
}
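
A hedged sketch combining the two range-partitioning structs above, written as if inside this package. The column name is hypothetical and ptr is a local helper.

func ptr[T any](v T) *T { return &v } // illustrative helper, not part of this package

// exampleRangePartitioning partitions on an integer column into buckets of
// width 10 covering [0, 100): start is inclusive, end is exclusive.
func exampleRangePartitioning() TableSpecRangePartitioning {
	return TableSpecRangePartitioning{
		Field: ptr("customer_id"), // hypothetical integer column
		Range: &TableSpecRangePartitioningRange{
			Start:    ptr(int64(0)),
			Interval: ptr(int64(10)),
			End:      ptr(int64(100)),
		},
	}
}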

func (*TableSpecRangePartitioningRange) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableSpecRangePartitioningRange.

func (*TableSpecRangePartitioningRange) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type TableSpecRangePartitioningRangeCodec

type TableSpecRangePartitioningRangeCodec struct {
}

+k8s:deepcopy-gen=false

func (TableSpecRangePartitioningRangeCodec) Decode

func (TableSpecRangePartitioningRangeCodec) Encode

func (TableSpecRangePartitioningRangeCodec) IsEmpty

type TableSpecResource

type TableSpecResource struct {
	ID string `json:"id,omitempty" tf:"id,omitempty"`

	// Specifies column names to use for data clustering. Up to four top-level columns are allowed, and should be specified in descending priority order.
	// +optional
	// +kubebuilder:validation:MaxItems=4
	Clustering []string `json:"clustering,omitempty" tf:"clustering"`
	// The time when this table was created, in milliseconds since the epoch.
	// +optional
	CreationTime *int64 `json:"creationTime,omitempty" tf:"creation_time"`
	// The dataset ID to create the table in. Changing this forces a new resource to be created.
	DatasetID *string `json:"datasetID" tf:"dataset_id"`
	// Whether or not to allow Terraform to destroy the instance. Unless this field is set to false in Terraform state, a terraform destroy or terraform apply that would delete the instance will fail.
	// +optional
	DeletionProtection *bool `json:"deletionProtection,omitempty" tf:"deletion_protection"`
	// The field description.
	// +optional
	Description *string `json:"description,omitempty" tf:"description"`
	// Specifies how the table should be encrypted. If left blank, the table will be encrypted with a Google-managed key; that process is transparent to the user.
	// +optional
	EncryptionConfiguration *TableSpecEncryptionConfiguration `json:"encryptionConfiguration,omitempty" tf:"encryption_configuration"`
	// A hash of the resource.
	// +optional
	Etag *string `json:"etag,omitempty" tf:"etag"`
	// The time when this table expires, in milliseconds since the epoch. If not present, the table will persist indefinitely. Expired tables will be deleted and their storage reclaimed.
	// +optional
	ExpirationTime *int64 `json:"expirationTime,omitempty" tf:"expiration_time"`
	// Describes the data format, location, and other properties of a table stored outside of BigQuery. By defining these properties, the data source can then be queried as if it were a standard BigQuery table.
	// +optional
	ExternalDataConfiguration *TableSpecExternalDataConfiguration `json:"externalDataConfiguration,omitempty" tf:"external_data_configuration"`
	// A descriptive name for the table.
	// +optional
	FriendlyName *string `json:"friendlyName,omitempty" tf:"friendly_name"`
	// A mapping of labels to assign to the resource.
	// +optional
	Labels *map[string]string `json:"labels,omitempty" tf:"labels"`
	// The time when this table was last modified, in milliseconds since the epoch.
	// +optional
	LastModifiedTime *int64 `json:"lastModifiedTime,omitempty" tf:"last_modified_time"`
	// The geographic location where the table resides. This value is inherited from the dataset.
	// +optional
	Location *string `json:"location,omitempty" tf:"location"`
	// If specified, configures this table as a materialized view.
	// +optional
	MaterializedView *TableSpecMaterializedView `json:"materializedView,omitempty" tf:"materialized_view"`
	// The size of this table in bytes, excluding any data in the streaming buffer.
	// +optional
	NumBytes *int64 `json:"numBytes,omitempty" tf:"num_bytes"`
	// The number of bytes in the table that are considered "long-term storage".
	// +optional
	NumLongTermBytes *int64 `json:"numLongTermBytes,omitempty" tf:"num_long_term_bytes"`
	// The number of rows of data in this table, excluding any data in the streaming buffer.
	// +optional
	NumRows *int64 `json:"numRows,omitempty" tf:"num_rows"`
	// The ID of the project in which the resource belongs.
	// +optional
	Project *string `json:"project,omitempty" tf:"project"`
	// If specified, configures range-based partitioning for this table.
	// +optional
	RangePartitioning *TableSpecRangePartitioning `json:"rangePartitioning,omitempty" tf:"range_partitioning"`
	// A JSON schema for the table.
	// +optional
	Schema *string `json:"schema,omitempty" tf:"schema"`
	// The URI of the created resource.
	// +optional
	SelfLink *string `json:"selfLink,omitempty" tf:"self_link"`
	// A unique ID for the resource. Changing this forces a new resource to be created.
	TableID *string `json:"tableID" tf:"table_id"`
	// If specified, configures time-based partitioning for this table.
	// +optional
	TimePartitioning *TableSpecTimePartitioning `json:"timePartitioning,omitempty" tf:"time_partitioning"`
	// Describes the table type.
	// +optional
	Type *string `json:"type,omitempty" tf:"type"`
	// If specified, configures this table as a view.
	// +optional
	View *TableSpecView `json:"view,omitempty" tf:"view"`
}
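
A hedged sketch of a minimal native table, written as if inside this package: a JSON schema string, daily time partitioning on a timestamp column, and clustering. The dataset, table, and column names are hypothetical; ptr is a local helper.

func ptr[T any](v T) *T { return &v } // illustrative helper, not part of this package

// exampleTable partitions by day on event_time and clusters on user_id
// (clustering accepts up to four columns, per the field comment above).
func exampleTable() TableSpecResource {
	return TableSpecResource{
		DatasetID: ptr("my_dataset"), // hypothetical
		TableID:   ptr("events"),     // hypothetical
		Schema:    ptr(`[{"name":"event_time","type":"TIMESTAMP"},{"name":"user_id","type":"STRING"}]`),
		TimePartitioning: &TableSpecTimePartitioning{
			Type:  ptr("DAY"),
			Field: ptr("event_time"),
		},
		Clustering: []string{"user_id"},
	}
}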

func (*TableSpecResource) DeepCopy

func (in *TableSpecResource) DeepCopy() *TableSpecResource

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableSpecResource.

func (*TableSpecResource) DeepCopyInto

func (in *TableSpecResource) DeepCopyInto(out *TableSpecResource)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type TableSpecTimePartitioning

type TableSpecTimePartitioning struct {
	// Number of milliseconds for which to keep the storage for a partition.
	// +optional
	ExpirationMs *int64 `json:"expirationMs,omitempty" tf:"expiration_ms"`
	// The field used to determine how to create a time-based partition. If time-based partitioning is enabled without this value, the table is partitioned based on the load time.
	// +optional
	Field *string `json:"field,omitempty" tf:"field"`
	// If set to true, queries over this table require a partition filter that can be used for partition elimination to be specified.
	// +optional
	RequirePartitionFilter *bool `json:"requirePartitionFilter,omitempty" tf:"require_partition_filter"`
	// The supported types are DAY, HOUR, MONTH, and YEAR, which will generate one partition per day, hour, month, and year, respectively.
	Type *string `json:"type" tf:"type"`
}

func (*TableSpecTimePartitioning) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableSpecTimePartitioning.

func (*TableSpecTimePartitioning) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type TableSpecTimePartitioningCodec

type TableSpecTimePartitioningCodec struct {
}

+k8s:deepcopy-gen=false

func (TableSpecTimePartitioningCodec) Decode

func (TableSpecTimePartitioningCodec) Encode

func (TableSpecTimePartitioningCodec) IsEmpty

type TableSpecView

type TableSpecView struct {
	// A query that BigQuery executes when the view is referenced.
	Query *string `json:"query" tf:"query"`
	// Specifies whether to use BigQuery's legacy SQL for this view. The default value is true. If set to false, the view will use BigQuery's standard SQL.
	// +optional
	UseLegacySQL *bool `json:"useLegacySQL,omitempty" tf:"use_legacy_sql"`
}

func (*TableSpecView) DeepCopy

func (in *TableSpecView) DeepCopy() *TableSpecView

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableSpecView.

func (*TableSpecView) DeepCopyInto

func (in *TableSpecView) DeepCopyInto(out *TableSpecView)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type TableSpecViewCodec

type TableSpecViewCodec struct {
}

+k8s:deepcopy-gen=false

func (TableSpecViewCodec) Decode

func (TableSpecViewCodec) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator)

func (TableSpecViewCodec) Encode

func (TableSpecViewCodec) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream)

func (TableSpecViewCodec) IsEmpty

func (TableSpecViewCodec) IsEmpty(ptr unsafe.Pointer) bool

type TableStatus

type TableStatus struct {
	// Resource generation, which is updated on mutation by the API Server.
	// +optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty"`
	// +optional
	Phase status.Status `json:"phase,omitempty"`
	// +optional
	Conditions []kmapi.Condition `json:"conditions,omitempty"`
}

func (*TableStatus) DeepCopy

func (in *TableStatus) DeepCopy() *TableStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableStatus.

func (*TableStatus) DeepCopyInto

func (in *TableStatus) DeepCopyInto(out *TableStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
