genproto: google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta2

package storage

import "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta2"

Index

Package Files

arrow.pb.go avro.pb.go protobuf.pb.go storage.pb.go stream.pb.go table.pb.go

Variables

var (
    ArrowSerializationOptions_Format_name = map[int32]string{
        0:  "FORMAT_UNSPECIFIED",
        1:  "ARROW_0_14",
        2:  "ARROW_0_15",
    }
    ArrowSerializationOptions_Format_value = map[string]int32{
        "FORMAT_UNSPECIFIED": 0,
        "ARROW_0_14":         1,
        "ARROW_0_15":         2,
    }
)

Enum value maps for ArrowSerializationOptions_Format.

var (
    StorageError_StorageErrorCode_name = map[int32]string{
        0:  "STORAGE_ERROR_CODE_UNSPECIFIED",
        1:  "TABLE_NOT_FOUND",
        2:  "STREAM_ALREADY_COMMITTED",
        3:  "STREAM_NOT_FOUND",
        4:  "INVALID_STREAM_TYPE",
        5:  "INVALID_STREAM_STATE",
    }
    StorageError_StorageErrorCode_value = map[string]int32{
        "STORAGE_ERROR_CODE_UNSPECIFIED": 0,
        "TABLE_NOT_FOUND":                1,
        "STREAM_ALREADY_COMMITTED":       2,
        "STREAM_NOT_FOUND":               3,
        "INVALID_STREAM_TYPE":            4,
        "INVALID_STREAM_STATE":           5,
    }
)

Enum value maps for StorageError_StorageErrorCode.

var (
    DataFormat_name = map[int32]string{
        0:  "DATA_FORMAT_UNSPECIFIED",
        1:  "AVRO",
        2:  "ARROW",
    }
    DataFormat_value = map[string]int32{
        "DATA_FORMAT_UNSPECIFIED": 0,
        "AVRO":                    1,
        "ARROW":                   2,
    }
)

Enum value maps for DataFormat.

var (
    WriteStream_Type_name = map[int32]string{
        0:  "TYPE_UNSPECIFIED",
        1:  "COMMITTED",
        2:  "PENDING",
        3:  "BUFFERED",
    }
    WriteStream_Type_value = map[string]int32{
        "TYPE_UNSPECIFIED": 0,
        "COMMITTED":        1,
        "PENDING":          2,
        "BUFFERED":         3,
    }
)

Enum value maps for WriteStream_Type.

var (
    TableFieldSchema_Type_name = map[int32]string{
        0:  "TYPE_UNSPECIFIED",
        1:  "STRING",
        2:  "INT64",
        3:  "DOUBLE",
        4:  "STRUCT",
        5:  "BYTES",
        6:  "BOOL",
        7:  "TIMESTAMP",
        8:  "DATE",
        9:  "TIME",
        10: "DATETIME",
        11: "GEOGRAPHY",
        12: "NUMERIC",
    }
    TableFieldSchema_Type_value = map[string]int32{
        "TYPE_UNSPECIFIED": 0,
        "STRING":           1,
        "INT64":            2,
        "DOUBLE":           3,
        "STRUCT":           4,
        "BYTES":            5,
        "BOOL":             6,
        "TIMESTAMP":        7,
        "DATE":             8,
        "TIME":             9,
        "DATETIME":         10,
        "GEOGRAPHY":        11,
        "NUMERIC":          12,
    }
)

Enum value maps for TableFieldSchema_Type.

var (
    TableFieldSchema_Mode_name = map[int32]string{
        0:  "MODE_UNSPECIFIED",
        1:  "NULLABLE",
        2:  "REQUIRED",
        3:  "REPEATED",
    }
    TableFieldSchema_Mode_value = map[string]int32{
        "MODE_UNSPECIFIED": 0,
        "NULLABLE":         1,
        "REQUIRED":         2,
        "REPEATED":         3,
    }
)

Enum value maps for TableFieldSchema_Mode.

var File_google_cloud_bigquery_storage_v1beta2_arrow_proto protoreflect.FileDescriptor
var File_google_cloud_bigquery_storage_v1beta2_avro_proto protoreflect.FileDescriptor
var File_google_cloud_bigquery_storage_v1beta2_protobuf_proto protoreflect.FileDescriptor
var File_google_cloud_bigquery_storage_v1beta2_storage_proto protoreflect.FileDescriptor
var File_google_cloud_bigquery_storage_v1beta2_stream_proto protoreflect.FileDescriptor
var File_google_cloud_bigquery_storage_v1beta2_table_proto protoreflect.FileDescriptor

func RegisterBigQueryReadServer Uses

func RegisterBigQueryReadServer(s *grpc.Server, srv BigQueryReadServer)

func RegisterBigQueryWriteServer Uses

func RegisterBigQueryWriteServer(s *grpc.Server, srv BigQueryWriteServer)
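
A minimal sketch of wiring both services into a gRPC server; the listen address and the readSrv/writeSrv implementations are placeholders, not part of this package (imports: "net", "google.golang.org/grpc"):

func serve(readSrv BigQueryReadServer, writeSrv BigQueryWriteServer) error {
    lis, err := net.Listen("tcp", ":8080") // placeholder address
    if err != nil {
        return err
    }
    s := grpc.NewServer()
    RegisterBigQueryReadServer(s, readSrv)
    RegisterBigQueryWriteServer(s, writeSrv)
    return s.Serve(lis)
}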

type AppendRowsRequest Uses

type AppendRowsRequest struct {

    // Required. The stream that is the target of the append operation. This value
    // must be specified for the initial request. If subsequent requests specify
    // the stream name, it must equal the value provided in the first request.
    // To write to the _default stream, populate this field with a string in the
    // format `projects/{project}/datasets/{dataset}/tables/{table}/_default`.
    WriteStream string `protobuf:"bytes,1,opt,name=write_stream,json=writeStream,proto3" json:"write_stream,omitempty"`
    // If present, the write is only performed if the next append offset is the
    // same as the provided value. If not present, the write is performed at the
    // current end of stream. Specifying a value for this field is not allowed
    // when calling AppendRows for the '_default' stream.
    Offset *wrapperspb.Int64Value `protobuf:"bytes,2,opt,name=offset,proto3" json:"offset,omitempty"`
    // Input rows. The `writer_schema` field must be specified in the initial
    // request and is currently ignored if specified in subsequent requests.
    // Subsequent requests must have data in the same format as the initial
    // request.
    //
    // Types that are assignable to Rows:
    //	*AppendRowsRequest_ProtoRows
    Rows isAppendRowsRequest_Rows `protobuf_oneof:"rows"`
    // ID set by the client to annotate its identity. Only the setting in the
    // initial request is respected.
    TraceId string `protobuf:"bytes,6,opt,name=trace_id,json=traceId,proto3" json:"trace_id,omitempty"`
    // contains filtered or unexported fields
}

Request message for `AppendRows`.
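
As a rough sketch, the Rows oneof can be populated as follows; the helper name is hypothetical, and the descriptor and serialized rows are assumed to be produced elsewhere (for example from a generated message type, via the descriptorpb package):

// buildAppendRequest is a hypothetical helper that shows how the Rows oneof
// is populated for proto-serialized data.
func buildAppendRequest(stream string, schema *descriptorpb.DescriptorProto, rows [][]byte) *AppendRowsRequest {
    return &AppendRowsRequest{
        WriteStream: stream,
        Rows: &AppendRowsRequest_ProtoRows{
            ProtoRows: &AppendRowsRequest_ProtoData{
                // The writer schema is only honored on the first request of a connection.
                WriterSchema: &ProtoSchema{ProtoDescriptor: schema},
                Rows:         &ProtoRows{SerializedRows: rows},
            },
        },
    }
}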

func (*AppendRowsRequest) Descriptor Uses

func (*AppendRowsRequest) Descriptor() ([]byte, []int)

Deprecated: Use AppendRowsRequest.ProtoReflect.Descriptor instead.

func (*AppendRowsRequest) GetOffset Uses

func (x *AppendRowsRequest) GetOffset() *wrapperspb.Int64Value

func (*AppendRowsRequest) GetProtoRows Uses

func (x *AppendRowsRequest) GetProtoRows() *AppendRowsRequest_ProtoData

func (*AppendRowsRequest) GetRows Uses

func (m *AppendRowsRequest) GetRows() isAppendRowsRequest_Rows

func (*AppendRowsRequest) GetTraceId Uses

func (x *AppendRowsRequest) GetTraceId() string

func (*AppendRowsRequest) GetWriteStream Uses

func (x *AppendRowsRequest) GetWriteStream() string

func (*AppendRowsRequest) ProtoMessage Uses

func (*AppendRowsRequest) ProtoMessage()

func (*AppendRowsRequest) ProtoReflect Uses

func (x *AppendRowsRequest) ProtoReflect() protoreflect.Message

func (*AppendRowsRequest) Reset Uses

func (x *AppendRowsRequest) Reset()

func (*AppendRowsRequest) String Uses

func (x *AppendRowsRequest) String() string

type AppendRowsRequest_ProtoData Uses

type AppendRowsRequest_ProtoData struct {

    // Proto schema used to serialize the data.
    WriterSchema *ProtoSchema `protobuf:"bytes,1,opt,name=writer_schema,json=writerSchema,proto3" json:"writer_schema,omitempty"`
    // Serialized row data in protobuf message format.
    Rows *ProtoRows `protobuf:"bytes,2,opt,name=rows,proto3" json:"rows,omitempty"`
    // contains filtered or unexported fields
}

Proto schema and data.

func (*AppendRowsRequest_ProtoData) Descriptor Uses

func (*AppendRowsRequest_ProtoData) Descriptor() ([]byte, []int)

Deprecated: Use AppendRowsRequest_ProtoData.ProtoReflect.Descriptor instead.

func (*AppendRowsRequest_ProtoData) GetRows Uses

func (x *AppendRowsRequest_ProtoData) GetRows() *ProtoRows

func (*AppendRowsRequest_ProtoData) GetWriterSchema Uses

func (x *AppendRowsRequest_ProtoData) GetWriterSchema() *ProtoSchema

func (*AppendRowsRequest_ProtoData) ProtoMessage Uses

func (*AppendRowsRequest_ProtoData) ProtoMessage()

func (*AppendRowsRequest_ProtoData) ProtoReflect Uses

func (x *AppendRowsRequest_ProtoData) ProtoReflect() protoreflect.Message

func (*AppendRowsRequest_ProtoData) Reset Uses

func (x *AppendRowsRequest_ProtoData) Reset()

func (*AppendRowsRequest_ProtoData) String Uses

func (x *AppendRowsRequest_ProtoData) String() string

type AppendRowsRequest_ProtoRows Uses

type AppendRowsRequest_ProtoRows struct {
    // Rows in proto format.
    ProtoRows *AppendRowsRequest_ProtoData `protobuf:"bytes,4,opt,name=proto_rows,json=protoRows,proto3,oneof"`
}

type AppendRowsResponse Uses

type AppendRowsResponse struct {

    // Types that are assignable to Response:
    //	*AppendRowsResponse_AppendResult_
    //	*AppendRowsResponse_Error
    Response isAppendRowsResponse_Response `protobuf_oneof:"response"`
    // If the backend detects a schema update, it is passed to the user so that
    // the user can use it to send messages of the new type. It will be empty
    // when there are no schema updates.
    UpdatedSchema *TableSchema `protobuf:"bytes,3,opt,name=updated_schema,json=updatedSchema,proto3" json:"updated_schema,omitempty"`
    // contains filtered or unexported fields
}

Response message for `AppendRows`.

func (*AppendRowsResponse) Descriptor Uses

func (*AppendRowsResponse) Descriptor() ([]byte, []int)

Deprecated: Use AppendRowsResponse.ProtoReflect.Descriptor instead.

func (*AppendRowsResponse) GetAppendResult Uses

func (x *AppendRowsResponse) GetAppendResult() *AppendRowsResponse_AppendResult

func (*AppendRowsResponse) GetError Uses

func (x *AppendRowsResponse) GetError() *status.Status

func (*AppendRowsResponse) GetResponse Uses

func (m *AppendRowsResponse) GetResponse() isAppendRowsResponse_Response

func (*AppendRowsResponse) GetUpdatedSchema Uses

func (x *AppendRowsResponse) GetUpdatedSchema() *TableSchema

func (*AppendRowsResponse) ProtoMessage Uses

func (*AppendRowsResponse) ProtoMessage()

func (*AppendRowsResponse) ProtoReflect Uses

func (x *AppendRowsResponse) ProtoReflect() protoreflect.Message

func (*AppendRowsResponse) Reset Uses

func (x *AppendRowsResponse) Reset()

func (*AppendRowsResponse) String Uses

func (x *AppendRowsResponse) String() string

type AppendRowsResponse_AppendResult Uses

type AppendRowsResponse_AppendResult struct {

    // The row offset at which the last append occurred. The offset will not be
    // set if appending using the default stream.
    Offset *wrapperspb.Int64Value `protobuf:"bytes,1,opt,name=offset,proto3" json:"offset,omitempty"`
    // contains filtered or unexported fields
}

A successful append result.

func (*AppendRowsResponse_AppendResult) Descriptor Uses

func (*AppendRowsResponse_AppendResult) Descriptor() ([]byte, []int)

Deprecated: Use AppendRowsResponse_AppendResult.ProtoReflect.Descriptor instead.

func (*AppendRowsResponse_AppendResult) GetOffset Uses

func (x *AppendRowsResponse_AppendResult) GetOffset() *wrapperspb.Int64Value

func (*AppendRowsResponse_AppendResult) ProtoMessage Uses

func (*AppendRowsResponse_AppendResult) ProtoMessage()

func (*AppendRowsResponse_AppendResult) ProtoReflect Uses

func (x *AppendRowsResponse_AppendResult) ProtoReflect() protoreflect.Message

func (*AppendRowsResponse_AppendResult) Reset Uses

func (x *AppendRowsResponse_AppendResult) Reset()

func (*AppendRowsResponse_AppendResult) String Uses

func (x *AppendRowsResponse_AppendResult) String() string

type AppendRowsResponse_AppendResult_ Uses

type AppendRowsResponse_AppendResult_ struct {
    // Result if the append is successful.
    AppendResult *AppendRowsResponse_AppendResult `protobuf:"bytes,1,opt,name=append_result,json=appendResult,proto3,oneof"`
}

type AppendRowsResponse_Error Uses

type AppendRowsResponse_Error struct {
    // Error returned when the request failed. If set, it means the rows were
    // not accepted into the system. Users can retry or continue with other
    // requests within the same connection.
    // ALREADY_EXISTS: happens when an offset is specified; it means the entire
    //   request was already appended, and it is safe to ignore this error.
    // OUT_OF_RANGE: happens when an offset is specified; it means the specified
    //   offset is beyond the end of the stream.
    // INVALID_ARGUMENT: error caused by a malformed request or data.
    // RESOURCE_EXHAUSTED: request rejected due to throttling. Only happens when
    //   appending without an offset.
    // ABORTED: request processing was aborted because of prior failures; the
    //   request can be retried if the previous failure is fixed.
    // INTERNAL: server-side errors that can be retried.
    Error *status.Status `protobuf:"bytes,2,opt,name=error,proto3,oneof"`
}

type ArrowRecordBatch Uses

type ArrowRecordBatch struct {

    // IPC-serialized Arrow RecordBatch.
    SerializedRecordBatch []byte `protobuf:"bytes,1,opt,name=serialized_record_batch,json=serializedRecordBatch,proto3" json:"serialized_record_batch,omitempty"`
    // contains filtered or unexported fields
}

Arrow RecordBatch.

func (*ArrowRecordBatch) Descriptor Uses

func (*ArrowRecordBatch) Descriptor() ([]byte, []int)

Deprecated: Use ArrowRecordBatch.ProtoReflect.Descriptor instead.

func (*ArrowRecordBatch) GetSerializedRecordBatch Uses

func (x *ArrowRecordBatch) GetSerializedRecordBatch() []byte

func (*ArrowRecordBatch) ProtoMessage Uses

func (*ArrowRecordBatch) ProtoMessage()

func (*ArrowRecordBatch) ProtoReflect Uses

func (x *ArrowRecordBatch) ProtoReflect() protoreflect.Message

func (*ArrowRecordBatch) Reset Uses

func (x *ArrowRecordBatch) Reset()

func (*ArrowRecordBatch) String Uses

func (x *ArrowRecordBatch) String() string

type ArrowSchema Uses

type ArrowSchema struct {

    // IPC serialized Arrow schema.
    SerializedSchema []byte `protobuf:"bytes,1,opt,name=serialized_schema,json=serializedSchema,proto3" json:"serialized_schema,omitempty"`
    // contains filtered or unexported fields
}

Arrow schema as specified in https://arrow.apache.org/docs/python/api/datatypes.html and serialized to bytes using IPC: https://arrow.apache.org/docs/format/Columnar.html#serialization-and-interprocess-communication-ipc

See the code samples for how this message can be deserialized.
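
As a rough sketch (not an official sample), the serialized schema can be concatenated with the serialized record batches from ArrowRecordBatch messages to form a complete Arrow IPC stream, which an Arrow IPC stream reader (for example, the one in the Apache Arrow Go library) can then decode; the helper name is hypothetical:

// assembleIPCStream prepends the session's serialized Arrow schema to the
// serialized record batches so the result is a self-describing Arrow IPC
// stream. Requires the standard "bytes" package.
func assembleIPCStream(schema *ArrowSchema, batches []*ArrowRecordBatch) []byte {
    var buf bytes.Buffer
    buf.Write(schema.GetSerializedSchema())
    for _, b := range batches {
        buf.Write(b.GetSerializedRecordBatch())
    }
    return buf.Bytes()
}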

func (*ArrowSchema) Descriptor Uses

func (*ArrowSchema) Descriptor() ([]byte, []int)

Deprecated: Use ArrowSchema.ProtoReflect.Descriptor instead.

func (*ArrowSchema) GetSerializedSchema Uses

func (x *ArrowSchema) GetSerializedSchema() []byte

func (*ArrowSchema) ProtoMessage Uses

func (*ArrowSchema) ProtoMessage()

func (*ArrowSchema) ProtoReflect Uses

func (x *ArrowSchema) ProtoReflect() protoreflect.Message

func (*ArrowSchema) Reset Uses

func (x *ArrowSchema) Reset()

func (*ArrowSchema) String Uses

func (x *ArrowSchema) String() string

type ArrowSerializationOptions Uses

type ArrowSerializationOptions struct {

    // The Arrow IPC format to use.
    Format ArrowSerializationOptions_Format `protobuf:"varint,1,opt,name=format,proto3,enum=google.cloud.bigquery.storage.v1beta2.ArrowSerializationOptions_Format" json:"format,omitempty"`
    // contains filtered or unexported fields
}

Contains options specific to Arrow Serialization.

func (*ArrowSerializationOptions) Descriptor Uses

func (*ArrowSerializationOptions) Descriptor() ([]byte, []int)

Deprecated: Use ArrowSerializationOptions.ProtoReflect.Descriptor instead.

func (*ArrowSerializationOptions) GetFormat Uses

func (x *ArrowSerializationOptions) GetFormat() ArrowSerializationOptions_Format

func (*ArrowSerializationOptions) ProtoMessage Uses

func (*ArrowSerializationOptions) ProtoMessage()

func (*ArrowSerializationOptions) ProtoReflect Uses

func (x *ArrowSerializationOptions) ProtoReflect() protoreflect.Message

func (*ArrowSerializationOptions) Reset Uses

func (x *ArrowSerializationOptions) Reset()

func (*ArrowSerializationOptions) String Uses

func (x *ArrowSerializationOptions) String() string

type ArrowSerializationOptions_Format Uses

type ArrowSerializationOptions_Format int32

The IPC format to use when serializing Arrow streams.

const (
    // If unspecified, the IPC format as of the 0.15 release will be used.
    ArrowSerializationOptions_FORMAT_UNSPECIFIED ArrowSerializationOptions_Format = 0
    // Use the legacy IPC message format as of Apache Arrow Release 0.14.
    ArrowSerializationOptions_ARROW_0_14 ArrowSerializationOptions_Format = 1
    // Use the message format as of Apache Arrow Release 0.15.
    ArrowSerializationOptions_ARROW_0_15 ArrowSerializationOptions_Format = 2
)

func (ArrowSerializationOptions_Format) Descriptor Uses

func (ArrowSerializationOptions_Format) Descriptor() protoreflect.EnumDescriptor

func (ArrowSerializationOptions_Format) Enum Uses

func (x ArrowSerializationOptions_Format) Enum() *ArrowSerializationOptions_Format

func (ArrowSerializationOptions_Format) EnumDescriptor Uses

func (ArrowSerializationOptions_Format) EnumDescriptor() ([]byte, []int)

Deprecated: Use ArrowSerializationOptions_Format.Descriptor instead.

func (ArrowSerializationOptions_Format) Number Uses

func (x ArrowSerializationOptions_Format) Number() protoreflect.EnumNumber

func (ArrowSerializationOptions_Format) String Uses

func (x ArrowSerializationOptions_Format) String() string

func (ArrowSerializationOptions_Format) Type Uses

func (ArrowSerializationOptions_Format) Type() protoreflect.EnumType

type AvroRows Uses

type AvroRows struct {

    // Binary serialized rows in a block.
    SerializedBinaryRows []byte `protobuf:"bytes,1,opt,name=serialized_binary_rows,json=serializedBinaryRows,proto3" json:"serialized_binary_rows,omitempty"`
    // contains filtered or unexported fields
}

Avro rows.

func (*AvroRows) Descriptor Uses

func (*AvroRows) Descriptor() ([]byte, []int)

Deprecated: Use AvroRows.ProtoReflect.Descriptor instead.

func (*AvroRows) GetSerializedBinaryRows Uses

func (x *AvroRows) GetSerializedBinaryRows() []byte

func (*AvroRows) ProtoMessage Uses

func (*AvroRows) ProtoMessage()

func (*AvroRows) ProtoReflect Uses

func (x *AvroRows) ProtoReflect() protoreflect.Message

func (*AvroRows) Reset Uses

func (x *AvroRows) Reset()

func (*AvroRows) String Uses

func (x *AvroRows) String() string

type AvroSchema Uses

type AvroSchema struct {

    // JSON serialized schema, as described at
    // https://avro.apache.org/docs/1.8.1/spec.html.
    Schema string `protobuf:"bytes,1,opt,name=schema,proto3" json:"schema,omitempty"`
    // contains filtered or unexported fields
}

Avro schema.

func (*AvroSchema) Descriptor Uses

func (*AvroSchema) Descriptor() ([]byte, []int)

Deprecated: Use AvroSchema.ProtoReflect.Descriptor instead.

func (*AvroSchema) GetSchema Uses

func (x *AvroSchema) GetSchema() string

func (*AvroSchema) ProtoMessage Uses

func (*AvroSchema) ProtoMessage()

func (*AvroSchema) ProtoReflect Uses

func (x *AvroSchema) ProtoReflect() protoreflect.Message

func (*AvroSchema) Reset Uses

func (x *AvroSchema) Reset()

func (*AvroSchema) String Uses

func (x *AvroSchema) String() string

type BatchCommitWriteStreamsRequest Uses

type BatchCommitWriteStreamsRequest struct {

    // Required. Parent table that all the streams should belong to, in the form
    // of `projects/{project}/datasets/{dataset}/tables/{table}`.
    Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"`
    // Required. The group of streams that will be committed atomically.
    WriteStreams []string `protobuf:"bytes,2,rep,name=write_streams,json=writeStreams,proto3" json:"write_streams,omitempty"`
    // contains filtered or unexported fields
}

Request message for `BatchCommitWriteStreams`.

func (*BatchCommitWriteStreamsRequest) Descriptor Uses

func (*BatchCommitWriteStreamsRequest) Descriptor() ([]byte, []int)

Deprecated: Use BatchCommitWriteStreamsRequest.ProtoReflect.Descriptor instead.

func (*BatchCommitWriteStreamsRequest) GetParent Uses

func (x *BatchCommitWriteStreamsRequest) GetParent() string

func (*BatchCommitWriteStreamsRequest) GetWriteStreams Uses

func (x *BatchCommitWriteStreamsRequest) GetWriteStreams() []string

func (*BatchCommitWriteStreamsRequest) ProtoMessage Uses

func (*BatchCommitWriteStreamsRequest) ProtoMessage()

func (*BatchCommitWriteStreamsRequest) ProtoReflect Uses

func (x *BatchCommitWriteStreamsRequest) ProtoReflect() protoreflect.Message

func (*BatchCommitWriteStreamsRequest) Reset Uses

func (x *BatchCommitWriteStreamsRequest) Reset()

func (*BatchCommitWriteStreamsRequest) String Uses

func (x *BatchCommitWriteStreamsRequest) String() string

type BatchCommitWriteStreamsResponse Uses

type BatchCommitWriteStreamsResponse struct {

    // The time at which streams were committed, with microsecond granularity.
    // This field will only exist when there are no stream errors.
    CommitTime *timestamppb.Timestamp `protobuf:"bytes,1,opt,name=commit_time,json=commitTime,proto3" json:"commit_time,omitempty"`
    // Stream-level error if the commit failed. Only streams with errors will
    // be in the list.
    StreamErrors []*StorageError `protobuf:"bytes,2,rep,name=stream_errors,json=streamErrors,proto3" json:"stream_errors,omitempty"`
    // contains filtered or unexported fields
}

Response message for `BatchCommitWriteStreams`.

func (*BatchCommitWriteStreamsResponse) Descriptor Uses

func (*BatchCommitWriteStreamsResponse) Descriptor() ([]byte, []int)

Deprecated: Use BatchCommitWriteStreamsResponse.ProtoReflect.Descriptor instead.

func (*BatchCommitWriteStreamsResponse) GetCommitTime Uses

func (x *BatchCommitWriteStreamsResponse) GetCommitTime() *timestamppb.Timestamp

func (*BatchCommitWriteStreamsResponse) GetStreamErrors Uses

func (x *BatchCommitWriteStreamsResponse) GetStreamErrors() []*StorageError

func (*BatchCommitWriteStreamsResponse) ProtoMessage Uses

func (*BatchCommitWriteStreamsResponse) ProtoMessage()

func (*BatchCommitWriteStreamsResponse) ProtoReflect Uses

func (x *BatchCommitWriteStreamsResponse) ProtoReflect() protoreflect.Message

func (*BatchCommitWriteStreamsResponse) Reset Uses

func (x *BatchCommitWriteStreamsResponse) Reset()

func (*BatchCommitWriteStreamsResponse) String Uses

func (x *BatchCommitWriteStreamsResponse) String() string

type BigQueryReadClient Uses

type BigQueryReadClient interface {
    // Creates a new read session. A read session divides the contents of a
    // BigQuery table into one or more streams, which can then be used to read
    // data from the table. The read session also specifies properties of the
    // data to be read, such as a list of columns or a push-down filter describing
    // the rows to be returned.
    //
    // A particular row can be read by at most one stream. When the caller has
    // reached the end of each stream in the session, then all the data in the
    // table has been read.
    //
    // Data is assigned to each stream such that roughly the same number of
    // rows can be read from each stream. Because the server-side unit for
    // assigning data is collections of rows, the API does not guarantee that
    // each stream will return the same number of rows. Additionally, the
    // limits are enforced based on the number of pre-filtered rows, so some
    // filters can lead to lopsided assignments.
    //
    // Read sessions automatically expire 24 hours after they are created and do
    // not require manual clean-up by the caller.
    CreateReadSession(ctx context.Context, in *CreateReadSessionRequest, opts ...grpc.CallOption) (*ReadSession, error)
    // Reads rows from the stream in the format prescribed by the ReadSession.
    // Each response contains one or more table rows, up to a maximum of 100 MiB
    // per response; read requests which attempt to read individual rows larger
    // than 100 MiB will fail.
    //
    // Each request also returns a set of stream statistics reflecting the current
    // state of the stream.
    ReadRows(ctx context.Context, in *ReadRowsRequest, opts ...grpc.CallOption) (BigQueryRead_ReadRowsClient, error)
    // Splits a given `ReadStream` into two `ReadStream` objects. These
    // `ReadStream` objects are referred to as the primary and the residual
    // streams of the split. The original `ReadStream` can still be read from in
    // the same manner as before. Both of the returned `ReadStream` objects can
    // also be read from, and the rows returned by both child streams will be
    // the same as the rows read from the original stream.
    //
    // Moreover, the two child streams will be allocated back-to-back in the
    // original `ReadStream`. Concretely, it is guaranteed that for streams
    // original, primary, and residual, that original[0-j] = primary[0-j] and
    // original[j-n] = residual[0-m] once the streams have been read to
    // completion.
    SplitReadStream(ctx context.Context, in *SplitReadStreamRequest, opts ...grpc.CallOption) (*SplitReadStreamResponse, error)
}

BigQueryReadClient is the client API for BigQueryRead service.

For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.

func NewBigQueryReadClient Uses

func NewBigQueryReadClient(cc grpc.ClientConnInterface) BigQueryReadClient
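
A rough sketch of the read flow against these methods; the resource names are placeholders, dialing and authentication are omitted, and production code would typically go through a higher-level client or supply proper credentials (imports: "context", "io"):

func readTable(ctx context.Context, client BigQueryReadClient) error {
    session, err := client.CreateReadSession(ctx, &CreateReadSessionRequest{
        Parent: "projects/my-project", // placeholder project
        ReadSession: &ReadSession{
            Table:      "projects/my-project/datasets/my_dataset/tables/my_table", // placeholder table
            DataFormat: DataFormat_AVRO,
        },
        MaxStreamCount: 1,
    })
    if err != nil {
        return err
    }
    // At least one stream is created with the session.
    rows, err := client.ReadRows(ctx, &ReadRowsRequest{ReadStream: session.GetStreams()[0].GetName()})
    if err != nil {
        return err
    }
    for {
        resp, err := rows.Recv()
        if err == io.EOF {
            return nil
        }
        if err != nil {
            return err
        }
        _ = resp.GetAvroRows() // decode with an Avro library of choice
    }
}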

type BigQueryReadServer Uses

type BigQueryReadServer interface {
    // Creates a new read session. A read session divides the contents of a
    // BigQuery table into one or more streams, which can then be used to read
    // data from the table. The read session also specifies properties of the
    // data to be read, such as a list of columns or a push-down filter describing
    // the rows to be returned.
    //
    // A particular row can be read by at most one stream. When the caller has
    // reached the end of each stream in the session, then all the data in the
    // table has been read.
    //
    // Data is assigned to each stream such that roughly the same number of
    // rows can be read from each stream. Because the server-side unit for
    // assigning data is collections of rows, the API does not guarantee that
    // each stream will return the same number of rows. Additionally, the
    // limits are enforced based on the number of pre-filtered rows, so some
    // filters can lead to lopsided assignments.
    //
    // Read sessions automatically expire 24 hours after they are created and do
    // not require manual clean-up by the caller.
    CreateReadSession(context.Context, *CreateReadSessionRequest) (*ReadSession, error)
    // Reads rows from the stream in the format prescribed by the ReadSession.
    // Each response contains one or more table rows, up to a maximum of 100 MiB
    // per response; read requests which attempt to read individual rows larger
    // than 100 MiB will fail.
    //
    // Each request also returns a set of stream statistics reflecting the current
    // state of the stream.
    ReadRows(*ReadRowsRequest, BigQueryRead_ReadRowsServer) error
    // Splits a given `ReadStream` into two `ReadStream` objects. These
    // `ReadStream` objects are referred to as the primary and the residual
    // streams of the split. The original `ReadStream` can still be read from in
    // the same manner as before. Both of the returned `ReadStream` objects can
    // also be read from, and the rows returned by both child streams will be
    // the same as the rows read from the original stream.
    //
    // Moreover, the two child streams will be allocated back-to-back in the
    // original `ReadStream`. Concretely, it is guaranteed that for streams
    // original, primary, and residual, that original[0-j] = primary[0-j] and
    // original[j-n] = residual[0-m] once the streams have been read to
    // completion.
    SplitReadStream(context.Context, *SplitReadStreamRequest) (*SplitReadStreamResponse, error)
}

BigQueryReadServer is the server API for BigQueryRead service.

type BigQueryRead_ReadRowsClient Uses

type BigQueryRead_ReadRowsClient interface {
    Recv() (*ReadRowsResponse, error)
    grpc.ClientStream
}

type BigQueryRead_ReadRowsServer Uses

type BigQueryRead_ReadRowsServer interface {
    Send(*ReadRowsResponse) error
    grpc.ServerStream
}

type BigQueryWriteClient Uses

type BigQueryWriteClient interface {
    // Creates a write stream to the given table.
    // Additionally, every table has a special COMMITTED stream named '_default'
    // to which data can be written. This stream doesn't need to be created using
    // CreateWriteStream. It is a stream that can be used simultaneously by any
    // number of clients. Data written to this stream is considered committed as
    // soon as an acknowledgement is received.
    CreateWriteStream(ctx context.Context, in *CreateWriteStreamRequest, opts ...grpc.CallOption) (*WriteStream, error)
    // Appends data to the given stream.
    //
    // If `offset` is specified, the `offset` is checked against the end of
    // stream. The server returns `OUT_OF_RANGE` in `AppendRowsResponse` if an
    // attempt is made to append to an offset beyond the current end of the stream
    // or `ALREADY_EXISTS` if the user provides an `offset` that has already been
    // written to. The user can retry with an adjusted offset within the same RPC
    // stream. If `offset` is not specified, append happens at the end of the
    // stream.
    //
    // The response contains the offset at which the append happened. Responses
    // are received in the same order in which requests are sent. There will be
    // one response for each successful request. If the `offset` is not set in
    // the response, it means the append did not happen due to an error. If one
    // request fails, all subsequent requests will also fail until a successful
    // request is made again.
    //
    // If the stream is of `PENDING` type, data will only be available for read
    // operations after the stream is committed.
    AppendRows(ctx context.Context, opts ...grpc.CallOption) (BigQueryWrite_AppendRowsClient, error)
    // Gets a write stream.
    GetWriteStream(ctx context.Context, in *GetWriteStreamRequest, opts ...grpc.CallOption) (*WriteStream, error)
    // Finalize a write stream so that no new data can be appended to the
    // stream. Finalize is not supported on the '_default' stream.
    FinalizeWriteStream(ctx context.Context, in *FinalizeWriteStreamRequest, opts ...grpc.CallOption) (*FinalizeWriteStreamResponse, error)
    // Atomically commits a group of `PENDING` streams that belong to the same
    // `parent` table.
    // Streams must be finalized before commit and cannot be committed multiple
    // times. Once a stream is committed, data in the stream becomes available
    // for read operations.
    BatchCommitWriteStreams(ctx context.Context, in *BatchCommitWriteStreamsRequest, opts ...grpc.CallOption) (*BatchCommitWriteStreamsResponse, error)
    // Flushes rows to a BUFFERED stream.
    // If users are appending rows to a BUFFERED stream, a flush operation is
    // required in order for the rows to become available for reading. A
    // flush operation flushes up to any previously flushed offset in a BUFFERED
    // stream, to the offset specified in the request.
    // Flush is not supported on the _default stream, since it is not BUFFERED.
    FlushRows(ctx context.Context, in *FlushRowsRequest, opts ...grpc.CallOption) (*FlushRowsResponse, error)
}

BigQueryWriteClient is the client API for BigQueryWrite service.

For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.

func NewBigQueryWriteClient Uses

func NewBigQueryWriteClient(cc grpc.ClientConnInterface) BigQueryWriteClient
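
A rough sketch of the pending-stream write flow using these methods; the table name is a placeholder, the descriptor and serialized rows are assumed to be produced elsewhere, and error handling is kept minimal:

func writePending(ctx context.Context, client BigQueryWriteClient, schema *descriptorpb.DescriptorProto, rows [][]byte) error {
    table := "projects/my-project/datasets/my_dataset/tables/my_table" // placeholder table
    ws, err := client.CreateWriteStream(ctx, &CreateWriteStreamRequest{
        Parent:      table,
        WriteStream: &WriteStream{Type: WriteStream_PENDING},
    })
    if err != nil {
        return err
    }
    stream, err := client.AppendRows(ctx)
    if err != nil {
        return err
    }
    err = stream.Send(&AppendRowsRequest{
        WriteStream: ws.GetName(),
        Rows: &AppendRowsRequest_ProtoRows{
            ProtoRows: &AppendRowsRequest_ProtoData{
                WriterSchema: &ProtoSchema{ProtoDescriptor: schema},
                Rows:         &ProtoRows{SerializedRows: rows},
            },
        },
    })
    if err != nil {
        return err
    }
    if _, err := stream.Recv(); err != nil { // one response per successful append
        return err
    }
    if _, err := client.FinalizeWriteStream(ctx, &FinalizeWriteStreamRequest{Name: ws.GetName()}); err != nil {
        return err
    }
    _, err = client.BatchCommitWriteStreams(ctx, &BatchCommitWriteStreamsRequest{
        Parent:       table,
        WriteStreams: []string{ws.GetName()},
    })
    return err
}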

type BigQueryWriteServer Uses

type BigQueryWriteServer interface {
    // Creates a write stream to the given table.
    // Additionally, every table has a special COMMITTED stream named '_default'
    // to which data can be written. This stream doesn't need to be created using
    // CreateWriteStream. It is a stream that can be used simultaneously by any
    // number of clients. Data written to this stream is considered committed as
    // soon as an acknowledgement is received.
    CreateWriteStream(context.Context, *CreateWriteStreamRequest) (*WriteStream, error)
    // Appends data to the given stream.
    //
    // If `offset` is specified, the `offset` is checked against the end of
    // stream. The server returns `OUT_OF_RANGE` in `AppendRowsResponse` if an
    // attempt is made to append to an offset beyond the current end of the stream
    // or `ALREADY_EXISTS` if the user provides an `offset` that has already been
    // written to. The user can retry with an adjusted offset within the same RPC
    // stream. If `offset` is not specified, append happens at the end of the
    // stream.
    //
    // The response contains the offset at which the append happened. Responses
    // are received in the same order in which requests are sent. There will be
    // one response for each successful request. If the `offset` is not set in
    // the response, it means the append did not happen due to an error. If one
    // request fails, all subsequent requests will also fail until a successful
    // request is made again.
    //
    // If the stream is of `PENDING` type, data will only be available for read
    // operations after the stream is committed.
    AppendRows(BigQueryWrite_AppendRowsServer) error
    // Gets a write stream.
    GetWriteStream(context.Context, *GetWriteStreamRequest) (*WriteStream, error)
    // Finalize a write stream so that no new data can be appended to the
    // stream. Finalize is not supported on the '_default' stream.
    FinalizeWriteStream(context.Context, *FinalizeWriteStreamRequest) (*FinalizeWriteStreamResponse, error)
    // Atomically commits a group of `PENDING` streams that belong to the same
    // `parent` table.
    // Streams must be finalized before commit and cannot be committed multiple
    // times. Once a stream is committed, data in the stream becomes available
    // for read operations.
    BatchCommitWriteStreams(context.Context, *BatchCommitWriteStreamsRequest) (*BatchCommitWriteStreamsResponse, error)
    // Flushes rows to a BUFFERED stream.
    // If users are appending rows to a BUFFERED stream, a flush operation is
    // required in order for the rows to become available for reading. A
    // flush operation flushes up to any previously flushed offset in a BUFFERED
    // stream, to the offset specified in the request.
    // Flush is not supported on the _default stream, since it is not BUFFERED.
    FlushRows(context.Context, *FlushRowsRequest) (*FlushRowsResponse, error)
}

BigQueryWriteServer is the server API for BigQueryWrite service.

type BigQueryWrite_AppendRowsClient Uses

type BigQueryWrite_AppendRowsClient interface {
    Send(*AppendRowsRequest) error
    Recv() (*AppendRowsResponse, error)
    grpc.ClientStream
}

type BigQueryWrite_AppendRowsServer Uses

type BigQueryWrite_AppendRowsServer interface {
    Send(*AppendRowsResponse) error
    Recv() (*AppendRowsRequest, error)
    grpc.ServerStream
}

type CreateReadSessionRequest Uses

type CreateReadSessionRequest struct {

    // Required. The request project that owns the session, in the form of
    // `projects/{project_id}`.
    Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"`
    // Required. Session to be created.
    ReadSession *ReadSession `protobuf:"bytes,2,opt,name=read_session,json=readSession,proto3" json:"read_session,omitempty"`
    // Max initial number of streams. If unset or zero, the server will
    // provide an appropriate number of streams so as to produce reasonable
    // throughput. Must be non-negative. The number of streams may be lower than
    // the requested number, depending on the amount of parallelism that is
    // reasonable for the table. An error will be returned if the max count is
    // greater than the current system max limit of 1,000.
    //
    // Streams must be read starting from offset 0.
    MaxStreamCount int32 `protobuf:"varint,3,opt,name=max_stream_count,json=maxStreamCount,proto3" json:"max_stream_count,omitempty"`
    // contains filtered or unexported fields
}

Request message for `CreateReadSession`.

func (*CreateReadSessionRequest) Descriptor Uses

func (*CreateReadSessionRequest) Descriptor() ([]byte, []int)

Deprecated: Use CreateReadSessionRequest.ProtoReflect.Descriptor instead.

func (*CreateReadSessionRequest) GetMaxStreamCount Uses

func (x *CreateReadSessionRequest) GetMaxStreamCount() int32

func (*CreateReadSessionRequest) GetParent Uses

func (x *CreateReadSessionRequest) GetParent() string

func (*CreateReadSessionRequest) GetReadSession Uses

func (x *CreateReadSessionRequest) GetReadSession() *ReadSession

func (*CreateReadSessionRequest) ProtoMessage Uses

func (*CreateReadSessionRequest) ProtoMessage()

func (*CreateReadSessionRequest) ProtoReflect Uses

func (x *CreateReadSessionRequest) ProtoReflect() protoreflect.Message

func (*CreateReadSessionRequest) Reset Uses

func (x *CreateReadSessionRequest) Reset()

func (*CreateReadSessionRequest) String Uses

func (x *CreateReadSessionRequest) String() string

type CreateWriteStreamRequest Uses

type CreateWriteStreamRequest struct {

    // Required. Reference to the table to which the stream belongs, in the format
    // of `projects/{project}/datasets/{dataset}/tables/{table}`.
    Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"`
    // Required. Stream to be created.
    WriteStream *WriteStream `protobuf:"bytes,2,opt,name=write_stream,json=writeStream,proto3" json:"write_stream,omitempty"`
    // contains filtered or unexported fields
}

Request message for `CreateWriteStream`.

func (*CreateWriteStreamRequest) Descriptor Uses

func (*CreateWriteStreamRequest) Descriptor() ([]byte, []int)

Deprecated: Use CreateWriteStreamRequest.ProtoReflect.Descriptor instead.

func (*CreateWriteStreamRequest) GetParent Uses

func (x *CreateWriteStreamRequest) GetParent() string

func (*CreateWriteStreamRequest) GetWriteStream Uses

func (x *CreateWriteStreamRequest) GetWriteStream() *WriteStream

func (*CreateWriteStreamRequest) ProtoMessage Uses

func (*CreateWriteStreamRequest) ProtoMessage()

func (*CreateWriteStreamRequest) ProtoReflect Uses

func (x *CreateWriteStreamRequest) ProtoReflect() protoreflect.Message

func (*CreateWriteStreamRequest) Reset Uses

func (x *CreateWriteStreamRequest) Reset()

func (*CreateWriteStreamRequest) String Uses

func (x *CreateWriteStreamRequest) String() string

type DataFormat Uses

type DataFormat int32

Data format for input or output data.

const (
    DataFormat_DATA_FORMAT_UNSPECIFIED DataFormat = 0
    // Avro is a standard open source row based file format.
    // See https://avro.apache.org/ for more details.
    DataFormat_AVRO DataFormat = 1
    // Arrow is a standard open source column-based message format.
    // See https://arrow.apache.org/ for more details.
    DataFormat_ARROW DataFormat = 2
)

func (DataFormat) Descriptor Uses

func (DataFormat) Descriptor() protoreflect.EnumDescriptor

func (DataFormat) Enum Uses

func (x DataFormat) Enum() *DataFormat

func (DataFormat) EnumDescriptor Uses

func (DataFormat) EnumDescriptor() ([]byte, []int)

Deprecated: Use DataFormat.Descriptor instead.

func (DataFormat) Number Uses

func (x DataFormat) Number() protoreflect.EnumNumber

func (DataFormat) String Uses

func (x DataFormat) String() string

func (DataFormat) Type Uses

func (DataFormat) Type() protoreflect.EnumType

type FinalizeWriteStreamRequest Uses

type FinalizeWriteStreamRequest struct {

    // Required. Name of the stream to finalize, in the form of
    // `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
    Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
    // contains filtered or unexported fields
}

Request message for invoking `FinalizeWriteStream`.

func (*FinalizeWriteStreamRequest) Descriptor Uses

func (*FinalizeWriteStreamRequest) Descriptor() ([]byte, []int)

Deprecated: Use FinalizeWriteStreamRequest.ProtoReflect.Descriptor instead.

func (*FinalizeWriteStreamRequest) GetName Uses

func (x *FinalizeWriteStreamRequest) GetName() string

func (*FinalizeWriteStreamRequest) ProtoMessage Uses

func (*FinalizeWriteStreamRequest) ProtoMessage()

func (*FinalizeWriteStreamRequest) ProtoReflect Uses

func (x *FinalizeWriteStreamRequest) ProtoReflect() protoreflect.Message

func (*FinalizeWriteStreamRequest) Reset Uses

func (x *FinalizeWriteStreamRequest) Reset()

func (*FinalizeWriteStreamRequest) String Uses

func (x *FinalizeWriteStreamRequest) String() string

type FinalizeWriteStreamResponse Uses

type FinalizeWriteStreamResponse struct {

    // Number of rows in the finalized stream.
    RowCount int64 `protobuf:"varint,1,opt,name=row_count,json=rowCount,proto3" json:"row_count,omitempty"`
    // contains filtered or unexported fields
}

Response message for `FinalizeWriteStream`.

func (*FinalizeWriteStreamResponse) Descriptor Uses

func (*FinalizeWriteStreamResponse) Descriptor() ([]byte, []int)

Deprecated: Use FinalizeWriteStreamResponse.ProtoReflect.Descriptor instead.

func (*FinalizeWriteStreamResponse) GetRowCount Uses

func (x *FinalizeWriteStreamResponse) GetRowCount() int64

func (*FinalizeWriteStreamResponse) ProtoMessage Uses

func (*FinalizeWriteStreamResponse) ProtoMessage()

func (*FinalizeWriteStreamResponse) ProtoReflect Uses

func (x *FinalizeWriteStreamResponse) ProtoReflect() protoreflect.Message

func (*FinalizeWriteStreamResponse) Reset Uses

func (x *FinalizeWriteStreamResponse) Reset()

func (*FinalizeWriteStreamResponse) String Uses

func (x *FinalizeWriteStreamResponse) String() string

type FlushRowsRequest Uses

type FlushRowsRequest struct {

    // Required. The stream that is the target of the flush operation.
    WriteStream string `protobuf:"bytes,1,opt,name=write_stream,json=writeStream,proto3" json:"write_stream,omitempty"`
    // Ending offset of the flush operation. Rows before this offset (including
    // this offset) will be flushed.
    Offset *wrapperspb.Int64Value `protobuf:"bytes,2,opt,name=offset,proto3" json:"offset,omitempty"`
    // contains filtered or unexported fields
}

Request message for `FlushRows`.

func (*FlushRowsRequest) Descriptor Uses

func (*FlushRowsRequest) Descriptor() ([]byte, []int)

Deprecated: Use FlushRowsRequest.ProtoReflect.Descriptor instead.

func (*FlushRowsRequest) GetOffset Uses

func (x *FlushRowsRequest) GetOffset() *wrapperspb.Int64Value

func (*FlushRowsRequest) GetWriteStream Uses

func (x *FlushRowsRequest) GetWriteStream() string

func (*FlushRowsRequest) ProtoMessage Uses

func (*FlushRowsRequest) ProtoMessage()

func (*FlushRowsRequest) ProtoReflect Uses

func (x *FlushRowsRequest) ProtoReflect() protoreflect.Message

func (*FlushRowsRequest) Reset Uses

func (x *FlushRowsRequest) Reset()

func (*FlushRowsRequest) String Uses

func (x *FlushRowsRequest) String() string

type FlushRowsResponse Uses

type FlushRowsResponse struct {

    // The rows before this offset (including this offset) are flushed.
    Offset int64 `protobuf:"varint,1,opt,name=offset,proto3" json:"offset,omitempty"`
    // contains filtered or unexported fields
}

Response message for `FlushRows`.

func (*FlushRowsResponse) Descriptor Uses

func (*FlushRowsResponse) Descriptor() ([]byte, []int)

Deprecated: Use FlushRowsResponse.ProtoReflect.Descriptor instead.

func (*FlushRowsResponse) GetOffset Uses

func (x *FlushRowsResponse) GetOffset() int64

func (*FlushRowsResponse) ProtoMessage Uses

func (*FlushRowsResponse) ProtoMessage()

func (*FlushRowsResponse) ProtoReflect Uses

func (x *FlushRowsResponse) ProtoReflect() protoreflect.Message

func (*FlushRowsResponse) Reset Uses

func (x *FlushRowsResponse) Reset()

func (*FlushRowsResponse) String Uses

func (x *FlushRowsResponse) String() string

type GetWriteStreamRequest Uses

type GetWriteStreamRequest struct {

    // Required. Name of the stream to get, in the form of
    // `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
    Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
    // contains filtered or unexported fields
}

Request message for `GetWriteStream`.

func (*GetWriteStreamRequest) Descriptor Uses

func (*GetWriteStreamRequest) Descriptor() ([]byte, []int)

Deprecated: Use GetWriteStreamRequest.ProtoReflect.Descriptor instead.

func (*GetWriteStreamRequest) GetName Uses

func (x *GetWriteStreamRequest) GetName() string

func (*GetWriteStreamRequest) ProtoMessage Uses

func (*GetWriteStreamRequest) ProtoMessage()

func (*GetWriteStreamRequest) ProtoReflect Uses

func (x *GetWriteStreamRequest) ProtoReflect() protoreflect.Message

func (*GetWriteStreamRequest) Reset Uses

func (x *GetWriteStreamRequest) Reset()

func (*GetWriteStreamRequest) String Uses

func (x *GetWriteStreamRequest) String() string

type ProtoRows Uses

type ProtoRows struct {

    // A sequence of rows serialized as a Protocol Buffer.
    //
    // See https://developers.google.com/protocol-buffers/docs/overview for more
    // information on deserializing this field.
    SerializedRows [][]byte `protobuf:"bytes,1,rep,name=serialized_rows,json=serializedRows,proto3" json:"serialized_rows,omitempty"`
    // contains filtered or unexported fields
}

Protobuf rows.

func (*ProtoRows) Descriptor Uses

func (*ProtoRows) Descriptor() ([]byte, []int)

Deprecated: Use ProtoRows.ProtoReflect.Descriptor instead.

func (*ProtoRows) GetSerializedRows Uses

func (x *ProtoRows) GetSerializedRows() [][]byte

func (*ProtoRows) ProtoMessage Uses

func (*ProtoRows) ProtoMessage()

func (*ProtoRows) ProtoReflect Uses

func (x *ProtoRows) ProtoReflect() protoreflect.Message

func (*ProtoRows) Reset Uses

func (x *ProtoRows) Reset()

func (*ProtoRows) String Uses

func (x *ProtoRows) String() string

type ProtoSchema Uses

type ProtoSchema struct {

    // Descriptor for the input message. The descriptor has to be self-contained,
    // including all the nested types, except for protocol buffer well-known types
    // (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf).
    ProtoDescriptor *descriptorpb.DescriptorProto `protobuf:"bytes,1,opt,name=proto_descriptor,json=protoDescriptor,proto3" json:"proto_descriptor,omitempty"`
    // contains filtered or unexported fields
}

Protobuf schema is an API presentation of the protocol buffer schema.
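
A brief sketch of deriving such a descriptor from a generated Go message type; protodesc (google.golang.org/protobuf/reflect/protodesc) is a real helper package, while mypb.Row is a hypothetical generated type:

func rowSchema() *ProtoSchema {
    // ToDescriptorProto produces a DescriptorProto for the generated message.
    // For messages that reference other non-well-known types, those nested
    // types must be folded in so the descriptor stays self-contained.
    return &ProtoSchema{
        ProtoDescriptor: protodesc.ToDescriptorProto((&mypb.Row{}).ProtoReflect().Descriptor()),
    }
}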

func (*ProtoSchema) Descriptor Uses

func (*ProtoSchema) Descriptor() ([]byte, []int)

Deprecated: Use ProtoSchema.ProtoReflect.Descriptor instead.

func (*ProtoSchema) GetProtoDescriptor Uses

func (x *ProtoSchema) GetProtoDescriptor() *descriptorpb.DescriptorProto

func (*ProtoSchema) ProtoMessage Uses

func (*ProtoSchema) ProtoMessage()

func (*ProtoSchema) ProtoReflect Uses

func (x *ProtoSchema) ProtoReflect() protoreflect.Message

func (*ProtoSchema) Reset Uses

func (x *ProtoSchema) Reset()

func (*ProtoSchema) String Uses

func (x *ProtoSchema) String() string

type ReadRowsRequest Uses

type ReadRowsRequest struct {

    // Required. Stream to read rows from.
    ReadStream string `protobuf:"bytes,1,opt,name=read_stream,json=readStream,proto3" json:"read_stream,omitempty"`
    // The offset requested must be less than the last row read from Read.
    // Requesting a larger offset is undefined. If not specified, start reading
    // from offset zero.
    Offset int64 `protobuf:"varint,2,opt,name=offset,proto3" json:"offset,omitempty"`
    // contains filtered or unexported fields
}

Request message for `ReadRows`.

func (*ReadRowsRequest) Descriptor Uses

func (*ReadRowsRequest) Descriptor() ([]byte, []int)

Deprecated: Use ReadRowsRequest.ProtoReflect.Descriptor instead.

func (*ReadRowsRequest) GetOffset Uses

func (x *ReadRowsRequest) GetOffset() int64

func (*ReadRowsRequest) GetReadStream Uses

func (x *ReadRowsRequest) GetReadStream() string

func (*ReadRowsRequest) ProtoMessage Uses

func (*ReadRowsRequest) ProtoMessage()

func (*ReadRowsRequest) ProtoReflect Uses

func (x *ReadRowsRequest) ProtoReflect() protoreflect.Message

func (*ReadRowsRequest) Reset Uses

func (x *ReadRowsRequest) Reset()

func (*ReadRowsRequest) String Uses

func (x *ReadRowsRequest) String() string

type ReadRowsResponse Uses

type ReadRowsResponse struct {

    // Row data is returned in the format specified during session creation.
    //
    // Types that are assignable to Rows:
    //	*ReadRowsResponse_AvroRows
    //	*ReadRowsResponse_ArrowRecordBatch
    Rows isReadRowsResponse_Rows `protobuf_oneof:"rows"`
    // Number of serialized rows in the rows block.
    RowCount int64 `protobuf:"varint,6,opt,name=row_count,json=rowCount,proto3" json:"row_count,omitempty"`
    // Statistics for the stream.
    Stats *StreamStats `protobuf:"bytes,2,opt,name=stats,proto3" json:"stats,omitempty"`
    // Throttling state. If unset, the latest response still describes
    // the current throttling status.
    ThrottleState *ThrottleState `protobuf:"bytes,5,opt,name=throttle_state,json=throttleState,proto3" json:"throttle_state,omitempty"`
    // contains filtered or unexported fields
}

Response from calling `ReadRows` may include row data, progress and throttling information.

func (*ReadRowsResponse) Descriptor Uses

func (*ReadRowsResponse) Descriptor() ([]byte, []int)

Deprecated: Use ReadRowsResponse.ProtoReflect.Descriptor instead.

func (*ReadRowsResponse) GetArrowRecordBatch Uses

func (x *ReadRowsResponse) GetArrowRecordBatch() *ArrowRecordBatch

func (*ReadRowsResponse) GetAvroRows Uses

func (x *ReadRowsResponse) GetAvroRows() *AvroRows

func (*ReadRowsResponse) GetRowCount Uses

func (x *ReadRowsResponse) GetRowCount() int64

func (*ReadRowsResponse) GetRows Uses

func (m *ReadRowsResponse) GetRows() isReadRowsResponse_Rows

func (*ReadRowsResponse) GetStats Uses

func (x *ReadRowsResponse) GetStats() *StreamStats

func (*ReadRowsResponse) GetThrottleState Uses

func (x *ReadRowsResponse) GetThrottleState() *ThrottleState

func (*ReadRowsResponse) ProtoMessage Uses

func (*ReadRowsResponse) ProtoMessage()

func (*ReadRowsResponse) ProtoReflect Uses

func (x *ReadRowsResponse) ProtoReflect() protoreflect.Message

func (*ReadRowsResponse) Reset Uses

func (x *ReadRowsResponse) Reset()

func (*ReadRowsResponse) String Uses

func (x *ReadRowsResponse) String() string

type ReadRowsResponse_ArrowRecordBatch Uses

type ReadRowsResponse_ArrowRecordBatch struct {
    // Serialized row data in Arrow RecordBatch format.
    ArrowRecordBatch *ArrowRecordBatch `protobuf:"bytes,4,opt,name=arrow_record_batch,json=arrowRecordBatch,proto3,oneof"`
}

type ReadRowsResponse_AvroRows Uses

type ReadRowsResponse_AvroRows struct {
    // Serialized row data in AVRO format.
    AvroRows *AvroRows `protobuf:"bytes,3,opt,name=avro_rows,json=avroRows,proto3,oneof"`
}

type ReadSession Uses

type ReadSession struct {

    // Output only. Unique identifier for the session, in the form
    // `projects/{project_id}/locations/{location}/sessions/{session_id}`.
    Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
    // Output only. Time at which the session becomes invalid. After this time, subsequent
    // requests to read this Session will return errors. The expire_time is
    // automatically assigned and currently cannot be specified or updated.
    ExpireTime *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=expire_time,json=expireTime,proto3" json:"expire_time,omitempty"`
    // Immutable. Data format of the output data.
    DataFormat DataFormat `protobuf:"varint,3,opt,name=data_format,json=dataFormat,proto3,enum=google.cloud.bigquery.storage.v1beta2.DataFormat" json:"data_format,omitempty"`
    // The schema for the read. If read_options.selected_fields is set, the
    // schema may be different from the table schema as it will only contain
    // the selected fields.
    //
    // Types that are assignable to Schema:
    //	*ReadSession_AvroSchema
    //	*ReadSession_ArrowSchema
    Schema isReadSession_Schema `protobuf_oneof:"schema"`
    // Immutable. Table that this ReadSession is reading from, in the form
    // `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`.
    Table string `protobuf:"bytes,6,opt,name=table,proto3" json:"table,omitempty"`
    // Optional. Any modifiers which are applied when reading from the specified table.
    TableModifiers *ReadSession_TableModifiers `protobuf:"bytes,7,opt,name=table_modifiers,json=tableModifiers,proto3" json:"table_modifiers,omitempty"`
    // Optional. Read options for this session (e.g. column selection, filters).
    ReadOptions *ReadSession_TableReadOptions `protobuf:"bytes,8,opt,name=read_options,json=readOptions,proto3" json:"read_options,omitempty"`
    // Output only. A list of streams created with the session.
    //
    // At least one stream is created with the session. In the future, larger
    // request_stream_count values *may* result in this list being unpopulated;
    // in that case, the user will need to use a List method (not yet available)
    // to get the streams instead.
    Streams []*ReadStream `protobuf:"bytes,10,rep,name=streams,proto3" json:"streams,omitempty"`
    // contains filtered or unexported fields
}

Information about the ReadSession.

func (*ReadSession) Descriptor Uses

func (*ReadSession) Descriptor() ([]byte, []int)

Deprecated: Use ReadSession.ProtoReflect.Descriptor instead.

func (*ReadSession) GetArrowSchema Uses

func (x *ReadSession) GetArrowSchema() *ArrowSchema

func (*ReadSession) GetAvroSchema Uses

func (x *ReadSession) GetAvroSchema() *AvroSchema

func (*ReadSession) GetDataFormat Uses

func (x *ReadSession) GetDataFormat() DataFormat

func (*ReadSession) GetExpireTime Uses

func (x *ReadSession) GetExpireTime() *timestamppb.Timestamp

func (*ReadSession) GetName Uses

func (x *ReadSession) GetName() string

func (*ReadSession) GetReadOptions Uses

func (x *ReadSession) GetReadOptions() *ReadSession_TableReadOptions

func (*ReadSession) GetSchema Uses

func (m *ReadSession) GetSchema() isReadSession_Schema

func (*ReadSession) GetStreams Uses

func (x *ReadSession) GetStreams() []*ReadStream

func (*ReadSession) GetTable Uses

func (x *ReadSession) GetTable() string

func (*ReadSession) GetTableModifiers Uses

func (x *ReadSession) GetTableModifiers() *ReadSession_TableModifiers

func (*ReadSession) ProtoMessage Uses

func (*ReadSession) ProtoMessage()

func (*ReadSession) ProtoReflect Uses

func (x *ReadSession) ProtoReflect() protoreflect.Message

func (*ReadSession) Reset Uses

func (x *ReadSession) Reset()

func (*ReadSession) String Uses

func (x *ReadSession) String() string

type ReadSession_ArrowSchema Uses

type ReadSession_ArrowSchema struct {
    // Output only. Arrow schema.
    ArrowSchema *ArrowSchema `protobuf:"bytes,5,opt,name=arrow_schema,json=arrowSchema,proto3,oneof"`
}

type ReadSession_AvroSchema Uses

type ReadSession_AvroSchema struct {
    // Output only. Avro schema.
    AvroSchema *AvroSchema `protobuf:"bytes,4,opt,name=avro_schema,json=avroSchema,proto3,oneof"`
}

type ReadSession_TableModifiers Uses

type ReadSession_TableModifiers struct {

    // The snapshot time of the table. If not set, interpreted as now.
    SnapshotTime *timestamppb.Timestamp `protobuf:"bytes,1,opt,name=snapshot_time,json=snapshotTime,proto3" json:"snapshot_time,omitempty"`
    // contains filtered or unexported fields
}

Additional attributes when reading a table.

func (*ReadSession_TableModifiers) Descriptor Uses

func (*ReadSession_TableModifiers) Descriptor() ([]byte, []int)

Deprecated: Use ReadSession_TableModifiers.ProtoReflect.Descriptor instead.

func (*ReadSession_TableModifiers) GetSnapshotTime Uses

func (x *ReadSession_TableModifiers) GetSnapshotTime() *timestamppb.Timestamp

func (*ReadSession_TableModifiers) ProtoMessage Uses

func (*ReadSession_TableModifiers) ProtoMessage()

func (*ReadSession_TableModifiers) ProtoReflect Uses

func (x *ReadSession_TableModifiers) ProtoReflect() protoreflect.Message

func (*ReadSession_TableModifiers) Reset Uses

func (x *ReadSession_TableModifiers) Reset()

func (*ReadSession_TableModifiers) String Uses

func (x *ReadSession_TableModifiers) String() string

type ReadSession_TableReadOptions Uses

type ReadSession_TableReadOptions struct {

    // Names of the fields in the table that should be read. If empty, all
    // fields will be read. If the specified field is a nested field, all
    // the sub-fields in the field will be selected. The output field order is
    // unrelated to the order of fields in selected_fields.
    SelectedFields []string `protobuf:"bytes,1,rep,name=selected_fields,json=selectedFields,proto3" json:"selected_fields,omitempty"`
    // SQL text filtering statement, similar to a WHERE clause in a query.
    // Aggregates are not supported.
    //
    // Examples: "int_field > 5"
    //           "date_field = CAST('2014-9-27' as DATE)"
    //           "nullable_field is not NULL"
    //           "st_equals(geo_field, st_geofromtext("POINT(2, 2)"))"
    //           "numeric_field BETWEEN 1.0 AND 5.0"
    RowRestriction string `protobuf:"bytes,2,opt,name=row_restriction,json=rowRestriction,proto3" json:"row_restriction,omitempty"`
    // Optional. Options specific to the Apache Arrow output format.
    ArrowSerializationOptions *ArrowSerializationOptions `protobuf:"bytes,3,opt,name=arrow_serialization_options,json=arrowSerializationOptions,proto3" json:"arrow_serialization_options,omitempty"`
    // contains filtered or unexported fields
}

Options dictating how we read a table.
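
As a hedged sketch, the options below project two columns, push a filter to the server, and request the ARROW_0_15 framing for Arrow output. The column names and filter text are invented, the Format field on ArrowSerializationOptions is assumed from the enum defined alongside it, and storagepb aliases this package.

import (
    storagepb "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta2"
)

// exampleReadOptions selects two fields, filters rows server-side, and
// sets an Arrow serialization format. All names here are illustrative.
func exampleReadOptions() *storagepb.ReadSession_TableReadOptions {
    return &storagepb.ReadSession_TableReadOptions{
        SelectedFields: []string{"name", "number"},
        RowRestriction: "number > 5",
        ArrowSerializationOptions: &storagepb.ArrowSerializationOptions{
            Format: storagepb.ArrowSerializationOptions_ARROW_0_15,
        },
    }
}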

func (*ReadSession_TableReadOptions) Descriptor Uses

func (*ReadSession_TableReadOptions) Descriptor() ([]byte, []int)

Deprecated: Use ReadSession_TableReadOptions.ProtoReflect.Descriptor instead.

func (*ReadSession_TableReadOptions) GetArrowSerializationOptions Uses

func (x *ReadSession_TableReadOptions) GetArrowSerializationOptions() *ArrowSerializationOptions

func (*ReadSession_TableReadOptions) GetRowRestriction Uses

func (x *ReadSession_TableReadOptions) GetRowRestriction() string

func (*ReadSession_TableReadOptions) GetSelectedFields Uses

func (x *ReadSession_TableReadOptions) GetSelectedFields() []string

func (*ReadSession_TableReadOptions) ProtoMessage Uses

func (*ReadSession_TableReadOptions) ProtoMessage()

func (*ReadSession_TableReadOptions) ProtoReflect Uses

func (x *ReadSession_TableReadOptions) ProtoReflect() protoreflect.Message

func (*ReadSession_TableReadOptions) Reset Uses

func (x *ReadSession_TableReadOptions) Reset()

func (*ReadSession_TableReadOptions) String Uses

func (x *ReadSession_TableReadOptions) String() string

type ReadStream Uses

type ReadStream struct {

    // Output only. Name of the stream, in the form
    // `projects/{project_id}/locations/{location}/sessions/{session_id}/streams/{stream_id}`.
    Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
    // contains filtered or unexported fields
}

Information about a single stream that gets data out of the storage system. Most of the information about `ReadStream` instances is aggregated, making `ReadStream` lightweight.

func (*ReadStream) Descriptor Uses

func (*ReadStream) Descriptor() ([]byte, []int)

Deprecated: Use ReadStream.ProtoReflect.Descriptor instead.

func (*ReadStream) GetName Uses

func (x *ReadStream) GetName() string

func (*ReadStream) ProtoMessage Uses

func (*ReadStream) ProtoMessage()

func (*ReadStream) ProtoReflect Uses

func (x *ReadStream) ProtoReflect() protoreflect.Message

func (*ReadStream) Reset Uses

func (x *ReadStream) Reset()

func (*ReadStream) String Uses

func (x *ReadStream) String() string

type SplitReadStreamRequest Uses

type SplitReadStreamRequest struct {

    // Required. Name of the stream to split.
    Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
    // A value in the range (0.0, 1.0) that specifies the fractional point at
    // which the original stream should be split. The actual split point is
    // evaluated on pre-filtered rows, so if a filter is provided, then there is
    // no guarantee that the division of the rows between the new child streams
    // will be proportional to this fractional value. Additionally, because the
    // server-side unit for assigning data is collections of rows, this fraction
    // will always map to a data storage boundary on the server side.
    Fraction float64 `protobuf:"fixed64,2,opt,name=fraction,proto3" json:"fraction,omitempty"`
    // contains filtered or unexported fields
}

Request message for `SplitReadStream`.
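
A small sketch of the split workflow under these definitions: build a request for the midpoint, then check the response for whether a further split is possible. Issuing the RPC itself is assumed to happen elsewhere, and storagepb aliases this package.

import (
    storagepb "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta2"
)

// splitRequest asks for a split roughly in half; the split point is
// evaluated on pre-filtered rows, so the children may not be equal.
func splitRequest(stream string) *storagepb.SplitReadStreamRequest {
    return &storagepb.SplitReadStreamRequest{
        Name:     stream,
        Fraction: 0.5,
    }
}

// canSplitFurther reports whether the call actually produced two child
// streams; an empty primary or remainder means the original stream can
// no longer be split.
func canSplitFurther(resp *storagepb.SplitReadStreamResponse) bool {
    return resp.GetPrimaryStream() != nil && resp.GetRemainderStream() != nil
}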

func (*SplitReadStreamRequest) Descriptor Uses

func (*SplitReadStreamRequest) Descriptor() ([]byte, []int)

Deprecated: Use SplitReadStreamRequest.ProtoReflect.Descriptor instead.

func (*SplitReadStreamRequest) GetFraction Uses

func (x *SplitReadStreamRequest) GetFraction() float64

func (*SplitReadStreamRequest) GetName Uses

func (x *SplitReadStreamRequest) GetName() string

func (*SplitReadStreamRequest) ProtoMessage Uses

func (*SplitReadStreamRequest) ProtoMessage()

func (*SplitReadStreamRequest) ProtoReflect Uses

func (x *SplitReadStreamRequest) ProtoReflect() protoreflect.Message

func (*SplitReadStreamRequest) Reset Uses

func (x *SplitReadStreamRequest) Reset()

func (*SplitReadStreamRequest) String Uses

func (x *SplitReadStreamRequest) String() string

type SplitReadStreamResponse Uses

type SplitReadStreamResponse struct {

    // Primary stream, which contains the beginning portion of
    // |original_stream|. An empty value indicates that the original stream can no
    // longer be split.
    PrimaryStream *ReadStream `protobuf:"bytes,1,opt,name=primary_stream,json=primaryStream,proto3" json:"primary_stream,omitempty"`
    // Remainder stream, which contains the tail of |original_stream|. An empty
    // value indicates that the original stream can no longer be split.
    RemainderStream *ReadStream `protobuf:"bytes,2,opt,name=remainder_stream,json=remainderStream,proto3" json:"remainder_stream,omitempty"`
    // contains filtered or unexported fields
}

func (*SplitReadStreamResponse) Descriptor Uses

func (*SplitReadStreamResponse) Descriptor() ([]byte, []int)

Deprecated: Use SplitReadStreamResponse.ProtoReflect.Descriptor instead.

func (*SplitReadStreamResponse) GetPrimaryStream Uses

func (x *SplitReadStreamResponse) GetPrimaryStream() *ReadStream

func (*SplitReadStreamResponse) GetRemainderStream Uses

func (x *SplitReadStreamResponse) GetRemainderStream() *ReadStream

func (*SplitReadStreamResponse) ProtoMessage Uses

func (*SplitReadStreamResponse) ProtoMessage()

func (*SplitReadStreamResponse) ProtoReflect Uses

func (x *SplitReadStreamResponse) ProtoReflect() protoreflect.Message

func (*SplitReadStreamResponse) Reset Uses

func (x *SplitReadStreamResponse) Reset()

func (*SplitReadStreamResponse) String Uses

func (x *SplitReadStreamResponse) String() string

type StorageError Uses

type StorageError struct {

    // BigQuery Storage specific error code.
    Code StorageError_StorageErrorCode `protobuf:"varint,1,opt,name=code,proto3,enum=google.cloud.bigquery.storage.v1beta2.StorageError_StorageErrorCode" json:"code,omitempty"`
    // Name of the failed entity.
    Entity string `protobuf:"bytes,2,opt,name=entity,proto3" json:"entity,omitempty"`
    // Message that describes the error.
    ErrorMessage string `protobuf:"bytes,3,opt,name=error_message,json=errorMessage,proto3" json:"error_message,omitempty"`
    // contains filtered or unexported fields
}

Structured custom BigQuery Storage error message. The error can be attached as error details in the returned rpc Status. Users can use this information to process errors in a structured way, rather than having to parse error messages.
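
One way to consume this, sketched with the standard gRPC status helpers: pull the details out of the returned error and type-assert them. Whether a given RPC actually attaches a StorageError detail depends on the call, so the lookup may find nothing; storagepb aliases this package.

import (
    storagepb "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta2"
    "google.golang.org/grpc/status"
)

// storageErrorFromRPC extracts a structured StorageError from an RPC
// error, if the server attached one as a status detail.
func storageErrorFromRPC(err error) (*storagepb.StorageError, bool) {
    st, ok := status.FromError(err)
    if !ok {
        return nil, false
    }
    for _, d := range st.Details() {
        if se, ok := d.(*storagepb.StorageError); ok {
            return se, true
        }
    }
    return nil, false
}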

func (*StorageError) Descriptor Uses

func (*StorageError) Descriptor() ([]byte, []int)

Deprecated: Use StorageError.ProtoReflect.Descriptor instead.

func (*StorageError) GetCode Uses

func (x *StorageError) GetCode() StorageError_StorageErrorCode

func (*StorageError) GetEntity Uses

func (x *StorageError) GetEntity() string

func (*StorageError) GetErrorMessage Uses

func (x *StorageError) GetErrorMessage() string

func (*StorageError) ProtoMessage Uses

func (*StorageError) ProtoMessage()

func (*StorageError) ProtoReflect Uses

func (x *StorageError) ProtoReflect() protoreflect.Message

func (*StorageError) Reset Uses

func (x *StorageError) Reset()

func (*StorageError) String Uses

func (x *StorageError) String() string

type StorageError_StorageErrorCode Uses

type StorageError_StorageErrorCode int32

Error code for `StorageError`.

const (
    // Default error.
    StorageError_STORAGE_ERROR_CODE_UNSPECIFIED StorageError_StorageErrorCode = 0
    // Table is not found in the system.
    StorageError_TABLE_NOT_FOUND StorageError_StorageErrorCode = 1
    // Stream is already committed.
    StorageError_STREAM_ALREADY_COMMITTED StorageError_StorageErrorCode = 2
    // Stream is not found.
    StorageError_STREAM_NOT_FOUND StorageError_StorageErrorCode = 3
    // Invalid Stream type.
    // For example, you try to commit a stream that is not pending.
    StorageError_INVALID_STREAM_TYPE StorageError_StorageErrorCode = 4
    // Invalid Stream state.
    // For example, you try to commit a stream that is not finalized or has
    // been garbage collected.
    StorageError_INVALID_STREAM_STATE StorageError_StorageErrorCode = 5
)

func (StorageError_StorageErrorCode) Descriptor Uses

func (StorageError_StorageErrorCode) Descriptor() protoreflect.EnumDescriptor

func (StorageError_StorageErrorCode) Enum Uses

func (x StorageError_StorageErrorCode) Enum() *StorageError_StorageErrorCode

func (StorageError_StorageErrorCode) EnumDescriptor Uses

func (StorageError_StorageErrorCode) EnumDescriptor() ([]byte, []int)

Deprecated: Use StorageError_StorageErrorCode.Descriptor instead.

func (StorageError_StorageErrorCode) Number Uses

func (x StorageError_StorageErrorCode) Number() protoreflect.EnumNumber

func (StorageError_StorageErrorCode) String Uses

func (x StorageError_StorageErrorCode) String() string

func (StorageError_StorageErrorCode) Type Uses

func (StorageError_StorageErrorCode) Type() protoreflect.EnumType

type StreamStats Uses

type StreamStats struct {

    // Represents the progress of the current stream.
    Progress *StreamStats_Progress `protobuf:"bytes,2,opt,name=progress,proto3" json:"progress,omitempty"`
    // contains filtered or unexported fields
}

Estimated stream statistics for a given Stream.

func (*StreamStats) Descriptor Uses

func (*StreamStats) Descriptor() ([]byte, []int)

Deprecated: Use StreamStats.ProtoReflect.Descriptor instead.

func (*StreamStats) GetProgress Uses

func (x *StreamStats) GetProgress() *StreamStats_Progress

func (*StreamStats) ProtoMessage Uses

func (*StreamStats) ProtoMessage()

func (*StreamStats) ProtoReflect Uses

func (x *StreamStats) ProtoReflect() protoreflect.Message

func (*StreamStats) Reset Uses

func (x *StreamStats) Reset()

func (*StreamStats) String Uses

func (x *StreamStats) String() string

type StreamStats_Progress Uses

type StreamStats_Progress struct {

    // The fraction of rows assigned to the stream that have been processed by
    // the server so far, not including the rows in the current response
    // message.
    //
    // This value, along with `at_response_end`, can be used to interpolate
    // the progress made as the rows in the message are being processed using
    // the following formula: `at_response_start + (at_response_end -
    // at_response_start) * rows_processed_from_response / rows_in_response`.
    //
    // Note that if a filter is provided, the `at_response_end` value of the
    // previous response may not necessarily be equal to the
    // `at_response_start` value of the current response.
    AtResponseStart float64 `protobuf:"fixed64,1,opt,name=at_response_start,json=atResponseStart,proto3" json:"at_response_start,omitempty"`
    // Similar to `at_response_start`, except that this value includes the
    // rows in the current response.
    AtResponseEnd float64 `protobuf:"fixed64,2,opt,name=at_response_end,json=atResponseEnd,proto3" json:"at_response_end,omitempty"`
    // contains filtered or unexported fields
}
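
The interpolation described in the field comments, written out as a small helper; rowsProcessed and rowsInResponse are whatever counters the caller keeps while consuming a single ReadRowsResponse, and storagepb aliases this package.

import (
    storagepb "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta2"
)

// interpolateProgress applies the formula at_response_start +
// (at_response_end - at_response_start) * rows_processed / rows_in_response.
func interpolateProgress(p *storagepb.StreamStats_Progress, rowsProcessed, rowsInResponse int64) float64 {
    if rowsInResponse <= 0 {
        return p.GetAtResponseStart()
    }
    frac := float64(rowsProcessed) / float64(rowsInResponse)
    return p.GetAtResponseStart() + (p.GetAtResponseEnd()-p.GetAtResponseStart())*frac
}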

func (*StreamStats_Progress) Descriptor Uses

func (*StreamStats_Progress) Descriptor() ([]byte, []int)

Deprecated: Use StreamStats_Progress.ProtoReflect.Descriptor instead.

func (*StreamStats_Progress) GetAtResponseEnd Uses

func (x *StreamStats_Progress) GetAtResponseEnd() float64

func (*StreamStats_Progress) GetAtResponseStart Uses

func (x *StreamStats_Progress) GetAtResponseStart() float64

func (*StreamStats_Progress) ProtoMessage Uses

func (*StreamStats_Progress) ProtoMessage()

func (*StreamStats_Progress) ProtoReflect Uses

func (x *StreamStats_Progress) ProtoReflect() protoreflect.Message

func (*StreamStats_Progress) Reset Uses

func (x *StreamStats_Progress) Reset()

func (*StreamStats_Progress) String Uses

func (x *StreamStats_Progress) String() string

type TableFieldSchema Uses

type TableFieldSchema struct {

    // Required. The field name. The name must contain only letters (a-z, A-Z),
    // numbers (0-9), or underscores (_), and must start with a letter or
    // underscore. The maximum length is 128 characters.
    Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
    // Required. The field data type.
    Type TableFieldSchema_Type `protobuf:"varint,2,opt,name=type,proto3,enum=google.cloud.bigquery.storage.v1beta2.TableFieldSchema_Type" json:"type,omitempty"`
    // Optional. The field mode. The default value is NULLABLE.
    Mode TableFieldSchema_Mode `protobuf:"varint,3,opt,name=mode,proto3,enum=google.cloud.bigquery.storage.v1beta2.TableFieldSchema_Mode" json:"mode,omitempty"`
    // Optional. Describes the nested schema fields if the type property is set to STRUCT.
    Fields []*TableFieldSchema `protobuf:"bytes,4,rep,name=fields,proto3" json:"fields,omitempty"`
    // Optional. The field description. The maximum length is 1,024 characters.
    Description string `protobuf:"bytes,6,opt,name=description,proto3" json:"description,omitempty"`
    // contains filtered or unexported fields
}

A field in TableSchema
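
To make the nesting concrete, a sketch of a schema with a required STRING column and a repeated STRUCT column holding two leaf fields; every name is invented, and storagepb aliases this package.

import (
    storagepb "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta2"
)

// exampleSchema builds a TableSchema whose second column is a repeated
// STRUCT; the nested Fields slice describes the struct's members.
func exampleSchema() *storagepb.TableSchema {
    return &storagepb.TableSchema{
        Fields: []*storagepb.TableFieldSchema{
            {
                Name: "customer_name",
                Type: storagepb.TableFieldSchema_STRING,
                Mode: storagepb.TableFieldSchema_REQUIRED,
            },
            {
                Name: "line_items",
                Type: storagepb.TableFieldSchema_STRUCT,
                Mode: storagepb.TableFieldSchema_REPEATED,
                Fields: []*storagepb.TableFieldSchema{
                    {Name: "sku", Type: storagepb.TableFieldSchema_STRING},
                    {Name: "quantity", Type: storagepb.TableFieldSchema_INT64},
                },
            },
        },
    }
}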

func (*TableFieldSchema) Descriptor Uses

func (*TableFieldSchema) Descriptor() ([]byte, []int)

Deprecated: Use TableFieldSchema.ProtoReflect.Descriptor instead.

func (*TableFieldSchema) GetDescription Uses

func (x *TableFieldSchema) GetDescription() string

func (*TableFieldSchema) GetFields Uses

func (x *TableFieldSchema) GetFields() []*TableFieldSchema

func (*TableFieldSchema) GetMode Uses

func (x *TableFieldSchema) GetMode() TableFieldSchema_Mode

func (*TableFieldSchema) GetName Uses

func (x *TableFieldSchema) GetName() string

func (*TableFieldSchema) GetType Uses

func (x *TableFieldSchema) GetType() TableFieldSchema_Type

func (*TableFieldSchema) ProtoMessage Uses

func (*TableFieldSchema) ProtoMessage()

func (*TableFieldSchema) ProtoReflect Uses

func (x *TableFieldSchema) ProtoReflect() protoreflect.Message

func (*TableFieldSchema) Reset Uses

func (x *TableFieldSchema) Reset()

func (*TableFieldSchema) String Uses

func (x *TableFieldSchema) String() string

type TableFieldSchema_Mode Uses

type TableFieldSchema_Mode int32

const (
    // Illegal value
    TableFieldSchema_MODE_UNSPECIFIED TableFieldSchema_Mode = 0
    TableFieldSchema_NULLABLE         TableFieldSchema_Mode = 1
    TableFieldSchema_REQUIRED         TableFieldSchema_Mode = 2
    TableFieldSchema_REPEATED         TableFieldSchema_Mode = 3
)

func (TableFieldSchema_Mode) Descriptor Uses

func (TableFieldSchema_Mode) Descriptor() protoreflect.EnumDescriptor

func (TableFieldSchema_Mode) Enum Uses

func (x TableFieldSchema_Mode) Enum() *TableFieldSchema_Mode

func (TableFieldSchema_Mode) EnumDescriptor Uses

func (TableFieldSchema_Mode) EnumDescriptor() ([]byte, []int)

Deprecated: Use TableFieldSchema_Mode.Descriptor instead.

func (TableFieldSchema_Mode) Number Uses

func (x TableFieldSchema_Mode) Number() protoreflect.EnumNumber

func (TableFieldSchema_Mode) String Uses

func (x TableFieldSchema_Mode) String() string

func (TableFieldSchema_Mode) Type Uses

func (TableFieldSchema_Mode) Type() protoreflect.EnumType

type TableFieldSchema_Type Uses

type TableFieldSchema_Type int32

const (
    // Illegal value
    TableFieldSchema_TYPE_UNSPECIFIED TableFieldSchema_Type = 0
    // 64K, UTF8
    TableFieldSchema_STRING TableFieldSchema_Type = 1
    // 64-bit signed
    TableFieldSchema_INT64 TableFieldSchema_Type = 2
    // 64-bit IEEE floating point
    TableFieldSchema_DOUBLE TableFieldSchema_Type = 3
    // Aggregate type
    TableFieldSchema_STRUCT TableFieldSchema_Type = 4
    // 64K, Binary
    TableFieldSchema_BYTES TableFieldSchema_Type = 5
    // 2-valued
    TableFieldSchema_BOOL TableFieldSchema_Type = 6
    // 64-bit signed usec since UTC epoch
    TableFieldSchema_TIMESTAMP TableFieldSchema_Type = 7
    // Civil date - Year, Month, Day
    TableFieldSchema_DATE TableFieldSchema_Type = 8
    // Civil time - Hour, Minute, Second, Microseconds
    TableFieldSchema_TIME TableFieldSchema_Type = 9
    // Combination of civil date and civil time
    TableFieldSchema_DATETIME TableFieldSchema_Type = 10
    // Geography object
    TableFieldSchema_GEOGRAPHY TableFieldSchema_Type = 11
    // Numeric value
    TableFieldSchema_NUMERIC TableFieldSchema_Type = 12
)

func (TableFieldSchema_Type) Descriptor Uses

func (TableFieldSchema_Type) Descriptor() protoreflect.EnumDescriptor

func (TableFieldSchema_Type) Enum Uses

func (x TableFieldSchema_Type) Enum() *TableFieldSchema_Type

func (TableFieldSchema_Type) EnumDescriptor Uses

func (TableFieldSchema_Type) EnumDescriptor() ([]byte, []int)

Deprecated: Use TableFieldSchema_Type.Descriptor instead.

func (TableFieldSchema_Type) Number Uses

func (x TableFieldSchema_Type) Number() protoreflect.EnumNumber

func (TableFieldSchema_Type) String Uses

func (x TableFieldSchema_Type) String() string

func (TableFieldSchema_Type) Type Uses

func (TableFieldSchema_Type) Type() protoreflect.EnumType

type TableSchema Uses

type TableSchema struct {

    // Describes the fields in a table.
    Fields []*TableFieldSchema `protobuf:"bytes,1,rep,name=fields,proto3" json:"fields,omitempty"`
    // contains filtered or unexported fields
}

Schema of a table

func (*TableSchema) Descriptor Uses

func (*TableSchema) Descriptor() ([]byte, []int)

Deprecated: Use TableSchema.ProtoReflect.Descriptor instead.

func (*TableSchema) GetFields Uses

func (x *TableSchema) GetFields() []*TableFieldSchema

func (*TableSchema) ProtoMessage Uses

func (*TableSchema) ProtoMessage()

func (*TableSchema) ProtoReflect Uses

func (x *TableSchema) ProtoReflect() protoreflect.Message

func (*TableSchema) Reset Uses

func (x *TableSchema) Reset()

func (*TableSchema) String Uses

func (x *TableSchema) String() string

type ThrottleState Uses

type ThrottleState struct {

    // How much this connection is being throttled. Zero means no throttling,
    // 100 means fully throttled.
    ThrottlePercent int32 `protobuf:"varint,1,opt,name=throttle_percent,json=throttlePercent,proto3" json:"throttle_percent,omitempty"`
    // contains filtered or unexported fields
}

Information on whether the current connection is being throttled.
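
How a caller reacts to throttling is up to the caller; the sketch below maps the reported percentage onto a pause of up to one second before the next receive. The linear policy and the one-second ceiling are arbitrary choices for illustration, not something the API prescribes, and storagepb aliases this package.

import (
    "time"

    storagepb "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta2"
)

// backoffFor converts a throttle percentage (0-100) into a pause of at
// most one second; zero throttling yields no pause.
func backoffFor(ts *storagepb.ThrottleState) time.Duration {
    pct := ts.GetThrottlePercent()
    if pct <= 0 {
        return 0
    }
    return time.Duration(pct) * time.Second / 100
}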

func (*ThrottleState) Descriptor Uses

func (*ThrottleState) Descriptor() ([]byte, []int)

Deprecated: Use ThrottleState.ProtoReflect.Descriptor instead.

func (*ThrottleState) GetThrottlePercent Uses

func (x *ThrottleState) GetThrottlePercent() int32

func (*ThrottleState) ProtoMessage Uses

func (*ThrottleState) ProtoMessage()

func (*ThrottleState) ProtoReflect Uses

func (x *ThrottleState) ProtoReflect() protoreflect.Message

func (*ThrottleState) Reset Uses

func (x *ThrottleState) Reset()

func (*ThrottleState) String Uses

func (x *ThrottleState) String() string

type UnimplementedBigQueryReadServer Uses

type UnimplementedBigQueryReadServer struct {
}

UnimplementedBigQueryReadServer can be embedded to have forward compatible implementations.
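
The embedding pattern this enables, sketched for a server that only customizes CreateReadSession; the embedded type supplies stub implementations of the remaining RPCs, so the type keeps satisfying the interface as methods are added. The returned session here is a placeholder, and storagepb aliases this package.

import (
    "context"

    storagepb "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta2"
)

// readServer overrides a single RPC and inherits the rest from the
// embedded UnimplementedBigQueryReadServer.
type readServer struct {
    storagepb.UnimplementedBigQueryReadServer
}

func (s *readServer) CreateReadSession(ctx context.Context, req *storagepb.CreateReadSessionRequest) (*storagepb.ReadSession, error) {
    // A stub session; a real implementation would populate streams and
    // schema here.
    return &storagepb.ReadSession{Name: "sessions/example"}, nil
}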

func (*UnimplementedBigQueryReadServer) CreateReadSession Uses

func (*UnimplementedBigQueryReadServer) CreateReadSession(context.Context, *CreateReadSessionRequest) (*ReadSession, error)

func (*UnimplementedBigQueryReadServer) ReadRows Uses

func (*UnimplementedBigQueryReadServer) ReadRows(*ReadRowsRequest, BigQueryRead_ReadRowsServer) error

func (*UnimplementedBigQueryReadServer) SplitReadStream Uses

func (*UnimplementedBigQueryReadServer) SplitReadStream(context.Context, *SplitReadStreamRequest) (*SplitReadStreamResponse, error)

type UnimplementedBigQueryWriteServer Uses

type UnimplementedBigQueryWriteServer struct {
}

UnimplementedBigQueryWriteServer can be embedded to have forward compatible implementations.

func (*UnimplementedBigQueryWriteServer) AppendRows Uses

func (*UnimplementedBigQueryWriteServer) AppendRows(BigQueryWrite_AppendRowsServer) error

func (*UnimplementedBigQueryWriteServer) BatchCommitWriteStreams Uses

func (*UnimplementedBigQueryWriteServer) BatchCommitWriteStreams(context.Context, *BatchCommitWriteStreamsRequest) (*BatchCommitWriteStreamsResponse, error)

func (*UnimplementedBigQueryWriteServer) CreateWriteStream Uses

func (*UnimplementedBigQueryWriteServer) CreateWriteStream(context.Context, *CreateWriteStreamRequest) (*WriteStream, error)

func (*UnimplementedBigQueryWriteServer) FinalizeWriteStream Uses

func (*UnimplementedBigQueryWriteServer) FinalizeWriteStream(context.Context, *FinalizeWriteStreamRequest) (*FinalizeWriteStreamResponse, error)

func (*UnimplementedBigQueryWriteServer) FlushRows Uses

func (*UnimplementedBigQueryWriteServer) FlushRows(context.Context, *FlushRowsRequest) (*FlushRowsResponse, error)

func (*UnimplementedBigQueryWriteServer) GetWriteStream Uses

func (*UnimplementedBigQueryWriteServer) GetWriteStream(context.Context, *GetWriteStreamRequest) (*WriteStream, error)

type WriteStream Uses

type WriteStream struct {

    // Output only. Name of the stream, in the form
    // `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
    Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
    // Immutable. Type of the stream.
    Type WriteStream_Type `protobuf:"varint,2,opt,name=type,proto3,enum=google.cloud.bigquery.storage.v1beta2.WriteStream_Type" json:"type,omitempty"`
    // Output only. Create time of the stream. For the _default stream, this is the
    // creation_time of the table.
    CreateTime *timestamppb.Timestamp `protobuf:"bytes,3,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"`
    // Output only. Commit time of the stream.
    // If a stream is of `COMMITTED` type, then its commit_time is the same as
    // `create_time`. If the stream is of `PENDING` type, an empty commit_time
    // means it is not committed.
    CommitTime *timestamppb.Timestamp `protobuf:"bytes,4,opt,name=commit_time,json=commitTime,proto3" json:"commit_time,omitempty"`
    // Output only. The schema of the destination table. It is only returned in
    // the `CreateWriteStream` response. The caller should generate data that is
    // compatible with this schema to send in the initial `AppendRowsRequest`.
    // The table schema could go out of date during the lifetime of the stream.
    TableSchema *TableSchema `protobuf:"bytes,5,opt,name=table_schema,json=tableSchema,proto3" json:"table_schema,omitempty"`
    // contains filtered or unexported fields
}

Information about a single stream that writes data into the storage system.
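
A hedged sketch of asking for a PENDING stream, whose rows stay invisible until the stream is finalized and committed. The Parent and WriteStream fields of CreateWriteStreamRequest are assumed from the wider API (that request message is documented elsewhere in this package), the table path is a placeholder, and storagepb aliases this package.

import (
    storagepb "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta2"
)

// pendingStreamRequest sketches the request for a PENDING write stream;
// the parent table path is a placeholder.
func pendingStreamRequest() *storagepb.CreateWriteStreamRequest {
    return &storagepb.CreateWriteStreamRequest{
        Parent: "projects/my-project/datasets/my_dataset/tables/my_table",
        WriteStream: &storagepb.WriteStream{
            Type: storagepb.WriteStream_PENDING,
        },
    }
}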

func (*WriteStream) Descriptor Uses

func (*WriteStream) Descriptor() ([]byte, []int)

Deprecated: Use WriteStream.ProtoReflect.Descriptor instead.

func (*WriteStream) GetCommitTime Uses

func (x *WriteStream) GetCommitTime() *timestamppb.Timestamp

func (*WriteStream) GetCreateTime Uses

func (x *WriteStream) GetCreateTime() *timestamppb.Timestamp

func (*WriteStream) GetName Uses

func (x *WriteStream) GetName() string

func (*WriteStream) GetTableSchema Uses

func (x *WriteStream) GetTableSchema() *TableSchema

func (*WriteStream) GetType Uses

func (x *WriteStream) GetType() WriteStream_Type

func (*WriteStream) ProtoMessage Uses

func (*WriteStream) ProtoMessage()

func (*WriteStream) ProtoReflect Uses

func (x *WriteStream) ProtoReflect() protoreflect.Message

func (*WriteStream) Reset Uses

func (x *WriteStream) Reset()

func (*WriteStream) String Uses

func (x *WriteStream) String() string

type WriteStream_Type Uses

type WriteStream_Type int32

Type enum of the stream.

const (
    // Unknown type.
    WriteStream_TYPE_UNSPECIFIED WriteStream_Type = 0
    // Data will commit automatically and appear as soon as the write is
    // acknowledged.
    WriteStream_COMMITTED WriteStream_Type = 1
    // Data is invisible until the stream is committed.
    WriteStream_PENDING WriteStream_Type = 2
    // Data is only visible up to the offset to which it was flushed.
    WriteStream_BUFFERED WriteStream_Type = 3
)

func (WriteStream_Type) Descriptor Uses

func (WriteStream_Type) Descriptor() protoreflect.EnumDescriptor

func (WriteStream_Type) Enum Uses

func (x WriteStream_Type) Enum() *WriteStream_Type

func (WriteStream_Type) EnumDescriptor Uses

func (WriteStream_Type) EnumDescriptor() ([]byte, []int)

Deprecated: Use WriteStream_Type.Descriptor instead.

func (WriteStream_Type) Number Uses

func (x WriteStream_Type) Number() protoreflect.EnumNumber

func (WriteStream_Type) String Uses

func (x WriteStream_Type) String() string

func (WriteStream_Type) Type Uses

func (WriteStream_Type) Type() protoreflect.EnumType

Package storage imports 14 packages and is imported by 1 package. Updated 2020-12-05.