package storage

v0.0.0-...-e165f0f
Warning: This package is not in the latest version of its module.

Published: Sep 1, 2020 License: Apache-2.0 Imports: 9 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

var ArrowSerializationOptions_Format_name = map[int32]string{
	0: "FORMAT_UNSPECIFIED",
	1: "ARROW_0_14",
	2: "ARROW_0_15",
}
var ArrowSerializationOptions_Format_value = map[string]int32{
	"FORMAT_UNSPECIFIED": 0,
	"ARROW_0_14":         1,
	"ARROW_0_15":         2,
}
var DataFormat_name = map[int32]string{
	0: "DATA_FORMAT_UNSPECIFIED",
	1: "AVRO",
	2: "ARROW",
}
var DataFormat_value = map[string]int32{
	"DATA_FORMAT_UNSPECIFIED": 0,
	"AVRO":                    1,
	"ARROW":                   2,
}

Functions

func RegisterBigQueryReadServer

func RegisterBigQueryReadServer(s *grpc.Server, srv BigQueryReadServer)

Types

type ArrowRecordBatch

type ArrowRecordBatch struct {
	// IPC-serialized Arrow RecordBatch.
	SerializedRecordBatch []byte   `` /* 126-byte string literal not displayed */
	XXX_NoUnkeyedLiteral  struct{} `json:"-"`
	XXX_unrecognized      []byte   `json:"-"`
	XXX_sizecache         int32    `json:"-"`
}

Arrow RecordBatch.

func (*ArrowRecordBatch) Descriptor

func (*ArrowRecordBatch) Descriptor() ([]byte, []int)

func (*ArrowRecordBatch) GetSerializedRecordBatch

func (m *ArrowRecordBatch) GetSerializedRecordBatch() []byte

func (*ArrowRecordBatch) ProtoMessage

func (*ArrowRecordBatch) ProtoMessage()

func (*ArrowRecordBatch) Reset

func (m *ArrowRecordBatch) Reset()

func (*ArrowRecordBatch) String

func (m *ArrowRecordBatch) String() string

func (*ArrowRecordBatch) XXX_DiscardUnknown

func (m *ArrowRecordBatch) XXX_DiscardUnknown()

func (*ArrowRecordBatch) XXX_Marshal

func (m *ArrowRecordBatch) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ArrowRecordBatch) XXX_Merge

func (m *ArrowRecordBatch) XXX_Merge(src proto.Message)

func (*ArrowRecordBatch) XXX_Size

func (m *ArrowRecordBatch) XXX_Size() int

func (*ArrowRecordBatch) XXX_Unmarshal

func (m *ArrowRecordBatch) XXX_Unmarshal(b []byte) error

type ArrowSchema

type ArrowSchema struct {
	// IPC serialized Arrow schema.
	SerializedSchema     []byte   `protobuf:"bytes,1,opt,name=serialized_schema,json=serializedSchema,proto3" json:"serialized_schema,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

Arrow schema as specified in https://arrow.apache.org/docs/python/api/datatypes.html and serialized to bytes using IPC: https://arrow.apache.org/docs/format/Columnar.html#serialization-and-interprocess-communication-ipc

See code samples on how this message can be deserialized.
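
As a hedged illustration of that deserialization, the sketch below stitches a session's serialized Arrow schema together with one serialized record batch and reads the result back with the third-party Apache Arrow Go library (github.com/apache/arrow/go/arrow/ipc). The library choice and the package name are assumptions for illustration, not something this package prescribes.

package arrowsample // hypothetical consumer package

import (
	"bytes"
	"fmt"

	"github.com/apache/arrow/go/arrow/ipc" // third-party Arrow library (assumed)
)

// decodeArrowBatch concatenates the session's serialized schema with one
// serialized record batch to form a complete Arrow IPC stream, then walks
// the records it contains.
func decodeArrowBatch(serializedSchema, serializedRecordBatch []byte) error {
	buf := append(append([]byte{}, serializedSchema...), serializedRecordBatch...)
	rdr, err := ipc.NewReader(bytes.NewReader(buf))
	if err != nil {
		return err
	}
	defer rdr.Release()
	for rdr.Next() {
		rec := rdr.Record()
		fmt.Printf("record batch: %d rows, %d columns\n", rec.NumRows(), rec.NumCols())
	}
	return rdr.Err()
}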

func (*ArrowSchema) Descriptor

func (*ArrowSchema) Descriptor() ([]byte, []int)

func (*ArrowSchema) GetSerializedSchema

func (m *ArrowSchema) GetSerializedSchema() []byte

func (*ArrowSchema) ProtoMessage

func (*ArrowSchema) ProtoMessage()

func (*ArrowSchema) Reset

func (m *ArrowSchema) Reset()

func (*ArrowSchema) String

func (m *ArrowSchema) String() string

func (*ArrowSchema) XXX_DiscardUnknown

func (m *ArrowSchema) XXX_DiscardUnknown()

func (*ArrowSchema) XXX_Marshal

func (m *ArrowSchema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ArrowSchema) XXX_Merge

func (m *ArrowSchema) XXX_Merge(src proto.Message)

func (*ArrowSchema) XXX_Size

func (m *ArrowSchema) XXX_Size() int

func (*ArrowSchema) XXX_Unmarshal

func (m *ArrowSchema) XXX_Unmarshal(b []byte) error

type ArrowSerializationOptions

type ArrowSerializationOptions struct {
	// The Arrow IPC format to use.
	Format               ArrowSerializationOptions_Format `` /* 142-byte string literal not displayed */
	XXX_NoUnkeyedLiteral struct{}                         `json:"-"`
	XXX_unrecognized     []byte                           `json:"-"`
	XXX_sizecache        int32                            `json:"-"`
}

Contains options specific to Arrow Serialization.

func (*ArrowSerializationOptions) Descriptor

func (*ArrowSerializationOptions) Descriptor() ([]byte, []int)

func (*ArrowSerializationOptions) GetFormat

func (m *ArrowSerializationOptions) GetFormat() ArrowSerializationOptions_Format

func (*ArrowSerializationOptions) ProtoMessage

func (*ArrowSerializationOptions) ProtoMessage()

func (*ArrowSerializationOptions) Reset

func (m *ArrowSerializationOptions) Reset()

func (*ArrowSerializationOptions) String

func (m *ArrowSerializationOptions) String() string

func (*ArrowSerializationOptions) XXX_DiscardUnknown

func (m *ArrowSerializationOptions) XXX_DiscardUnknown()

func (*ArrowSerializationOptions) XXX_Marshal

func (m *ArrowSerializationOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ArrowSerializationOptions) XXX_Merge

func (m *ArrowSerializationOptions) XXX_Merge(src proto.Message)

func (*ArrowSerializationOptions) XXX_Size

func (m *ArrowSerializationOptions) XXX_Size() int

func (*ArrowSerializationOptions) XXX_Unmarshal

func (m *ArrowSerializationOptions) XXX_Unmarshal(b []byte) error

type ArrowSerializationOptions_Format

type ArrowSerializationOptions_Format int32

The IPC format to use when serializing Arrow streams.

const (
	// If unspecified, the IPC format as of the 0.15 release will be used.
	ArrowSerializationOptions_FORMAT_UNSPECIFIED ArrowSerializationOptions_Format = 0
	// Use the legacy IPC message format as of Apache Arrow Release 0.14.
	ArrowSerializationOptions_ARROW_0_14 ArrowSerializationOptions_Format = 1
	// Use the message format as of Apache Arrow Release 0.15.
	ArrowSerializationOptions_ARROW_0_15 ArrowSerializationOptions_Format = 2
)

func (ArrowSerializationOptions_Format) EnumDescriptor

func (ArrowSerializationOptions_Format) EnumDescriptor() ([]byte, []int)

func (ArrowSerializationOptions_Format) String

func (x ArrowSerializationOptions_Format) String() string

type AvroRows

type AvroRows struct {
	// Binary serialized rows in a block.
	SerializedBinaryRows []byte   `protobuf:"bytes,1,opt,name=serialized_binary_rows,json=serializedBinaryRows,proto3" json:"serialized_binary_rows,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

Avro rows.

func (*AvroRows) Descriptor

func (*AvroRows) Descriptor() ([]byte, []int)

func (*AvroRows) GetSerializedBinaryRows

func (m *AvroRows) GetSerializedBinaryRows() []byte

func (*AvroRows) ProtoMessage

func (*AvroRows) ProtoMessage()

func (*AvroRows) Reset

func (m *AvroRows) Reset()

func (*AvroRows) String

func (m *AvroRows) String() string

func (*AvroRows) XXX_DiscardUnknown

func (m *AvroRows) XXX_DiscardUnknown()

func (*AvroRows) XXX_Marshal

func (m *AvroRows) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*AvroRows) XXX_Merge

func (m *AvroRows) XXX_Merge(src proto.Message)

func (*AvroRows) XXX_Size

func (m *AvroRows) XXX_Size() int

func (*AvroRows) XXX_Unmarshal

func (m *AvroRows) XXX_Unmarshal(b []byte) error

type AvroSchema

type AvroSchema struct {
	// JSON-serialized schema, as described at
	// https://avro.apache.org/docs/1.8.1/spec.html.
	Schema               string   `protobuf:"bytes,1,opt,name=schema,proto3" json:"schema,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

Avro schema.
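
A minimal decoding sketch, assuming the third-party github.com/linkedin/goavro/v2 codec (not part of this package): the schema JSON from AvroSchema.Schema configures the codec, and the concatenated Avro-binary records in AvroRows.SerializedBinaryRows are consumed one at a time.

package avrosample // hypothetical consumer package

import (
	"fmt"

	"github.com/linkedin/goavro/v2" // third-party Avro codec (assumed)
)

// decodeAvroRows decodes the concatenated Avro-binary records carried in
// AvroRows.SerializedBinaryRows, using the JSON schema from AvroSchema.Schema.
func decodeAvroRows(schemaJSON string, serializedBinaryRows []byte) error {
	codec, err := goavro.NewCodec(schemaJSON)
	if err != nil {
		return err
	}
	remaining := serializedBinaryRows
	for len(remaining) > 0 {
		datum, rest, err := codec.NativeFromBinary(remaining)
		if err != nil {
			return err
		}
		fmt.Println(datum) // typically a map[string]interface{} per row
		remaining = rest
	}
	return nil
}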

func (*AvroSchema) Descriptor

func (*AvroSchema) Descriptor() ([]byte, []int)

func (*AvroSchema) GetSchema

func (m *AvroSchema) GetSchema() string

func (*AvroSchema) ProtoMessage

func (*AvroSchema) ProtoMessage()

func (*AvroSchema) Reset

func (m *AvroSchema) Reset()

func (*AvroSchema) String

func (m *AvroSchema) String() string

func (*AvroSchema) XXX_DiscardUnknown

func (m *AvroSchema) XXX_DiscardUnknown()

func (*AvroSchema) XXX_Marshal

func (m *AvroSchema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*AvroSchema) XXX_Merge

func (m *AvroSchema) XXX_Merge(src proto.Message)

func (*AvroSchema) XXX_Size

func (m *AvroSchema) XXX_Size() int

func (*AvroSchema) XXX_Unmarshal

func (m *AvroSchema) XXX_Unmarshal(b []byte) error

type BigQueryReadClient

type BigQueryReadClient interface {
	// Creates a new read session. A read session divides the contents of a
	// BigQuery table into one or more streams, which can then be used to read
	// data from the table. The read session also specifies properties of the
	// data to be read, such as a list of columns or a push-down filter describing
	// the rows to be returned.
	//
	// A particular row can be read by at most one stream. When the caller has
	// reached the end of each stream in the session, then all the data in the
	// table has been read.
	//
	// Data is assigned to each stream such that roughly the same number of
	// rows can be read from each stream. Because the server-side unit for
	// assigning data is collections of rows, the API does not guarantee that
	// each stream will return the same number of rows. Additionally, the
	// limits are enforced based on the number of pre-filtered rows, so some
	// filters can lead to lopsided assignments.
	//
	// Read sessions automatically expire 24 hours after they are created and do
	// not require manual clean-up by the caller.
	CreateReadSession(ctx context.Context, in *CreateReadSessionRequest, opts ...grpc.CallOption) (*ReadSession, error)
	// Reads rows from the stream in the format prescribed by the ReadSession.
	// Each response contains one or more table rows, up to a maximum of 100 MiB
	// per response; read requests which attempt to read individual rows larger
	// than 100 MiB will fail.
	//
	// Each request also returns a set of stream statistics reflecting the current
	// state of the stream.
	ReadRows(ctx context.Context, in *ReadRowsRequest, opts ...grpc.CallOption) (BigQueryRead_ReadRowsClient, error)
	// Splits a given `ReadStream` into two `ReadStream` objects. These
	// `ReadStream` objects are referred to as the primary and the residual
	// streams of the split. The original `ReadStream` can still be read from in
	// the same manner as before. Both of the returned `ReadStream` objects can
	// also be read from, and the rows returned by both child streams will be
	// the same as the rows read from the original stream.
	//
	// Moreover, the two child streams will be allocated back-to-back in the
	// original `ReadStream`. Concretely, it is guaranteed that for streams
	// original, primary, and residual, that original[0-j] = primary[0-j] and
	// original[j-n] = residual[0-m] once the streams have been read to
	// completion.
	SplitReadStream(ctx context.Context, in *SplitReadStreamRequest, opts ...grpc.CallOption) (*SplitReadStreamResponse, error)
}

BigQueryReadClient is the client API for BigQueryRead service.

For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
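
A hedged sketch of the resulting call flow, using only the interface above: create a session, then drain each returned stream with ReadRows. It is written as if inside this package so the generated types can be referenced unqualified; how the BigQueryReadClient is obtained, and the project and table names, are placeholders.

package storage

import (
	"context"
	"io"
)

// readTable creates a read session and drains every stream it returns.
// A given row is delivered by exactly one stream.
func readTable(ctx context.Context, client BigQueryReadClient) error {
	session, err := client.CreateReadSession(ctx, &CreateReadSessionRequest{
		Parent: "projects/my-project", // placeholder project
		ReadSession: &ReadSession{
			Table:      "projects/my-project/datasets/my_dataset/tables/my_table", // placeholder table
			DataFormat: DataFormat_AVRO,
		},
		MaxStreamCount: 1,
	})
	if err != nil {
		return err
	}

	for _, stream := range session.GetStreams() {
		rows, err := client.ReadRows(ctx, &ReadRowsRequest{ReadStream: stream.GetName()})
		if err != nil {
			return err
		}
		for {
			resp, err := rows.Recv()
			if err == io.EOF {
				break
			}
			if err != nil {
				return err
			}
			_ = resp.GetRowCount() // decode resp.GetAvroRows() / GetArrowRecordBatch() here
		}
	}
	return nil
}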

type BigQueryReadServer

type BigQueryReadServer interface {
	// Creates a new read session. A read session divides the contents of a
	// BigQuery table into one or more streams, which can then be used to read
	// data from the table. The read session also specifies properties of the
	// data to be read, such as a list of columns or a push-down filter describing
	// the rows to be returned.
	//
	// A particular row can be read by at most one stream. When the caller has
	// reached the end of each stream in the session, then all the data in the
	// table has been read.
	//
	// Data is assigned to each stream such that roughly the same number of
	// rows can be read from each stream. Because the server-side unit for
	// assigning data is collections of rows, the API does not guarantee that
	// each stream will return the same number of rows. Additionally, the
	// limits are enforced based on the number of pre-filtered rows, so some
	// filters can lead to lopsided assignments.
	//
	// Read sessions automatically expire 24 hours after they are created and do
	// not require manual clean-up by the caller.
	CreateReadSession(context.Context, *CreateReadSessionRequest) (*ReadSession, error)
	// Reads rows from the stream in the format prescribed by the ReadSession.
	// Each response contains one or more table rows, up to a maximum of 100 MiB
	// per response; read requests which attempt to read individual rows larger
	// than 100 MiB will fail.
	//
	// Each request also returns a set of stream statistics reflecting the current
	// state of the stream.
	ReadRows(*ReadRowsRequest, BigQueryRead_ReadRowsServer) error
	// Splits a given `ReadStream` into two `ReadStream` objects. These
	// `ReadStream` objects are referred to as the primary and the residual
	// streams of the split. The original `ReadStream` can still be read from in
	// the same manner as before. Both of the returned `ReadStream` objects can
	// also be read from, and the rows returned by both child streams will be
	// the same as the rows read from the original stream.
	//
	// Moreover, the two child streams will be allocated back-to-back in the
	// original `ReadStream`. Concretely, it is guaranteed that for streams
	// original, primary, and residual, that original[0-j] = primary[0-j] and
	// original[j-n] = residual[0-m] once the streams have been read to
	// completion.
	SplitReadStream(context.Context, *SplitReadStreamRequest) (*SplitReadStreamResponse, error)
}

BigQueryReadServer is the server API for BigQueryRead service.
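
A minimal server-side sketch: embed UnimplementedBigQueryReadServer for forward compatibility and wire the implementation up with RegisterBigQueryReadServer. The listener address is a placeholder, and the snippet is written as if it lives alongside this package.

package storage

import (
	"log"
	"net"

	"google.golang.org/grpc"
)

// myReadServer overrides nothing yet; embedded UnimplementedBigQueryReadServer
// answers unimplemented methods with codes.Unimplemented.
type myReadServer struct {
	UnimplementedBigQueryReadServer
}

func serve() {
	lis, err := net.Listen("tcp", ":50051") // placeholder address
	if err != nil {
		log.Fatal(err)
	}
	s := grpc.NewServer()
	RegisterBigQueryReadServer(s, &myReadServer{})
	if err := s.Serve(lis); err != nil {
		log.Fatal(err)
	}
}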

type BigQueryRead_ReadRowsClient

type BigQueryRead_ReadRowsClient interface {
	Recv() (*ReadRowsResponse, error)
	grpc.ClientStream
}

type BigQueryRead_ReadRowsServer

type BigQueryRead_ReadRowsServer interface {
	Send(*ReadRowsResponse) error
	grpc.ServerStream
}

type CreateReadSessionRequest

type CreateReadSessionRequest struct {
	// Required. The request project that owns the session, in the form of
	// `projects/{project_id}`.
	Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"`
	// Required. Session to be created.
	ReadSession *ReadSession `protobuf:"bytes,2,opt,name=read_session,json=readSession,proto3" json:"read_session,omitempty"`
	// Max initial number of streams. If unset or zero, the server will
	// choose a number of streams so as to produce reasonable throughput. Must be
	// non-negative. The number of streams may be lower than the requested number,
	// depending on the amount of parallelism that is reasonable for the table. An error
	// will be returned if the max count is greater than the current system
	// max limit of 1,000.
	//
	// Streams must be read starting from offset 0.
	MaxStreamCount       int32    `protobuf:"varint,3,opt,name=max_stream_count,json=maxStreamCount,proto3" json:"max_stream_count,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

Request message for `CreateReadSession`.

func (*CreateReadSessionRequest) Descriptor

func (*CreateReadSessionRequest) Descriptor() ([]byte, []int)

func (*CreateReadSessionRequest) GetMaxStreamCount

func (m *CreateReadSessionRequest) GetMaxStreamCount() int32

func (*CreateReadSessionRequest) GetParent

func (m *CreateReadSessionRequest) GetParent() string

func (*CreateReadSessionRequest) GetReadSession

func (m *CreateReadSessionRequest) GetReadSession() *ReadSession

func (*CreateReadSessionRequest) ProtoMessage

func (*CreateReadSessionRequest) ProtoMessage()

func (*CreateReadSessionRequest) Reset

func (m *CreateReadSessionRequest) Reset()

func (*CreateReadSessionRequest) String

func (m *CreateReadSessionRequest) String() string

func (*CreateReadSessionRequest) XXX_DiscardUnknown

func (m *CreateReadSessionRequest) XXX_DiscardUnknown()

func (*CreateReadSessionRequest) XXX_Marshal

func (m *CreateReadSessionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*CreateReadSessionRequest) XXX_Merge

func (m *CreateReadSessionRequest) XXX_Merge(src proto.Message)

func (*CreateReadSessionRequest) XXX_Size

func (m *CreateReadSessionRequest) XXX_Size() int

func (*CreateReadSessionRequest) XXX_Unmarshal

func (m *CreateReadSessionRequest) XXX_Unmarshal(b []byte) error

type DataFormat

type DataFormat int32

Data format for input or output data.

const (
	DataFormat_DATA_FORMAT_UNSPECIFIED DataFormat = 0
	// Avro is a standard open source row based file format.
	// See https://avro.apache.org/ for more details.
	DataFormat_AVRO DataFormat = 1
	// Arrow is a standard open source column-based message format.
	// See https://arrow.apache.org/ for more details.
	DataFormat_ARROW DataFormat = 2
)

func (DataFormat) EnumDescriptor

func (DataFormat) EnumDescriptor() ([]byte, []int)

func (DataFormat) String

func (x DataFormat) String() string

type ReadRowsRequest

type ReadRowsRequest struct {
	// Required. Stream to read rows from.
	ReadStream string `protobuf:"bytes,1,opt,name=read_stream,json=readStream,proto3" json:"read_stream,omitempty"`
	// The offset requested must be less than the last row read from Read.
	// Requesting a larger offset is undefined. If not specified, start reading
	// from offset zero.
	Offset               int64    `protobuf:"varint,2,opt,name=offset,proto3" json:"offset,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

Request message for `ReadRows`.

func (*ReadRowsRequest) Descriptor

func (*ReadRowsRequest) Descriptor() ([]byte, []int)

func (*ReadRowsRequest) GetOffset

func (m *ReadRowsRequest) GetOffset() int64

func (*ReadRowsRequest) GetReadStream

func (m *ReadRowsRequest) GetReadStream() string

func (*ReadRowsRequest) ProtoMessage

func (*ReadRowsRequest) ProtoMessage()

func (*ReadRowsRequest) Reset

func (m *ReadRowsRequest) Reset()

func (*ReadRowsRequest) String

func (m *ReadRowsRequest) String() string

func (*ReadRowsRequest) XXX_DiscardUnknown

func (m *ReadRowsRequest) XXX_DiscardUnknown()

func (*ReadRowsRequest) XXX_Marshal

func (m *ReadRowsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ReadRowsRequest) XXX_Merge

func (m *ReadRowsRequest) XXX_Merge(src proto.Message)

func (*ReadRowsRequest) XXX_Size

func (m *ReadRowsRequest) XXX_Size() int

func (*ReadRowsRequest) XXX_Unmarshal

func (m *ReadRowsRequest) XXX_Unmarshal(b []byte) error

type ReadRowsResponse

type ReadRowsResponse struct {
	// Row data is returned in format specified during session creation.
	//
	// Types that are valid to be assigned to Rows:
	//	*ReadRowsResponse_AvroRows
	//	*ReadRowsResponse_ArrowRecordBatch
	Rows isReadRowsResponse_Rows `protobuf_oneof:"rows"`
	// Number of serialized rows in the rows block.
	RowCount int64 `protobuf:"varint,6,opt,name=row_count,json=rowCount,proto3" json:"row_count,omitempty"`
	// Statistics for the stream.
	Stats *StreamStats `protobuf:"bytes,2,opt,name=stats,proto3" json:"stats,omitempty"`
	// Throttling state. If unset, the latest response still describes
	// the current throttling status.
	ThrottleState        *ThrottleState `protobuf:"bytes,5,opt,name=throttle_state,json=throttleState,proto3" json:"throttle_state,omitempty"`
	XXX_NoUnkeyedLiteral struct{}       `json:"-"`
	XXX_unrecognized     []byte         `json:"-"`
	XXX_sizecache        int32          `json:"-"`
}

Response from calling `ReadRows` may include row data, progress, and throttling information.
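
A short sketch of dispatching on the rows oneof with the generated wrapper types; whichever format the session requested determines which branch carries data.

// handleResponse dispatches on the rows oneof using the generated wrapper
// types; only the getter matching the session's data format returns data.
func handleResponse(resp *ReadRowsResponse) {
	switch rows := resp.GetRows().(type) {
	case *ReadRowsResponse_AvroRows:
		_ = rows.AvroRows.GetSerializedBinaryRows() // decode with the session's AvroSchema
	case *ReadRowsResponse_ArrowRecordBatch:
		_ = rows.ArrowRecordBatch.GetSerializedRecordBatch() // decode with the session's ArrowSchema
	}
	if ts := resp.GetThrottleState(); ts != nil && ts.GetThrottlePercent() > 0 {
		// The server is currently throttling this connection.
	}
}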

func (*ReadRowsResponse) Descriptor

func (*ReadRowsResponse) Descriptor() ([]byte, []int)

func (*ReadRowsResponse) GetArrowRecordBatch

func (m *ReadRowsResponse) GetArrowRecordBatch() *ArrowRecordBatch

func (*ReadRowsResponse) GetAvroRows

func (m *ReadRowsResponse) GetAvroRows() *AvroRows

func (*ReadRowsResponse) GetRowCount

func (m *ReadRowsResponse) GetRowCount() int64

func (*ReadRowsResponse) GetRows

func (m *ReadRowsResponse) GetRows() isReadRowsResponse_Rows

func (*ReadRowsResponse) GetStats

func (m *ReadRowsResponse) GetStats() *StreamStats

func (*ReadRowsResponse) GetThrottleState

func (m *ReadRowsResponse) GetThrottleState() *ThrottleState

func (*ReadRowsResponse) ProtoMessage

func (*ReadRowsResponse) ProtoMessage()

func (*ReadRowsResponse) Reset

func (m *ReadRowsResponse) Reset()

func (*ReadRowsResponse) String

func (m *ReadRowsResponse) String() string

func (*ReadRowsResponse) XXX_DiscardUnknown

func (m *ReadRowsResponse) XXX_DiscardUnknown()

func (*ReadRowsResponse) XXX_Marshal

func (m *ReadRowsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ReadRowsResponse) XXX_Merge

func (m *ReadRowsResponse) XXX_Merge(src proto.Message)

func (*ReadRowsResponse) XXX_OneofWrappers

func (*ReadRowsResponse) XXX_OneofWrappers() []interface{}

XXX_OneofWrappers is for the internal use of the proto package.

func (*ReadRowsResponse) XXX_Size

func (m *ReadRowsResponse) XXX_Size() int

func (*ReadRowsResponse) XXX_Unmarshal

func (m *ReadRowsResponse) XXX_Unmarshal(b []byte) error

type ReadRowsResponse_ArrowRecordBatch

type ReadRowsResponse_ArrowRecordBatch struct {
	ArrowRecordBatch *ArrowRecordBatch `protobuf:"bytes,4,opt,name=arrow_record_batch,json=arrowRecordBatch,proto3,oneof"`
}

type ReadRowsResponse_AvroRows

type ReadRowsResponse_AvroRows struct {
	AvroRows *AvroRows `protobuf:"bytes,3,opt,name=avro_rows,json=avroRows,proto3,oneof"`
}

type ReadSession

type ReadSession struct {
	// Output only. Unique identifier for the session, in the form
	// `projects/{project_id}/locations/{location}/sessions/{session_id}`.
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// Output only. Time at which the session becomes invalid. After this time, subsequent
	// requests to read this Session will return errors. The expire_time is
	// automatically assigned and currently cannot be specified or updated.
	ExpireTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=expire_time,json=expireTime,proto3" json:"expire_time,omitempty"`
	// Immutable. Data format of the output data.
	DataFormat DataFormat `` /* 146-byte string literal not displayed */
	// The schema for the read. If read_options.selected_fields is set, the
	// schema may be different from the table schema as it will only contain
	// the selected fields.
	//
	// Types that are valid to be assigned to Schema:
	//	*ReadSession_AvroSchema
	//	*ReadSession_ArrowSchema
	Schema isReadSession_Schema `protobuf_oneof:"schema"`
	// Immutable. Table that this ReadSession is reading from, in the form
	// `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`.
	Table string `protobuf:"bytes,6,opt,name=table,proto3" json:"table,omitempty"`
	// Optional. Any modifiers which are applied when reading from the specified table.
	TableModifiers *ReadSession_TableModifiers `protobuf:"bytes,7,opt,name=table_modifiers,json=tableModifiers,proto3" json:"table_modifiers,omitempty"`
	// Optional. Read options for this session (e.g. column selection, filters).
	ReadOptions *ReadSession_TableReadOptions `protobuf:"bytes,8,opt,name=read_options,json=readOptions,proto3" json:"read_options,omitempty"`
	// Output only. A list of streams created with the session.
	//
	// At least one stream is created with the session. In the future, larger
	// request_stream_count values *may* result in this list being unpopulated;
	// in that case, the user will need to use a List method to get the streams
	// instead, which is not yet available.
	Streams              []*ReadStream `protobuf:"bytes,10,rep,name=streams,proto3" json:"streams,omitempty"`
	XXX_NoUnkeyedLiteral struct{}      `json:"-"`
	XXX_unrecognized     []byte        `json:"-"`
	XXX_sizecache        int32         `json:"-"`
}

Information about the ReadSession.

func (*ReadSession) Descriptor

func (*ReadSession) Descriptor() ([]byte, []int)

func (*ReadSession) GetArrowSchema

func (m *ReadSession) GetArrowSchema() *ArrowSchema

func (*ReadSession) GetAvroSchema

func (m *ReadSession) GetAvroSchema() *AvroSchema

func (*ReadSession) GetDataFormat

func (m *ReadSession) GetDataFormat() DataFormat

func (*ReadSession) GetExpireTime

func (m *ReadSession) GetExpireTime() *timestamp.Timestamp

func (*ReadSession) GetName

func (m *ReadSession) GetName() string

func (*ReadSession) GetReadOptions

func (m *ReadSession) GetReadOptions() *ReadSession_TableReadOptions

func (*ReadSession) GetSchema

func (m *ReadSession) GetSchema() isReadSession_Schema

func (*ReadSession) GetStreams

func (m *ReadSession) GetStreams() []*ReadStream

func (*ReadSession) GetTable

func (m *ReadSession) GetTable() string

func (*ReadSession) GetTableModifiers

func (m *ReadSession) GetTableModifiers() *ReadSession_TableModifiers

func (*ReadSession) ProtoMessage

func (*ReadSession) ProtoMessage()

func (*ReadSession) Reset

func (m *ReadSession) Reset()

func (*ReadSession) String

func (m *ReadSession) String() string

func (*ReadSession) XXX_DiscardUnknown

func (m *ReadSession) XXX_DiscardUnknown()

func (*ReadSession) XXX_Marshal

func (m *ReadSession) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ReadSession) XXX_Merge

func (m *ReadSession) XXX_Merge(src proto.Message)

func (*ReadSession) XXX_OneofWrappers

func (*ReadSession) XXX_OneofWrappers() []interface{}

XXX_OneofWrappers is for the internal use of the proto package.

func (*ReadSession) XXX_Size

func (m *ReadSession) XXX_Size() int

func (*ReadSession) XXX_Unmarshal

func (m *ReadSession) XXX_Unmarshal(b []byte) error

type ReadSession_ArrowSchema

type ReadSession_ArrowSchema struct {
	ArrowSchema *ArrowSchema `protobuf:"bytes,5,opt,name=arrow_schema,json=arrowSchema,proto3,oneof"`
}

type ReadSession_AvroSchema

type ReadSession_AvroSchema struct {
	AvroSchema *AvroSchema `protobuf:"bytes,4,opt,name=avro_schema,json=avroSchema,proto3,oneof"`
}

type ReadSession_TableModifiers

type ReadSession_TableModifiers struct {
	// The snapshot time of the table. If not set, interpreted as now.
	SnapshotTime         *timestamp.Timestamp `protobuf:"bytes,1,opt,name=snapshot_time,json=snapshotTime,proto3" json:"snapshot_time,omitempty"`
	XXX_NoUnkeyedLiteral struct{}             `json:"-"`
	XXX_unrecognized     []byte               `json:"-"`
	XXX_sizecache        int32                `json:"-"`
}

Additional attributes when reading a table.

func (*ReadSession_TableModifiers) Descriptor

func (*ReadSession_TableModifiers) Descriptor() ([]byte, []int)

func (*ReadSession_TableModifiers) GetSnapshotTime

func (m *ReadSession_TableModifiers) GetSnapshotTime() *timestamp.Timestamp

func (*ReadSession_TableModifiers) ProtoMessage

func (*ReadSession_TableModifiers) ProtoMessage()

func (*ReadSession_TableModifiers) Reset

func (m *ReadSession_TableModifiers) Reset()

func (*ReadSession_TableModifiers) String

func (m *ReadSession_TableModifiers) String() string

func (*ReadSession_TableModifiers) XXX_DiscardUnknown

func (m *ReadSession_TableModifiers) XXX_DiscardUnknown()

func (*ReadSession_TableModifiers) XXX_Marshal

func (m *ReadSession_TableModifiers) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ReadSession_TableModifiers) XXX_Merge

func (m *ReadSession_TableModifiers) XXX_Merge(src proto.Message)

func (*ReadSession_TableModifiers) XXX_Size

func (m *ReadSession_TableModifiers) XXX_Size() int

func (*ReadSession_TableModifiers) XXX_Unmarshal

func (m *ReadSession_TableModifiers) XXX_Unmarshal(b []byte) error

type ReadSession_TableReadOptions

type ReadSession_TableReadOptions struct {
	// Names of the fields in the table that should be read. If empty, all
	// fields will be read. If the specified field is a nested field, all
	// the sub-fields in the field will be selected. The output field order is
	// unrelated to the order of fields in selected_fields.
	SelectedFields []string `protobuf:"bytes,1,rep,name=selected_fields,json=selectedFields,proto3" json:"selected_fields,omitempty"`
	// SQL text filtering statement, similar to a WHERE clause in a query.
	// Aggregates are not supported.
	//
	// Examples: "int_field > 5"
	//           "date_field = CAST('2014-9-27' as DATE)"
	//           "nullable_field is not NULL"
	//           "st_equals(geo_field, st_geofromtext('POINT(2, 2)'))"
	//           "numeric_field BETWEEN 1.0 AND 5.0"
	RowRestriction string `protobuf:"bytes,2,opt,name=row_restriction,json=rowRestriction,proto3" json:"row_restriction,omitempty"`
	// Optional. Options specific to the Apache Arrow output format.
	ArrowSerializationOptions *ArrowSerializationOptions `` /* 138-byte string literal not displayed */
	XXX_NoUnkeyedLiteral      struct{}                   `json:"-"`
	XXX_unrecognized          []byte                     `json:"-"`
	XXX_sizecache             int32                      `json:"-"`
}

Options dictating how we read a table.
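
A small construction sketch for these options; the column names and the filter value are placeholders.

// exampleReadOptions builds read options with a column projection, a row
// filter, and Arrow serialization options; the values are placeholders.
func exampleReadOptions() *ReadSession_TableReadOptions {
	return &ReadSession_TableReadOptions{
		SelectedFields: []string{"int_field", "date_field"}, // placeholder columns
		RowRestriction: "int_field > 5",                     // server-side pre-filter
		ArrowSerializationOptions: &ArrowSerializationOptions{
			Format: ArrowSerializationOptions_ARROW_0_15,
		},
	}
}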

func (*ReadSession_TableReadOptions) Descriptor

func (*ReadSession_TableReadOptions) Descriptor() ([]byte, []int)

func (*ReadSession_TableReadOptions) GetArrowSerializationOptions

func (m *ReadSession_TableReadOptions) GetArrowSerializationOptions() *ArrowSerializationOptions

func (*ReadSession_TableReadOptions) GetRowRestriction

func (m *ReadSession_TableReadOptions) GetRowRestriction() string

func (*ReadSession_TableReadOptions) GetSelectedFields

func (m *ReadSession_TableReadOptions) GetSelectedFields() []string

func (*ReadSession_TableReadOptions) ProtoMessage

func (*ReadSession_TableReadOptions) ProtoMessage()

func (*ReadSession_TableReadOptions) Reset

func (m *ReadSession_TableReadOptions) Reset()

func (*ReadSession_TableReadOptions) String

func (m *ReadSession_TableReadOptions) String() string

func (*ReadSession_TableReadOptions) XXX_DiscardUnknown

func (m *ReadSession_TableReadOptions) XXX_DiscardUnknown()

func (*ReadSession_TableReadOptions) XXX_Marshal

func (m *ReadSession_TableReadOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ReadSession_TableReadOptions) XXX_Merge

func (m *ReadSession_TableReadOptions) XXX_Merge(src proto.Message)

func (*ReadSession_TableReadOptions) XXX_Size

func (m *ReadSession_TableReadOptions) XXX_Size() int

func (*ReadSession_TableReadOptions) XXX_Unmarshal

func (m *ReadSession_TableReadOptions) XXX_Unmarshal(b []byte) error

type ReadStream

type ReadStream struct {
	// Output only. Name of the stream, in the form
	// `projects/{project_id}/locations/{location}/sessions/{session_id}/streams/{stream_id}`.
	Name                 string   `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

Information about a single stream that gets data out of the storage system. Most of the information about `ReadStream` instances is aggregated, making `ReadStream` lightweight.

func (*ReadStream) Descriptor

func (*ReadStream) Descriptor() ([]byte, []int)

func (*ReadStream) GetName

func (m *ReadStream) GetName() string

func (*ReadStream) ProtoMessage

func (*ReadStream) ProtoMessage()

func (*ReadStream) Reset

func (m *ReadStream) Reset()

func (*ReadStream) String

func (m *ReadStream) String() string

func (*ReadStream) XXX_DiscardUnknown

func (m *ReadStream) XXX_DiscardUnknown()

func (*ReadStream) XXX_Marshal

func (m *ReadStream) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ReadStream) XXX_Merge

func (m *ReadStream) XXX_Merge(src proto.Message)

func (*ReadStream) XXX_Size

func (m *ReadStream) XXX_Size() int

func (*ReadStream) XXX_Unmarshal

func (m *ReadStream) XXX_Unmarshal(b []byte) error

type SplitReadStreamRequest

type SplitReadStreamRequest struct {
	// Required. Name of the stream to split.
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// A value in the range (0.0, 1.0) that specifies the fractional point at
	// which the original stream should be split. The actual split point is
	// evaluated on pre-filtered rows, so if a filter is provided, then there is
	// no guarantee that the division of the rows between the new child streams
	// will be proportional to this fractional value. Additionally, because the
	// server-side unit for assigning data is collections of rows, this fraction
	// will always map to a data storage boundary on the server side.
	Fraction             float64  `protobuf:"fixed64,2,opt,name=fraction,proto3" json:"fraction,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

Request message for `SplitReadStream`.

func (*SplitReadStreamRequest) Descriptor

func (*SplitReadStreamRequest) Descriptor() ([]byte, []int)

func (*SplitReadStreamRequest) GetFraction

func (m *SplitReadStreamRequest) GetFraction() float64

func (*SplitReadStreamRequest) GetName

func (m *SplitReadStreamRequest) GetName() string

func (*SplitReadStreamRequest) ProtoMessage

func (*SplitReadStreamRequest) ProtoMessage()

func (*SplitReadStreamRequest) Reset

func (m *SplitReadStreamRequest) Reset()

func (*SplitReadStreamRequest) String

func (m *SplitReadStreamRequest) String() string

func (*SplitReadStreamRequest) XXX_DiscardUnknown

func (m *SplitReadStreamRequest) XXX_DiscardUnknown()

func (*SplitReadStreamRequest) XXX_Marshal

func (m *SplitReadStreamRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*SplitReadStreamRequest) XXX_Merge

func (m *SplitReadStreamRequest) XXX_Merge(src proto.Message)

func (*SplitReadStreamRequest) XXX_Size

func (m *SplitReadStreamRequest) XXX_Size() int

func (*SplitReadStreamRequest) XXX_Unmarshal

func (m *SplitReadStreamRequest) XXX_Unmarshal(b []byte) error

type SplitReadStreamResponse

type SplitReadStreamResponse struct {
	// Primary stream, which contains the beginning portion of
	// |original_stream|. An empty value indicates that the original stream can no
	// longer be split.
	PrimaryStream *ReadStream `protobuf:"bytes,1,opt,name=primary_stream,json=primaryStream,proto3" json:"primary_stream,omitempty"`
	// Remainder stream, which contains the tail of |original_stream|. An empty
	// value indicates that the original stream can no longer be split.
	RemainderStream      *ReadStream `protobuf:"bytes,2,opt,name=remainder_stream,json=remainderStream,proto3" json:"remainder_stream,omitempty"`
	XXX_NoUnkeyedLiteral struct{}    `json:"-"`
	XXX_unrecognized     []byte      `json:"-"`
	XXX_sizecache        int32       `json:"-"`
}

Response message for `SplitReadStream`.
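
A hedged usage sketch: split a stream at the halfway point and hand back the two child streams; an empty primary or remainder stream means the original could not be split further.

package storage

import "context"

// splitInHalf requests a split at the 0.5 fraction point. Either returned
// stream may be empty when the split could not be performed.
func splitInHalf(ctx context.Context, client BigQueryReadClient, streamName string) (*ReadStream, *ReadStream, error) {
	resp, err := client.SplitReadStream(ctx, &SplitReadStreamRequest{
		Name:     streamName,
		Fraction: 0.5,
	})
	if err != nil {
		return nil, nil, err
	}
	return resp.GetPrimaryStream(), resp.GetRemainderStream(), nil
}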

func (*SplitReadStreamResponse) Descriptor

func (*SplitReadStreamResponse) Descriptor() ([]byte, []int)

func (*SplitReadStreamResponse) GetPrimaryStream

func (m *SplitReadStreamResponse) GetPrimaryStream() *ReadStream

func (*SplitReadStreamResponse) GetRemainderStream

func (m *SplitReadStreamResponse) GetRemainderStream() *ReadStream

func (*SplitReadStreamResponse) ProtoMessage

func (*SplitReadStreamResponse) ProtoMessage()

func (*SplitReadStreamResponse) Reset

func (m *SplitReadStreamResponse) Reset()

func (*SplitReadStreamResponse) String

func (m *SplitReadStreamResponse) String() string

func (*SplitReadStreamResponse) XXX_DiscardUnknown

func (m *SplitReadStreamResponse) XXX_DiscardUnknown()

func (*SplitReadStreamResponse) XXX_Marshal

func (m *SplitReadStreamResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*SplitReadStreamResponse) XXX_Merge

func (m *SplitReadStreamResponse) XXX_Merge(src proto.Message)

func (*SplitReadStreamResponse) XXX_Size

func (m *SplitReadStreamResponse) XXX_Size() int

func (*SplitReadStreamResponse) XXX_Unmarshal

func (m *SplitReadStreamResponse) XXX_Unmarshal(b []byte) error

type StreamStats

type StreamStats struct {
	// Represents the progress of the current stream.
	Progress             *StreamStats_Progress `protobuf:"bytes,2,opt,name=progress,proto3" json:"progress,omitempty"`
	XXX_NoUnkeyedLiteral struct{}              `json:"-"`
	XXX_unrecognized     []byte                `json:"-"`
	XXX_sizecache        int32                 `json:"-"`
}

Estimated stream statistics for a given Stream.

func (*StreamStats) Descriptor

func (*StreamStats) Descriptor() ([]byte, []int)

func (*StreamStats) GetProgress

func (m *StreamStats) GetProgress() *StreamStats_Progress

func (*StreamStats) ProtoMessage

func (*StreamStats) ProtoMessage()

func (*StreamStats) Reset

func (m *StreamStats) Reset()

func (*StreamStats) String

func (m *StreamStats) String() string

func (*StreamStats) XXX_DiscardUnknown

func (m *StreamStats) XXX_DiscardUnknown()

func (*StreamStats) XXX_Marshal

func (m *StreamStats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*StreamStats) XXX_Merge

func (m *StreamStats) XXX_Merge(src proto.Message)

func (*StreamStats) XXX_Size

func (m *StreamStats) XXX_Size() int

func (*StreamStats) XXX_Unmarshal

func (m *StreamStats) XXX_Unmarshal(b []byte) error

type StreamStats_Progress

type StreamStats_Progress struct {
	// The fraction of rows assigned to the stream that have been processed by
	// the server so far, not including the rows in the current response
	// message.
	//
	// This value, along with `at_response_end`, can be used to interpolate
	// the progress made as the rows in the message are being processed using
	// the following formula: `at_response_start + (at_response_end -
	// at_response_start) * rows_processed_from_response / rows_in_response`.
	//
	// Note that if a filter is provided, the `at_response_end` value of the
	// previous response may not necessarily be equal to the
	// `at_response_start` value of the current response.
	AtResponseStart float64 `protobuf:"fixed64,1,opt,name=at_response_start,json=atResponseStart,proto3" json:"at_response_start,omitempty"`
	// Similar to `at_response_start`, except that this value includes the
	// rows in the current response.
	AtResponseEnd        float64  `protobuf:"fixed64,2,opt,name=at_response_end,json=atResponseEnd,proto3" json:"at_response_end,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}
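
A small helper applying the interpolation formula from the comment above; the per-response row counts are tracked by the caller while consuming the response.

// interpolateProgress returns the interpolated progress within a response:
// at_response_start + (at_response_end - at_response_start) *
// rows_processed_from_response / rows_in_response.
func interpolateProgress(p *StreamStats_Progress, rowsProcessed, rowsInResponse int64) float64 {
	start, end := p.GetAtResponseStart(), p.GetAtResponseEnd()
	if rowsInResponse == 0 {
		return start
	}
	return start + (end-start)*float64(rowsProcessed)/float64(rowsInResponse)
}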

func (*StreamStats_Progress) Descriptor

func (*StreamStats_Progress) Descriptor() ([]byte, []int)

func (*StreamStats_Progress) GetAtResponseEnd

func (m *StreamStats_Progress) GetAtResponseEnd() float64

func (*StreamStats_Progress) GetAtResponseStart

func (m *StreamStats_Progress) GetAtResponseStart() float64

func (*StreamStats_Progress) ProtoMessage

func (*StreamStats_Progress) ProtoMessage()

func (*StreamStats_Progress) Reset

func (m *StreamStats_Progress) Reset()

func (*StreamStats_Progress) String

func (m *StreamStats_Progress) String() string

func (*StreamStats_Progress) XXX_DiscardUnknown

func (m *StreamStats_Progress) XXX_DiscardUnknown()

func (*StreamStats_Progress) XXX_Marshal

func (m *StreamStats_Progress) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*StreamStats_Progress) XXX_Merge

func (m *StreamStats_Progress) XXX_Merge(src proto.Message)

func (*StreamStats_Progress) XXX_Size

func (m *StreamStats_Progress) XXX_Size() int

func (*StreamStats_Progress) XXX_Unmarshal

func (m *StreamStats_Progress) XXX_Unmarshal(b []byte) error

type ThrottleState

type ThrottleState struct {
	// How much this connection is being throttled. Zero means no throttling,
	// 100 means fully throttled.
	ThrottlePercent      int32    `protobuf:"varint,1,opt,name=throttle_percent,json=throttlePercent,proto3" json:"throttle_percent,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

Information on whether the current connection is being throttled.

func (*ThrottleState) Descriptor

func (*ThrottleState) Descriptor() ([]byte, []int)

func (*ThrottleState) GetThrottlePercent

func (m *ThrottleState) GetThrottlePercent() int32

func (*ThrottleState) ProtoMessage

func (*ThrottleState) ProtoMessage()

func (*ThrottleState) Reset

func (m *ThrottleState) Reset()

func (*ThrottleState) String

func (m *ThrottleState) String() string

func (*ThrottleState) XXX_DiscardUnknown

func (m *ThrottleState) XXX_DiscardUnknown()

func (*ThrottleState) XXX_Marshal

func (m *ThrottleState) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ThrottleState) XXX_Merge

func (m *ThrottleState) XXX_Merge(src proto.Message)

func (*ThrottleState) XXX_Size

func (m *ThrottleState) XXX_Size() int

func (*ThrottleState) XXX_Unmarshal

func (m *ThrottleState) XXX_Unmarshal(b []byte) error

type UnimplementedBigQueryReadServer

type UnimplementedBigQueryReadServer struct {
}

UnimplementedBigQueryReadServer can be embedded to have forward compatible implementations.

func (*UnimplementedBigQueryReadServer) CreateReadSession

func (*UnimplementedBigQueryReadServer) CreateReadSession(ctx context.Context, req *CreateReadSessionRequest) (*ReadSession, error)

func (*UnimplementedBigQueryReadServer) ReadRows

func (*UnimplementedBigQueryReadServer) ReadRows(req *ReadRowsRequest, srv BigQueryRead_ReadRowsServer) error

func (*UnimplementedBigQueryReadServer) SplitReadStream

func (*UnimplementedBigQueryReadServer) SplitReadStream(ctx context.Context, req *SplitReadStreamRequest) (*SplitReadStreamResponse, error)
