package storage

v0.0.0-...-8b5d7a1
Published: Oct 26, 2018 License: Apache-2.0 Imports: 7 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

View Source
var DataFormat_name = map[int32]string{
	0: "DATA_FORMAT_UNSPECIFIED",
	1: "AVRO",
}
View Source
var DataFormat_value = map[string]int32{
	"DATA_FORMAT_UNSPECIFIED": 0,
	"AVRO":                    1,
}
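
These generated maps mirror the DataFormat enum. A small sketch of translating between numeric values and proto names; the import path and alias are assumptions.

package main

import (
	"fmt"

	// Assumed import path for this generated package.
	storagepb "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1"
)

func main() {
	// Enum value -> proto name, and name -> value, via the generated maps.
	fmt.Println(storagepb.DataFormat_name[int32(storagepb.DataFormat_AVRO)]) // AVRO
	fmt.Println(storagepb.DataFormat_value["DATA_FORMAT_UNSPECIFIED"])       // 0
	// DataFormat's String method uses the same mapping.
	fmt.Println(storagepb.DataFormat(1)) // AVRO
}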

Functions

func RegisterBigQueryStorageServer

func RegisterBigQueryStorageServer(s *grpc.Server, srv BigQueryStorageServer)
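
A minimal sketch of wiring an implementation into a gRPC server. The import path, alias, and listen address are assumptions; srv stands for any value satisfying BigQueryStorageServer.

package server

import (
	"net"

	"google.golang.org/grpc"

	// Assumed import path for this generated package.
	storagepb "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1"
)

// serve registers srv on a fresh gRPC server and blocks serving requests on addr.
func serve(addr string, srv storagepb.BigQueryStorageServer) error {
	lis, err := net.Listen("tcp", addr)
	if err != nil {
		return err
	}
	s := grpc.NewServer()
	storagepb.RegisterBigQueryStorageServer(s, srv)
	return s.Serve(lis)
}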

Types

type AvroRows

type AvroRows struct {
	// Binary serialized rows in a block.
	SerializedBinaryRows []byte `protobuf:"bytes,1,opt,name=serialized_binary_rows,json=serializedBinaryRows,proto3" json:"serialized_binary_rows,omitempty"`
	// The count of rows in the returned block.
	RowCount             int64    `protobuf:"varint,2,opt,name=row_count,json=rowCount,proto3" json:"row_count,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

Avro rows.

func (*AvroRows) Descriptor

func (*AvroRows) Descriptor() ([]byte, []int)

func (*AvroRows) GetRowCount

func (m *AvroRows) GetRowCount() int64

func (*AvroRows) GetSerializedBinaryRows

func (m *AvroRows) GetSerializedBinaryRows() []byte

func (*AvroRows) ProtoMessage

func (*AvroRows) ProtoMessage()

func (*AvroRows) Reset

func (m *AvroRows) Reset()

func (*AvroRows) String

func (m *AvroRows) String() string

func (*AvroRows) XXX_DiscardUnknown

func (m *AvroRows) XXX_DiscardUnknown()

func (*AvroRows) XXX_Marshal

func (m *AvroRows) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*AvroRows) XXX_Merge

func (m *AvroRows) XXX_Merge(src proto.Message)

func (*AvroRows) XXX_Size

func (m *AvroRows) XXX_Size() int

func (*AvroRows) XXX_Unmarshal

func (m *AvroRows) XXX_Unmarshal(b []byte) error

type AvroSchema

type AvroSchema struct {
	// JSON serialized schema, as described at
	// https://avro.apache.org/docs/1.8.1/spec.html
	Schema               string   `protobuf:"bytes,1,opt,name=schema,proto3" json:"schema,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

Avro schema.

func (*AvroSchema) Descriptor

func (*AvroSchema) Descriptor() ([]byte, []int)

func (*AvroSchema) GetSchema

func (m *AvroSchema) GetSchema() string

func (*AvroSchema) ProtoMessage

func (*AvroSchema) ProtoMessage()

func (*AvroSchema) Reset

func (m *AvroSchema) Reset()

func (*AvroSchema) String

func (m *AvroSchema) String() string

func (*AvroSchema) XXX_DiscardUnknown

func (m *AvroSchema) XXX_DiscardUnknown()

func (*AvroSchema) XXX_Marshal

func (m *AvroSchema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*AvroSchema) XXX_Merge

func (m *AvroSchema) XXX_Merge(src proto.Message)

func (*AvroSchema) XXX_Size

func (m *AvroSchema) XXX_Size() int

func (*AvroSchema) XXX_Unmarshal

func (m *AvroSchema) XXX_Unmarshal(b []byte) error

type BatchCreateReadSessionStreamsRequest

type BatchCreateReadSessionStreamsRequest struct {
	// Required. Must be a non-expired session obtained from a call to
	// CreateReadSession. Only the name field needs to be set.
	Session *ReadSession `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"`
	// Required. Number of new streams requested. Must be positive.
	// Number of added streams may be less than this, see CreateReadSessionRequest
	// for more information.
	RequestedStreams     int32    `protobuf:"varint,2,opt,name=requested_streams,json=requestedStreams,proto3" json:"requested_streams,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

func (*BatchCreateReadSessionStreamsRequest) Descriptor

func (*BatchCreateReadSessionStreamsRequest) Descriptor() ([]byte, []int)

func (*BatchCreateReadSessionStreamsRequest) GetRequestedStreams

func (m *BatchCreateReadSessionStreamsRequest) GetRequestedStreams() int32

func (*BatchCreateReadSessionStreamsRequest) GetSession

func (*BatchCreateReadSessionStreamsRequest) ProtoMessage

func (*BatchCreateReadSessionStreamsRequest) ProtoMessage()

func (*BatchCreateReadSessionStreamsRequest) Reset

func (*BatchCreateReadSessionStreamsRequest) String

func (*BatchCreateReadSessionStreamsRequest) XXX_DiscardUnknown

func (m *BatchCreateReadSessionStreamsRequest) XXX_DiscardUnknown()

func (*BatchCreateReadSessionStreamsRequest) XXX_Marshal

func (m *BatchCreateReadSessionStreamsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*BatchCreateReadSessionStreamsRequest) XXX_Merge

func (*BatchCreateReadSessionStreamsRequest) XXX_Size

func (*BatchCreateReadSessionStreamsRequest) XXX_Unmarshal

func (m *BatchCreateReadSessionStreamsRequest) XXX_Unmarshal(b []byte) error

type BatchCreateReadSessionStreamsResponse

type BatchCreateReadSessionStreamsResponse struct {
	// Newly added streams.
	Streams              []*Stream `protobuf:"bytes,1,rep,name=streams,proto3" json:"streams,omitempty"`
	XXX_NoUnkeyedLiteral struct{}  `json:"-"`
	XXX_unrecognized     []byte    `json:"-"`
	XXX_sizecache        int32     `json:"-"`
}

func (*BatchCreateReadSessionStreamsResponse) Descriptor

func (*BatchCreateReadSessionStreamsResponse) Descriptor() ([]byte, []int)

func (*BatchCreateReadSessionStreamsResponse) GetStreams

func (m *BatchCreateReadSessionStreamsResponse) GetStreams() []*Stream

func (*BatchCreateReadSessionStreamsResponse) ProtoMessage

func (*BatchCreateReadSessionStreamsResponse) ProtoMessage()

func (*BatchCreateReadSessionStreamsResponse) Reset

func (*BatchCreateReadSessionStreamsResponse) String

func (*BatchCreateReadSessionStreamsResponse) XXX_DiscardUnknown

func (m *BatchCreateReadSessionStreamsResponse) XXX_DiscardUnknown()

func (*BatchCreateReadSessionStreamsResponse) XXX_Marshal

func (m *BatchCreateReadSessionStreamsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*BatchCreateReadSessionStreamsResponse) XXX_Merge

func (*BatchCreateReadSessionStreamsResponse) XXX_Size

func (*BatchCreateReadSessionStreamsResponse) XXX_Unmarshal

func (m *BatchCreateReadSessionStreamsResponse) XXX_Unmarshal(b []byte) error

type BigQueryStorageClient

type BigQueryStorageClient interface {
	// Creates a new read session. A read session divides the contents of a
	// BigQuery table into one or more streams, which can then be used to read
	// data from the table. The read session also specifies properties of the
	// data to be read, such as a list of columns or a push-down filter describing
	// the rows to be returned.
	//
	// A particular row can be read by at most one stream. When the caller has
	// reached the end of each stream in the session, then all the data in the
	// table has been read.
	//
	// Read sessions automatically expire 24 hours after they are created and do
	// not require manual clean-up by the caller.
	CreateReadSession(ctx context.Context, in *CreateReadSessionRequest, opts ...grpc.CallOption) (*ReadSession, error)
	// Reads rows from the table in the format prescribed by the read session.
	// Each response contains one or more table rows, up to a maximum of 10 MiB
	// per response; read requests which attempt to read individual rows larger
	// than this will fail.
	//
	// Each request also returns a set of stream statistics reflecting the
	// estimated total number of rows in the read stream. This number is computed
	// based on the total table size and the number of active streams in the read
	// session, and may change as other streams continue to read data.
	ReadRows(ctx context.Context, in *ReadRowsRequest, opts ...grpc.CallOption) (BigQueryStorage_ReadRowsClient, error)
	// Creates additional streams for a ReadSession. This API can be used to
	// dynamically adjust the parallelism of a batch processing task upwards by
	// adding additional workers.
	BatchCreateReadSessionStreams(ctx context.Context, in *BatchCreateReadSessionStreamsRequest, opts ...grpc.CallOption) (*BatchCreateReadSessionStreamsResponse, error)
	// Triggers the graceful termination of a single stream in a ReadSession. This
	// API can be used to dynamically adjust the parallelism of a batch processing
	// task downwards without losing data.
	//
	// This API does not delete the stream -- it remains visible in the
	// ReadSession, and any data processed by the stream is not released to other
	// streams. However, no additional data will be assigned to the stream once
	// this call completes. Callers must continue reading data on the stream until
	// the end of the stream is reached so that data which has already been
	// assigned to the stream will be processed.
	//
	// This method will return an error if there are no other live streams
	// in the Session, or if SplitReadStream() has been called on the given
	// Stream.
	FinalizeStream(ctx context.Context, in *FinalizeStreamRequest, opts ...grpc.CallOption) (*empty.Empty, error)
	// Splits a given read stream into two Streams. These streams are referred to
	// as the primary and the residual of the split. The original stream can still
	// be read from in the same manner as before. Both of the returned streams can
	// also be read from, and the total rows returned by both child streams will be
	// the same as the rows read from the original stream.
	//
	// Moreover, the two child streams will be allocated back to back in the
	// original Stream. Concretely, it is guaranteed that for streams Original,
	// Primary, and Residual, that Original[0-j] = Primary[0-j] and
	// Original[j-n] = Residual[0-m] once the streams have been read to
	// completion.
	//
	// This method is guaranteed to be idempotent.
	SplitReadStream(ctx context.Context, in *SplitReadStreamRequest, opts ...grpc.CallOption) (*SplitReadStreamResponse, error)
}

BigQueryStorageClient is the client API for BigQueryStorage service.

For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.

func NewBigQueryStorageClient

func NewBigQueryStorageClient(cc *grpc.ClientConn) BigQueryStorageClient
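
A sketch of creating a client over a raw gRPC connection and opening a read session. The import path, endpoint, and the project/table IDs are assumptions for illustration; a production client would also attach OAuth per-RPC credentials, which is omitted here.

package main

import (
	"context"
	"crypto/tls"
	"log"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials"

	// Assumed import path for this generated package.
	storagepb "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1"
)

func main() {
	ctx := context.Background()

	// Dial the API endpoint over TLS. Real use also requires OAuth credentials.
	conn, err := grpc.Dial("bigquerystorage.googleapis.com:443",
		grpc.WithTransportCredentials(credentials.NewTLS(&tls.Config{})))
	if err != nil {
		log.Fatalf("dial: %v", err)
	}
	defer conn.Close()

	client := storagepb.NewBigQueryStorageClient(conn)

	// Create a read session for a table. All IDs below are placeholders.
	session, err := client.CreateReadSession(ctx, &storagepb.CreateReadSessionRequest{
		Parent: "projects/my-project", // project billed for the read
		TableReference: &storagepb.TableReference{
			ProjectId: "bigquery-public-data",
			DatasetId: "samples",
			TableId:   "shakespeare",
		},
		RequestedStreams: 1,
	})
	if err != nil {
		log.Fatalf("CreateReadSession: %v", err)
	}
	log.Printf("session %s with %d stream(s)", session.GetName(), len(session.GetStreams()))
}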

type BigQueryStorageServer

type BigQueryStorageServer interface {
	// Creates a new read session. A read session divides the contents of a
	// BigQuery table into one or more streams, which can then be used to read
	// data from the table. The read session also specifies properties of the
	// data to be read, such as a list of columns or a push-down filter describing
	// the rows to be returned.
	//
	// A particular row can be read by at most one stream. When the caller has
	// reached the end of each stream in the session, then all the data in the
	// table has been read.
	//
	// Read sessions automatically expire 24 hours after they are created and do
	// not require manual clean-up by the caller.
	CreateReadSession(context.Context, *CreateReadSessionRequest) (*ReadSession, error)
	// Reads rows from the table in the format prescribed by the read session.
	// Each response contains one or more table rows, up to a maximum of 10 MiB
	// per response; read requests which attempt to read individual rows larger
	// than this will fail.
	//
	// Each request also returns a set of stream statistics reflecting the
	// estimated total number of rows in the read stream. This number is computed
	// based on the total table size and the number of active streams in the read
	// session, and may change as other streams continue to read data.
	ReadRows(*ReadRowsRequest, BigQueryStorage_ReadRowsServer) error
	// Creates additional streams for a ReadSession. This API can be used to
	// dynamically adjust the parallelism of a batch processing task upwards by
	// adding additional workers.
	BatchCreateReadSessionStreams(context.Context, *BatchCreateReadSessionStreamsRequest) (*BatchCreateReadSessionStreamsResponse, error)
	// Triggers the graceful termination of a single stream in a ReadSession. This
	// API can be used to dynamically adjust the parallelism of a batch processing
	// task downwards without losing data.
	//
	// This API does not delete the stream -- it remains visible in the
	// ReadSession, and any data processed by the stream is not released to other
	// streams. However, no additional data will be assigned to the stream once
	// this call completes. Callers must continue reading data on the stream until
	// the end of the stream is reached so that data which has already been
	// assigned to the stream will be processed.
	//
	// This method will return an error if there are no other live streams
	// in the Session, or if SplitReadStream() has been called on the given
	// Stream.
	FinalizeStream(context.Context, *FinalizeStreamRequest) (*empty.Empty, error)
	// Splits a given read stream into two Streams. These streams are referred to
	// as the primary and the residual of the split. The original stream can still
	// be read from in the same manner as before. Both of the returned streams can
	// also be read from, and the total rows returned by both child streams will be
	// the same as the rows read from the original stream.
	//
	// Moreover, the two child streams will be allocated back to back in the
	// original Stream. Concretely, it is guaranteed that for streams Original,
	// Primary, and Residual, that Original[0-j] = Primary[0-j] and
	// Original[j-n] = Residual[0-m] once the streams have been read to
	// completion.
	//
	// This method is guaranteed to be idempotent.
	SplitReadStream(context.Context, *SplitReadStreamRequest) (*SplitReadStreamResponse, error)
}

BigQueryStorageServer is the server API for BigQueryStorage service.

type BigQueryStorage_ReadRowsClient

type BigQueryStorage_ReadRowsClient interface {
	Recv() (*ReadRowsResponse, error)
	grpc.ClientStream
}
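
Recv returns one block of rows per call and io.EOF once the stream is exhausted. A sketch of draining a single stream, assuming the client and stream came from an earlier CreateReadSession call; readStream is a hypothetical helper name and the import path is assumed.

package example

import (
	"context"
	"io"
	"log"

	// Assumed import path for this generated package.
	storagepb "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1"
)

// readStream reads every block from a single stream, starting at offset 0,
// and returns the total number of rows received.
func readStream(ctx context.Context, client storagepb.BigQueryStorageClient, stream *storagepb.Stream) (int64, error) {
	rc, err := client.ReadRows(ctx, &storagepb.ReadRowsRequest{
		ReadPosition: &storagepb.StreamPosition{Stream: stream, Offset: 0},
	})
	if err != nil {
		return 0, err
	}
	var rows int64
	for {
		resp, err := rc.Recv()
		if err == io.EOF {
			return rows, nil // stream exhausted
		}
		if err != nil {
			return rows, err
		}
		// For the AVRO format, each response carries a serialized Avro block.
		if avro := resp.GetAvroRows(); avro != nil {
			rows += avro.GetRowCount()
			log.Printf("received %d bytes of Avro data", len(avro.GetSerializedBinaryRows()))
		}
	}
}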

type BigQueryStorage_ReadRowsServer

type BigQueryStorage_ReadRowsServer interface {
	Send(*ReadRowsResponse) error
	grpc.ServerStream
}
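
On the server side, a ReadRows handler delivers each ReadRowsResponse with Send and ends the stream by returning. A sketch of that shape, assuming the row blocks have already been serialized to Avro; sendBlocks is a hypothetical helper name and the import path is assumed.

package example

import (
	// Assumed import path for this generated package.
	storagepb "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1"
)

// sendBlocks streams pre-serialized Avro blocks back to the caller. Each Send
// delivers one ReadRowsResponse; returning nil ends the stream, after which
// the client's Recv returns io.EOF.
func sendBlocks(blocks [][]byte, rowCounts []int64, srv storagepb.BigQueryStorage_ReadRowsServer) error {
	for i, b := range blocks {
		resp := &storagepb.ReadRowsResponse{
			Rows: &storagepb.ReadRowsResponse_AvroRows{
				AvroRows: &storagepb.AvroRows{
					SerializedBinaryRows: b,
					RowCount:             rowCounts[i],
				},
			},
			Status: &storagepb.StreamStatus{EstimatedRowCount: rowCounts[i]},
		}
		if err := srv.Send(resp); err != nil {
			return err
		}
	}
	return nil
}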

type CreateReadSessionRequest

type CreateReadSessionRequest struct {
	// Required. Reference to the table to read.
	TableReference *TableReference `protobuf:"bytes,1,opt,name=table_reference,json=tableReference,proto3" json:"table_reference,omitempty"`
	// Required. Project which this ReadSession is associated with. This is the
	// project that will be billed for usage.
	Parent string `protobuf:"bytes,6,opt,name=parent,proto3" json:"parent,omitempty"`
	// Optional. Any modifiers to the Table (e.g. snapshot timestamp).
	TableModifiers *TableModifiers `protobuf:"bytes,2,opt,name=table_modifiers,json=tableModifiers,proto3" json:"table_modifiers,omitempty"`
	// Optional. Initial number of streams. If unset or 0, the service will
	// choose a number of streams so as to produce reasonable throughput. Must be
	// non-negative. The number of streams may be lower than the requested number,
	// depending on the amount of parallelism that is reasonable for the table and
	// the maximum amount of parallelism allowed by the system.
	//
	// Streams must be read starting from offset 0.
	RequestedStreams int32 `protobuf:"varint,3,opt,name=requested_streams,json=requestedStreams,proto3" json:"requested_streams,omitempty"`
	// Optional. Read options for this session (e.g. column selection, filters).
	ReadOptions *TableReadOptions `protobuf:"bytes,4,opt,name=read_options,json=readOptions,proto3" json:"read_options,omitempty"`
	// Data output format. Currently defaults to Avro.
	Format               DataFormat `protobuf:"varint,5,opt,name=format,proto3,enum=google.cloud.bigquery.storage.v1beta1.DataFormat" json:"format,omitempty"`
	XXX_NoUnkeyedLiteral struct{}   `json:"-"`
	XXX_unrecognized     []byte     `json:"-"`
	XXX_sizecache        int32      `json:"-"`
}
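
A sketch of a request that exercises the optional fields: a snapshot time supplied through TableModifiers and an explicit output format. The project, dataset, and table IDs are placeholders, newSnapshotRequest is a hypothetical helper name, and the import path is assumed.

package example

import (
	"time"

	"github.com/golang/protobuf/ptypes"

	// Assumed import path for this generated package.
	storagepb "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1"
)

// newSnapshotRequest builds a CreateReadSessionRequest that reads the table
// as of a point in time and asks for Avro output.
func newSnapshotRequest(snapshot time.Time) (*storagepb.CreateReadSessionRequest, error) {
	ts, err := ptypes.TimestampProto(snapshot)
	if err != nil {
		return nil, err
	}
	return &storagepb.CreateReadSessionRequest{
		Parent: "projects/my-project", // project billed for the read (placeholder)
		TableReference: &storagepb.TableReference{
			ProjectId: "my-project",
			DatasetId: "my_dataset",
			TableId:   "my_table",
		},
		TableModifiers: &storagepb.TableModifiers{SnapshotTime: ts},
		Format:         storagepb.DataFormat_AVRO,
		// RequestedStreams left as 0: the service chooses a reasonable count.
	}, nil
}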

func (*CreateReadSessionRequest) Descriptor

func (*CreateReadSessionRequest) Descriptor() ([]byte, []int)

func (*CreateReadSessionRequest) GetFormat

func (m *CreateReadSessionRequest) GetFormat() DataFormat

func (*CreateReadSessionRequest) GetParent

func (m *CreateReadSessionRequest) GetParent() string

func (*CreateReadSessionRequest) GetReadOptions

func (m *CreateReadSessionRequest) GetReadOptions() *TableReadOptions

func (*CreateReadSessionRequest) GetRequestedStreams

func (m *CreateReadSessionRequest) GetRequestedStreams() int32

func (*CreateReadSessionRequest) GetTableModifiers

func (m *CreateReadSessionRequest) GetTableModifiers() *TableModifiers

func (*CreateReadSessionRequest) GetTableReference

func (m *CreateReadSessionRequest) GetTableReference() *TableReference

func (*CreateReadSessionRequest) ProtoMessage

func (*CreateReadSessionRequest) ProtoMessage()

func (*CreateReadSessionRequest) Reset

func (m *CreateReadSessionRequest) Reset()

func (*CreateReadSessionRequest) String

func (m *CreateReadSessionRequest) String() string

func (*CreateReadSessionRequest) XXX_DiscardUnknown

func (m *CreateReadSessionRequest) XXX_DiscardUnknown()

func (*CreateReadSessionRequest) XXX_Marshal

func (m *CreateReadSessionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*CreateReadSessionRequest) XXX_Merge

func (m *CreateReadSessionRequest) XXX_Merge(src proto.Message)

func (*CreateReadSessionRequest) XXX_Size

func (m *CreateReadSessionRequest) XXX_Size() int

func (*CreateReadSessionRequest) XXX_Unmarshal

func (m *CreateReadSessionRequest) XXX_Unmarshal(b []byte) error

type DataFormat

type DataFormat int32

Data format for input or output data.

const (
	DataFormat_DATA_FORMAT_UNSPECIFIED DataFormat = 0
	// Avro is a standard open source row based file format.
	// See https://avro.apache.org/ for more details.
	DataFormat_AVRO DataFormat = 1
)

func (DataFormat) EnumDescriptor

func (DataFormat) EnumDescriptor() ([]byte, []int)

func (DataFormat) String

func (x DataFormat) String() string

type FinalizeStreamRequest

type FinalizeStreamRequest struct {
	// Stream to finalize.
	Stream               *Stream  `protobuf:"bytes,2,opt,name=stream,proto3" json:"stream,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}
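
A sketch of requesting graceful termination of a stream. As the service documentation above notes, data already assigned to the stream must still be read to the end after this call; finalize is a hypothetical helper name and the import path is assumed.

package example

import (
	"context"

	// Assumed import path for this generated package.
	storagepb "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1"
)

// finalize asks the service to stop assigning new data to stream. The caller
// should keep calling Recv on its open ReadRows stream until io.EOF so that
// already-assigned rows are still processed.
func finalize(ctx context.Context, client storagepb.BigQueryStorageClient, stream *storagepb.Stream) error {
	_, err := client.FinalizeStream(ctx, &storagepb.FinalizeStreamRequest{Stream: stream})
	return err
}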

func (*FinalizeStreamRequest) Descriptor

func (*FinalizeStreamRequest) Descriptor() ([]byte, []int)

func (*FinalizeStreamRequest) GetStream

func (m *FinalizeStreamRequest) GetStream() *Stream

func (*FinalizeStreamRequest) ProtoMessage

func (*FinalizeStreamRequest) ProtoMessage()

func (*FinalizeStreamRequest) Reset

func (m *FinalizeStreamRequest) Reset()

func (*FinalizeStreamRequest) String

func (m *FinalizeStreamRequest) String() string

func (*FinalizeStreamRequest) XXX_DiscardUnknown

func (m *FinalizeStreamRequest) XXX_DiscardUnknown()

func (*FinalizeStreamRequest) XXX_Marshal

func (m *FinalizeStreamRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FinalizeStreamRequest) XXX_Merge

func (m *FinalizeStreamRequest) XXX_Merge(src proto.Message)

func (*FinalizeStreamRequest) XXX_Size

func (m *FinalizeStreamRequest) XXX_Size() int

func (*FinalizeStreamRequest) XXX_Unmarshal

func (m *FinalizeStreamRequest) XXX_Unmarshal(b []byte) error

type ReadRowsRequest

type ReadRowsRequest struct {
	// Required. Identifier of the position in the stream to start reading from.
	// The offset requested must be less than the last row read from ReadRows.
	// Requesting a larger offset is undefined.
	ReadPosition         *StreamPosition `protobuf:"bytes,1,opt,name=read_position,json=readPosition,proto3" json:"read_position,omitempty"`
	XXX_NoUnkeyedLiteral struct{}        `json:"-"`
	XXX_unrecognized     []byte          `json:"-"`
	XXX_sizecache        int32           `json:"-"`
}
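
Because ReadPosition carries an offset, a reader that is interrupted can reopen the stream at the next unread row rather than starting over. A sketch, assuming the caller tracks how many rows it has already processed; resume is a hypothetical helper name and the import path is assumed.

package example

import (
	"context"

	// Assumed import path for this generated package.
	storagepb "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1"
)

// resume reopens a stream at the first row that has not yet been processed.
// rowsRead is the number of rows already consumed from earlier ReadRows calls.
func resume(ctx context.Context, client storagepb.BigQueryStorageClient, stream *storagepb.Stream, rowsRead int64) (storagepb.BigQueryStorage_ReadRowsClient, error) {
	return client.ReadRows(ctx, &storagepb.ReadRowsRequest{
		ReadPosition: &storagepb.StreamPosition{
			Stream: stream,
			Offset: rowsRead, // resume here; must not skip past rows already returned
		},
	})
}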

func (*ReadRowsRequest) Descriptor

func (*ReadRowsRequest) Descriptor() ([]byte, []int)

func (*ReadRowsRequest) GetReadPosition

func (m *ReadRowsRequest) GetReadPosition() *StreamPosition

func (*ReadRowsRequest) ProtoMessage

func (*ReadRowsRequest) ProtoMessage()

func (*ReadRowsRequest) Reset

func (m *ReadRowsRequest) Reset()

func (*ReadRowsRequest) String

func (m *ReadRowsRequest) String() string

func (*ReadRowsRequest) XXX_DiscardUnknown

func (m *ReadRowsRequest) XXX_DiscardUnknown()

func (*ReadRowsRequest) XXX_Marshal

func (m *ReadRowsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ReadRowsRequest) XXX_Merge

func (m *ReadRowsRequest) XXX_Merge(src proto.Message)

func (*ReadRowsRequest) XXX_Size

func (m *ReadRowsRequest) XXX_Size() int

func (*ReadRowsRequest) XXX_Unmarshal

func (m *ReadRowsRequest) XXX_Unmarshal(b []byte) error

type ReadRowsResponse

type ReadRowsResponse struct {
	// Types that are valid to be assigned to Rows:
	//	*ReadRowsResponse_AvroRows
	Rows isReadRowsResponse_Rows `protobuf_oneof:"rows"`
	// Estimated stream statistics.
	Status *StreamStatus `protobuf:"bytes,2,opt,name=status,proto3" json:"status,omitempty"`
	// Throttling status. If unset, the latest response still describes
	// the current throttling status.
	ThrottleStatus       *ThrottleStatus `protobuf:"bytes,5,opt,name=throttle_status,json=throttleStatus,proto3" json:"throttle_status,omitempty"`
	XXX_NoUnkeyedLiteral struct{}        `json:"-"`
	XXX_unrecognized     []byte          `json:"-"`
	XXX_sizecache        int32           `json:"-"`
}

func (*ReadRowsResponse) Descriptor

func (*ReadRowsResponse) Descriptor() ([]byte, []int)

func (*ReadRowsResponse) GetAvroRows

func (m *ReadRowsResponse) GetAvroRows() *AvroRows

func (*ReadRowsResponse) GetRows

func (m *ReadRowsResponse) GetRows() isReadRowsResponse_Rows

func (*ReadRowsResponse) GetStatus

func (m *ReadRowsResponse) GetStatus() *StreamStatus

func (*ReadRowsResponse) GetThrottleStatus

func (m *ReadRowsResponse) GetThrottleStatus() *ThrottleStatus

func (*ReadRowsResponse) ProtoMessage

func (*ReadRowsResponse) ProtoMessage()

func (*ReadRowsResponse) Reset

func (m *ReadRowsResponse) Reset()

func (*ReadRowsResponse) String

func (m *ReadRowsResponse) String() string

func (*ReadRowsResponse) XXX_DiscardUnknown

func (m *ReadRowsResponse) XXX_DiscardUnknown()

func (*ReadRowsResponse) XXX_Marshal

func (m *ReadRowsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ReadRowsResponse) XXX_Merge

func (m *ReadRowsResponse) XXX_Merge(src proto.Message)

func (*ReadRowsResponse) XXX_OneofFuncs

func (*ReadRowsResponse) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{})

XXX_OneofFuncs is for the internal use of the proto package.

func (*ReadRowsResponse) XXX_Size

func (m *ReadRowsResponse) XXX_Size() int

func (*ReadRowsResponse) XXX_Unmarshal

func (m *ReadRowsResponse) XXX_Unmarshal(b []byte) error

type ReadRowsResponse_AvroRows

type ReadRowsResponse_AvroRows struct {
	AvroRows *AvroRows `protobuf:"bytes,3,opt,name=avro_rows,json=avroRows,proto3,oneof"`
}
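
Rows is a oneof with a single Avro variant in this version, so responses can be unwrapped with the typed getter or a type switch. A minimal sketch; avroPayload is a hypothetical helper name and the import path is assumed.

package example

import (
	// Assumed import path for this generated package.
	storagepb "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1"
)

// avroPayload extracts the serialized Avro block from a response, if present.
func avroPayload(resp *storagepb.ReadRowsResponse) ([]byte, bool) {
	switch rows := resp.GetRows().(type) {
	case *storagepb.ReadRowsResponse_AvroRows:
		return rows.AvroRows.GetSerializedBinaryRows(), true
	default:
		// No rows set, or a format added in a later revision of the API.
		return nil, false
	}
}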

type ReadSession

type ReadSession struct {
	// Unique identifier for the session. In the form
	// `projects/{project_id}/sessions/{session_id}`
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// Time at which the session becomes invalid. After this time, subsequent
	// requests to read this Session will return errors.
	ExpireTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=expire_time,json=expireTime,proto3" json:"expire_time,omitempty"`
	// The schema for the read. If read_options.selected_fields is set, the
	// schema may be different from the table schema as it will only contain
	// the selected fields.
	//
	// Types that are valid to be assigned to Schema:
	//	*ReadSession_AvroSchema
	Schema isReadSession_Schema `protobuf_oneof:"schema"`
	// Streams associated with this session.
	Streams []*Stream `protobuf:"bytes,4,rep,name=streams,proto3" json:"streams,omitempty"`
	// Table that this ReadSession is reading from.
	TableReference *TableReference `protobuf:"bytes,7,opt,name=table_reference,json=tableReference,proto3" json:"table_reference,omitempty"`
	// Any modifiers which are applied when reading from the specified table.
	TableModifiers       *TableModifiers `protobuf:"bytes,8,opt,name=table_modifiers,json=tableModifiers,proto3" json:"table_modifiers,omitempty"`
	XXX_NoUnkeyedLiteral struct{}        `json:"-"`
	XXX_unrecognized     []byte          `json:"-"`
	XXX_sizecache        int32           `json:"-"`
}

func (*ReadSession) Descriptor

func (*ReadSession) Descriptor() ([]byte, []int)

func (*ReadSession) GetAvroSchema

func (m *ReadSession) GetAvroSchema() *AvroSchema

func (*ReadSession) GetExpireTime

func (m *ReadSession) GetExpireTime() *timestamp.Timestamp

func (*ReadSession) GetName

func (m *ReadSession) GetName() string

func (*ReadSession) GetSchema

func (m *ReadSession) GetSchema() isReadSession_Schema

func (*ReadSession) GetStreams

func (m *ReadSession) GetStreams() []*Stream

func (*ReadSession) GetTableModifiers

func (m *ReadSession) GetTableModifiers() *TableModifiers

func (*ReadSession) GetTableReference

func (m *ReadSession) GetTableReference() *TableReference

func (*ReadSession) ProtoMessage

func (*ReadSession) ProtoMessage()

func (*ReadSession) Reset

func (m *ReadSession) Reset()

func (*ReadSession) String

func (m *ReadSession) String() string

func (*ReadSession) XXX_DiscardUnknown

func (m *ReadSession) XXX_DiscardUnknown()

func (*ReadSession) XXX_Marshal

func (m *ReadSession) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ReadSession) XXX_Merge

func (m *ReadSession) XXX_Merge(src proto.Message)

func (*ReadSession) XXX_OneofFuncs

func (*ReadSession) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{})

XXX_OneofFuncs is for the internal use of the proto package.

func (*ReadSession) XXX_Size

func (m *ReadSession) XXX_Size() int

func (*ReadSession) XXX_Unmarshal

func (m *ReadSession) XXX_Unmarshal(b []byte) error

type ReadSession_AvroSchema

type ReadSession_AvroSchema struct {
	AvroSchema *AvroSchema `protobuf:"bytes,5,opt,name=avro_schema,json=avroSchema,proto3,oneof"`
}

type SplitReadStreamRequest

type SplitReadStreamRequest struct {
	// Stream to split.
	OriginalStream       *Stream  `protobuf:"bytes,1,opt,name=original_stream,json=originalStream,proto3" json:"original_stream,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

func (*SplitReadStreamRequest) Descriptor

func (*SplitReadStreamRequest) Descriptor() ([]byte, []int)

func (*SplitReadStreamRequest) GetOriginalStream

func (m *SplitReadStreamRequest) GetOriginalStream() *Stream

func (*SplitReadStreamRequest) ProtoMessage

func (*SplitReadStreamRequest) ProtoMessage()

func (*SplitReadStreamRequest) Reset

func (m *SplitReadStreamRequest) Reset()

func (*SplitReadStreamRequest) String

func (m *SplitReadStreamRequest) String() string

func (*SplitReadStreamRequest) XXX_DiscardUnknown

func (m *SplitReadStreamRequest) XXX_DiscardUnknown()

func (*SplitReadStreamRequest) XXX_Marshal

func (m *SplitReadStreamRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*SplitReadStreamRequest) XXX_Merge

func (m *SplitReadStreamRequest) XXX_Merge(src proto.Message)

func (*SplitReadStreamRequest) XXX_Size

func (m *SplitReadStreamRequest) XXX_Size() int

func (*SplitReadStreamRequest) XXX_Unmarshal

func (m *SplitReadStreamRequest) XXX_Unmarshal(b []byte) error

type SplitReadStreamResponse

type SplitReadStreamResponse struct {
	// Primary stream. Will contain the beginning portion of
	// |original_stream|.
	PrimaryStream *Stream `protobuf:"bytes,1,opt,name=primary_stream,json=primaryStream,proto3" json:"primary_stream,omitempty"`
	// Remainder stream. Will contain the tail of |original_stream|.
	RemainderStream      *Stream  `protobuf:"bytes,2,opt,name=remainder_stream,json=remainderStream,proto3" json:"remainder_stream,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}
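
A sketch of splitting a stream to increase parallelism mid-read: the primary and remainder streams returned here are then read independently, in the same way as any other stream. split is a hypothetical helper name and the import path is assumed.

package example

import (
	"context"

	// Assumed import path for this generated package.
	storagepb "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1"
)

// split divides one stream into a primary and a remainder stream so that two
// workers can read them in parallel. The original stream remains readable.
func split(ctx context.Context, client storagepb.BigQueryStorageClient, original *storagepb.Stream) (primary, remainder *storagepb.Stream, err error) {
	resp, err := client.SplitReadStream(ctx, &storagepb.SplitReadStreamRequest{
		OriginalStream: original,
	})
	if err != nil {
		return nil, nil, err
	}
	return resp.GetPrimaryStream(), resp.GetRemainderStream(), nil
}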

func (*SplitReadStreamResponse) Descriptor

func (*SplitReadStreamResponse) Descriptor() ([]byte, []int)

func (*SplitReadStreamResponse) GetPrimaryStream

func (m *SplitReadStreamResponse) GetPrimaryStream() *Stream

func (*SplitReadStreamResponse) GetRemainderStream

func (m *SplitReadStreamResponse) GetRemainderStream() *Stream

func (*SplitReadStreamResponse) ProtoMessage

func (*SplitReadStreamResponse) ProtoMessage()

func (*SplitReadStreamResponse) Reset

func (m *SplitReadStreamResponse) Reset()

func (*SplitReadStreamResponse) String

func (m *SplitReadStreamResponse) String() string

func (*SplitReadStreamResponse) XXX_DiscardUnknown

func (m *SplitReadStreamResponse) XXX_DiscardUnknown()

func (*SplitReadStreamResponse) XXX_Marshal

func (m *SplitReadStreamResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*SplitReadStreamResponse) XXX_Merge

func (m *SplitReadStreamResponse) XXX_Merge(src proto.Message)

func (*SplitReadStreamResponse) XXX_Size

func (m *SplitReadStreamResponse) XXX_Size() int

func (*SplitReadStreamResponse) XXX_Unmarshal

func (m *SplitReadStreamResponse) XXX_Unmarshal(b []byte) error

type Stream

type Stream struct {
	// Name of the stream. In the form
	// `/projects/{project_id}/stream/{stream_id}`
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// Rows in the stream.
	RowCount             int64    `protobuf:"varint,2,opt,name=row_count,json=rowCount,proto3" json:"row_count,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

func (*Stream) Descriptor

func (*Stream) Descriptor() ([]byte, []int)

func (*Stream) GetName

func (m *Stream) GetName() string

func (*Stream) GetRowCount

func (m *Stream) GetRowCount() int64

func (*Stream) ProtoMessage

func (*Stream) ProtoMessage()

func (*Stream) Reset

func (m *Stream) Reset()

func (*Stream) String

func (m *Stream) String() string

func (*Stream) XXX_DiscardUnknown

func (m *Stream) XXX_DiscardUnknown()

func (*Stream) XXX_Marshal

func (m *Stream) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*Stream) XXX_Merge

func (m *Stream) XXX_Merge(src proto.Message)

func (*Stream) XXX_Size

func (m *Stream) XXX_Size() int

func (*Stream) XXX_Unmarshal

func (m *Stream) XXX_Unmarshal(b []byte) error

type StreamPosition

type StreamPosition struct {
	Stream *Stream `protobuf:"bytes,1,opt,name=stream,proto3" json:"stream,omitempty"`
	// Position in the stream.
	Offset               int64    `protobuf:"varint,2,opt,name=offset,proto3" json:"offset,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

func (*StreamPosition) Descriptor

func (*StreamPosition) Descriptor() ([]byte, []int)

func (*StreamPosition) GetOffset

func (m *StreamPosition) GetOffset() int64

func (*StreamPosition) GetStream

func (m *StreamPosition) GetStream() *Stream

func (*StreamPosition) ProtoMessage

func (*StreamPosition) ProtoMessage()

func (*StreamPosition) Reset

func (m *StreamPosition) Reset()

func (*StreamPosition) String

func (m *StreamPosition) String() string

func (*StreamPosition) XXX_DiscardUnknown

func (m *StreamPosition) XXX_DiscardUnknown()

func (*StreamPosition) XXX_Marshal

func (m *StreamPosition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*StreamPosition) XXX_Merge

func (m *StreamPosition) XXX_Merge(src proto.Message)

func (*StreamPosition) XXX_Size

func (m *StreamPosition) XXX_Size() int

func (*StreamPosition) XXX_Unmarshal

func (m *StreamPosition) XXX_Unmarshal(b []byte) error

type StreamStatus

type StreamStatus struct {
	// Number of estimated rows in the current stream. May change over time as
	// different readers in the stream progress at rates which are relatively fast
	// or slow.
	EstimatedRowCount    int64    `protobuf:"varint,1,opt,name=estimated_row_count,json=estimatedRowCount,proto3" json:"estimated_row_count,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

func (*StreamStatus) Descriptor

func (*StreamStatus) Descriptor() ([]byte, []int)

func (*StreamStatus) GetEstimatedRowCount

func (m *StreamStatus) GetEstimatedRowCount() int64

func (*StreamStatus) ProtoMessage

func (*StreamStatus) ProtoMessage()

func (*StreamStatus) Reset

func (m *StreamStatus) Reset()

func (*StreamStatus) String

func (m *StreamStatus) String() string

func (*StreamStatus) XXX_DiscardUnknown

func (m *StreamStatus) XXX_DiscardUnknown()

func (*StreamStatus) XXX_Marshal

func (m *StreamStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*StreamStatus) XXX_Merge

func (m *StreamStatus) XXX_Merge(src proto.Message)

func (*StreamStatus) XXX_Size

func (m *StreamStatus) XXX_Size() int

func (*StreamStatus) XXX_Unmarshal

func (m *StreamStatus) XXX_Unmarshal(b []byte) error

type TableModifiers

type TableModifiers struct {
	// The snapshot time of the table. If not set, interpreted as now.
	SnapshotTime         *timestamp.Timestamp `protobuf:"bytes,1,opt,name=snapshot_time,json=snapshotTime,proto3" json:"snapshot_time,omitempty"`
	XXX_NoUnkeyedLiteral struct{}             `json:"-"`
	XXX_unrecognized     []byte               `json:"-"`
	XXX_sizecache        int32                `json:"-"`
}

All fields in this message are optional.

func (*TableModifiers) Descriptor

func (*TableModifiers) Descriptor() ([]byte, []int)

func (*TableModifiers) GetSnapshotTime

func (m *TableModifiers) GetSnapshotTime() *timestamp.Timestamp

func (*TableModifiers) ProtoMessage

func (*TableModifiers) ProtoMessage()

func (*TableModifiers) Reset

func (m *TableModifiers) Reset()

func (*TableModifiers) String

func (m *TableModifiers) String() string

func (*TableModifiers) XXX_DiscardUnknown

func (m *TableModifiers) XXX_DiscardUnknown()

func (*TableModifiers) XXX_Marshal

func (m *TableModifiers) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*TableModifiers) XXX_Merge

func (m *TableModifiers) XXX_Merge(src proto.Message)

func (*TableModifiers) XXX_Size

func (m *TableModifiers) XXX_Size() int

func (*TableModifiers) XXX_Unmarshal

func (m *TableModifiers) XXX_Unmarshal(b []byte) error

type TableReadOptions

type TableReadOptions struct {
	// Optional. Names of the fields in the table that should be read. If empty,
	// all fields will be read. If the specified field is a nested field, all the
	// sub-fields in the field will be selected. The output field order is
	// unrelated to the order of fields in selected_fields.
	SelectedFields []string `protobuf:"bytes,1,rep,name=selected_fields,json=selectedFields,proto3" json:"selected_fields,omitempty"`
	// Optional. SQL text filtering statement, similar to a WHERE clause in
	// a query. Currently, we support combinations of predicates that are
	// a comparison between a column and a constant value in a SQL statement.
	// Aggregates are not supported.
	//
	// Example: "a > DATE '2014-9-27' AND (b > 5 and C LIKE 'date')"
	RowRestriction       string   `protobuf:"bytes,2,opt,name=row_restriction,json=rowRestriction,proto3" json:"row_restriction,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

Options dictating how we read a table.
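
A sketch of read options that select a few columns and push a filter down to the service; the column names and predicate are placeholders, narrowRead is a hypothetical helper name, and the import path is assumed.

package example

import (
	// Assumed import path for this generated package.
	storagepb "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1"
)

// narrowRead selects two columns and filters rows server-side, reducing the
// amount of data each stream has to deliver.
func narrowRead() *storagepb.TableReadOptions {
	return &storagepb.TableReadOptions{
		SelectedFields: []string{"name", "state"},        // only these columns are returned
		RowRestriction: "state = 'CA' AND number > 1000", // simple column/constant predicates only
	}
}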

func (*TableReadOptions) Descriptor

func (*TableReadOptions) Descriptor() ([]byte, []int)

func (*TableReadOptions) GetRowRestriction

func (m *TableReadOptions) GetRowRestriction() string

func (*TableReadOptions) GetSelectedFields

func (m *TableReadOptions) GetSelectedFields() []string

func (*TableReadOptions) ProtoMessage

func (*TableReadOptions) ProtoMessage()

func (*TableReadOptions) Reset

func (m *TableReadOptions) Reset()

func (*TableReadOptions) String

func (m *TableReadOptions) String() string

func (*TableReadOptions) XXX_DiscardUnknown

func (m *TableReadOptions) XXX_DiscardUnknown()

func (*TableReadOptions) XXX_Marshal

func (m *TableReadOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*TableReadOptions) XXX_Merge

func (m *TableReadOptions) XXX_Merge(src proto.Message)

func (*TableReadOptions) XXX_Size

func (m *TableReadOptions) XXX_Size() int

func (*TableReadOptions) XXX_Unmarshal

func (m *TableReadOptions) XXX_Unmarshal(b []byte) error

type TableReference

type TableReference struct {
	// The assigned project ID of the project.
	ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
	// The ID of the dataset in the above project.
	DatasetId string `protobuf:"bytes,2,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"`
	// The ID of the table in the above dataset.
	TableId              string   `protobuf:"bytes,3,opt,name=table_id,json=tableId,proto3" json:"table_id,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

Table reference that includes just the 3 strings needed to identify a table.

func (*TableReference) Descriptor

func (*TableReference) Descriptor() ([]byte, []int)

func (*TableReference) GetDatasetId

func (m *TableReference) GetDatasetId() string

func (*TableReference) GetProjectId

func (m *TableReference) GetProjectId() string

func (*TableReference) GetTableId

func (m *TableReference) GetTableId() string

func (*TableReference) ProtoMessage

func (*TableReference) ProtoMessage()

func (*TableReference) Reset

func (m *TableReference) Reset()

func (*TableReference) String

func (m *TableReference) String() string

func (*TableReference) XXX_DiscardUnknown

func (m *TableReference) XXX_DiscardUnknown()

func (*TableReference) XXX_Marshal

func (m *TableReference) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*TableReference) XXX_Merge

func (m *TableReference) XXX_Merge(src proto.Message)

func (*TableReference) XXX_Size

func (m *TableReference) XXX_Size() int

func (*TableReference) XXX_Unmarshal

func (m *TableReference) XXX_Unmarshal(b []byte) error

type ThrottleStatus

type ThrottleStatus struct {
	// How much this connection is being throttled.
	// 0 is no throttling, 100 is completely throttled.
	ThrottlePercent      int32    `protobuf:"varint,1,opt,name=throttle_percent,json=throttlePercent,proto3" json:"throttle_percent,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

Information on whether the current connection is being throttled.

func (*ThrottleStatus) Descriptor

func (*ThrottleStatus) Descriptor() ([]byte, []int)

func (*ThrottleStatus) GetThrottlePercent

func (m *ThrottleStatus) GetThrottlePercent() int32

func (*ThrottleStatus) ProtoMessage

func (*ThrottleStatus) ProtoMessage()

func (*ThrottleStatus) Reset

func (m *ThrottleStatus) Reset()

func (*ThrottleStatus) String

func (m *ThrottleStatus) String() string

func (*ThrottleStatus) XXX_DiscardUnknown

func (m *ThrottleStatus) XXX_DiscardUnknown()

func (*ThrottleStatus) XXX_Marshal

func (m *ThrottleStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ThrottleStatus) XXX_Merge

func (m *ThrottleStatus) XXX_Merge(src proto.Message)

func (*ThrottleStatus) XXX_Size

func (m *ThrottleStatus) XXX_Size() int

func (*ThrottleStatus) XXX_Unmarshal

func (m *ThrottleStatus) XXX_Unmarshal(b []byte) error
