fileutils

package
v1.1.2 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Sep 21, 2018 License: Apache-2.0 Imports: 24 Imported by: 0

Documentation

Index

Constants

View Source
const (
	DefaultQueueSize            = 10000
	DefaultAutoReloaderInterval = 11 * time.Minute

	MaxDaysInPast = 30
)

Variables

View Source
var ErrObjectExist = errors.New("storage: object does exist")

Functions

func AggregateProtoFiles

func AggregateProtoFiles(storage filab.FileStorage, files []filab.Path,
	dest io.Writer) error

func AggregateToGcs

func AggregateToGcs(storage filab.FileStorage, ctx context.Context,
	files []filab.Path, destGsPath filab.Path) error

AggregateToGcs copies messages from all files into one destination path.

func ConnectToCloud

func ConnectToCloud(keyFile string) (*storage.Client, error)

func CopyToCloud

func CopyToCloud(baseCtx context.Context, storage filab.FileStorage,
	src, dest filab.Path) error

func CopyToCloudF

func CopyToCloudF(gclient *CloudStorageClient, filePath, objectPath string)

func FindSharded added in v1.1.0

func FindSharded(storage filab.FileStorage, gs filab.Path,
	pattern *regexp.Regexp) ([]filab.Path, error)

FindSharded looks up a set of files matching a sharding pattern. TODO URGENT: this is complex and needs tests.

func GcsCreateWriter

func GcsCreateWriter(client *storage.Client, ctx context.Context, gsPath string, overwrite bool) (*storage.Writer, error)

func GenSharded

func GenSharded(dirPath filab.Path, prefix string, numShards int, suffix string) []filab.Path

func ObjectsExist added in v1.1.0

func ObjectsExist(storage filab.FileStorage, files ...filab.Path) bool

func OldCopyToCloud

func OldCopyToCloud(gclient *storage.Client, baseCtx context.Context,
	filePath, objectPath string) error

func PrefixedWithPattern

func PrefixedWithPattern(storage filab.FileStorage, ctx context.Context,
	prefix filab.Path, pattern StringMatcher) ([]filab.Path, error)

Types

type Backuper

type Backuper struct {
	Interval    time.Duration
	CopyTimeout time.Duration
	GcsPath     filab.Path
	GceKeyFile  string
	// If set true, the proto files will be first aggregated.
	Aggregate         bool
	DeleteAfterBackup bool
	StripSrcPrefix    string
	// contains filtered or unexported fields
}

func NewBackuper

func NewBackuper(storage filab.FileStorage, destPath filab.Path,
	interval time.Duration, delete bool) *Backuper

func (*Backuper) Add

func (b *Backuper) Add(f string, t time.Time)

func (*Backuper) BackupNow

func (b *Backuper) BackupNow(ctx context.Context) error

func (*Backuper) Start

func (b *Backuper) Start()

func (*Backuper) Stop

func (b *Backuper) Stop()

type CloudStorageClient

type CloudStorageClient struct {
	// contains filtered or unexported fields
}

func NewCloudStorageClient

func NewCloudStorageClient(bucketName string, keyFile string) (*CloudStorageClient, error)

func (*CloudStorageClient) Close

func (g *CloudStorageClient) Close() error

func (*CloudStorageClient) Copy

func (g *CloudStorageClient) Copy(dst, src string) error

func (*CloudStorageClient) CopyAndRm

func (g *CloudStorageClient) CopyAndRm(dst, src string) error

func (*CloudStorageClient) CreateWriter

func (g *CloudStorageClient) CreateWriter(ctx context.Context, fileName string,
	overwrite bool) (*storage.Writer, error)

func (*CloudStorageClient) Find

func (*CloudStorageClient) ForBucket

func (g *CloudStorageClient) ForBucket(bucket string) *CloudStorageClient

func (*CloudStorageClient) GzipReader

func (g *CloudStorageClient) GzipReader(filepath string, ctx context.Context) (reader io.ReadCloser, err error)

func (*CloudStorageClient) Reader

func (g *CloudStorageClient) Reader(filepath string, ctx context.Context) (*storage.Reader, error)

type StringMatcher

type StringMatcher interface {
	MatchString(s string) bool
}

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL