schedulerapi

package
v0.0.0-...-afb19b5
Published: Nov 25, 2018 License: MIT Imports: 24 Imported by: 0

Documentation

Constants

const BufferSizeWriteLimit int = 1024 * 1024 * 3.5 // 3.5 MB

BufferSizeWriteLimit defines the approximate maximum size of content to write to a blob (the actual limit is 4 MB; we stop before reaching it)
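
As an illustration (not part of this package), a payload could be cut into chunks that respect this limit before each write; the chunking helper below is a hypothetical sketch:

package main

import "fmt"

// bufferSizeWriteLimit mirrors the documented BufferSizeWriteLimit value (3.5 MB).
const bufferSizeWriteLimit = 1024 * 1024 * 3.5

// chunk splits data into pieces no larger than the write limit, so that each
// piece can be written to a blob without hitting the 4 MB service limit.
func chunk(data []byte) [][]byte {
	var chunks [][]byte
	for len(data) > 0 {
		n := len(data)
		if n > bufferSizeWriteLimit {
			n = bufferSizeWriteLimit
		}
		chunks = append(chunks, data[:n])
		data = data[n:]
	}
	return chunks
}

func main() {
	payload := make([]byte, 8*1024*1024) // 8 MB of input
	fmt.Println(len(chunk(payload)))     // 3 chunks, each at most 3.5 MB
}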

const JobBlobPrefixEnvVarName = "BLOBPREFIX"

JobBlobPrefixEnvVarName defines the environment variable containing the storage blob prefix name for the job

const JobCorrelationIDLabelName = "job_correlation_id"

JobCorrelationIDLabelName defines the label name used to correlate a job with related jobs

const JobEventGridSasKeyEnvVarName = "EVENTGRIDSASKEY"

JobEventGridSasKeyEnvVarName defines the environment variable containing the Event Grid SAS key for the job

const JobEventGridTopicEndpointEnvVarName = "EVENTGRIDTOPICENDPOINT"

JobEventGridTopicEndpointEnvVarName defines the environment variable containing the Event Grid topic endpoint for the job

const JobHasFinalizerAnnotationName = "job_has_finalizer"

JobHasFinalizerAnnotationName defines the annotation name indicating whether the job has a finalizer

const JobHasWatcherAnnotationName = "job_has_watcher"

JobHasWatcherAnnotationName defines the annotation name indicating whether the job has a companion watcher job

const JobIDEnvVarName = "JOBID"

JobIDEnvVarName defines the environment variable containing the job identifier

const JobItemsPerJobEnvVarName = "ITEMSPERJOB"

JobItemsPerJobEnvVarName defines the environment variable containing the number of items to be processed per job

const JobNameEnvVarName = "JOBNAME"

JobNameEnvVarName defines the environment variable containing the job name

const JobStorageAccountKeyEnvVarName = "STORAGEKEY"

JobStorageAccountKeyEnvVarName defines the environment variable containing the storage account key

const JobStorageAccountNameEnvVarName = "STORAGEACCOUNT"

JobStorageAccountNameEnvVarName defines the environment variable containing the storage account name

const JobStorageConnectionStringEnvVarName = "STORAGECONNECTIONSTRING"

JobStorageConnectionStringEnvVarName defines the environment variable containing the storage connection string for the job

const JobStorageContainerEnvVarName = "CONTAINER"

JobStorageContainerEnvVarName defines the environment variable containing the storage container for the job
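
As a sketch of how a job might consume these settings (the config struct and helper below are illustrative, not part of this package):

package main

import (
	"fmt"
	"os"
)

// jobConfig is an illustrative holder for the values the scheduler injects
// into the job pod through the environment variables documented above.
type jobConfig struct {
	JobID            string
	JobName          string
	ItemsPerJob      string
	StorageAccount   string
	StorageKey       string
	ConnectionString string
	Container        string
	BlobPrefix       string
}

func loadJobConfig() jobConfig {
	return jobConfig{
		JobID:            os.Getenv("JOBID"),                   // JobIDEnvVarName
		JobName:          os.Getenv("JOBNAME"),                 // JobNameEnvVarName
		ItemsPerJob:      os.Getenv("ITEMSPERJOB"),             // JobItemsPerJobEnvVarName
		StorageAccount:   os.Getenv("STORAGEACCOUNT"),          // JobStorageAccountNameEnvVarName
		StorageKey:       os.Getenv("STORAGEKEY"),              // JobStorageAccountKeyEnvVarName
		ConnectionString: os.Getenv("STORAGECONNECTIONSTRING"), // JobStorageConnectionStringEnvVarName
		Container:        os.Getenv("CONTAINER"),               // JobStorageContainerEnvVarName
		BlobPrefix:       os.Getenv("BLOBPREFIX"),              // JobBlobPrefixEnvVarName
	}
}

func main() {
	cfg := loadJobConfig()
	fmt.Printf("job %s reads blobs %s/%s*\n", cfg.JobID, cfg.Container, cfg.BlobPrefix)
}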

const MainJobAnnotationName = "job_is_main"

MainJobAnnotationName defines the annotation name for the main job flag

const StorageBlobPrefixAnnotationName = "storage_blob_prefix"

StorageBlobPrefixAnnotationName defines the annotation name containing the storage blob input prefix

const StorageContainerAnnotationName = "storage_container"

StorageContainerAnnotationName defines the annotation name containing the storage container name
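
For context, a hedged sketch of attaching these label and annotation names to a Kubernetes Job's metadata; the values shown are placeholders, and this is not the package's actual job-creation code:

package main

import (
	"fmt"

	batchv1 "k8s.io/api/batch/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func main() {
	job := batchv1.Job{
		ObjectMeta: metav1.ObjectMeta{
			Name: "calc-job-123",
			Labels: map[string]string{
				"job_correlation_id": "calc-123", // JobCorrelationIDLabelName
			},
			Annotations: map[string]string{
				"job_is_main":         "true",           // MainJobAnnotationName
				"job_has_finalizer":   "true",           // JobHasFinalizerAnnotationName
				"job_has_watcher":     "false",          // JobHasWatcherAnnotationName
				"storage_container":   "jobs",           // StorageContainerAnnotationName
				"storage_blob_prefix": "calc-123/input", // StorageBlobPrefixAnnotationName
			},
		},
	}
	fmt.Println(job.Annotations["storage_container"])
}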

Variables

var (

	// JobMemoryLimit defines the memory limit for the job pod when running on local cluster
	JobMemoryLimit = flag.String("jobMemoryLimit", getEnvString("JOBMEMORYLIMIT", "256Mi"), "Job Memory limit for local cluster (256Mi by default)")
)
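
A minimal sketch of turning such a flag value into a container memory limit; the flag here is registered locally for the example rather than reusing the package's own definition:

package main

import (
	"flag"
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/api/resource"
)

var jobMemoryLimit = flag.String("jobMemoryLimit", "256Mi", "Job Memory limit for local cluster (256Mi by default)")

func main() {
	flag.Parse()

	// Turn the flag value into a Kubernetes resource quantity and attach it
	// to a container spec as its memory limit.
	limits := corev1.ResourceList{
		corev1.ResourceMemory: resource.MustParse(*jobMemoryLimit),
	}
	container := corev1.Container{
		Name:      "worker",
		Resources: corev1.ResourceRequirements{Limits: limits},
	}
	fmt.Println(container.Resources.Limits.Memory().String()) // 256Mi
}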

Functions

func Initialize

func Initialize() error

Initialize configures the API

func SetupRoutes

func SetupRoutes(router *mux.Router)

SetupRoutes registers the API routes
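
A sketch of wiring Initialize and SetupRoutes into an HTTP server, assuming the router is github.com/gorilla/mux; the schedulerapi import path below is a placeholder, since the real module path is elided in this listing:

package main

import (
	"log"
	"net/http"

	"github.com/gorilla/mux"

	// Placeholder import path; substitute the real module path of schedulerapi.
	"example.com/scheduler/schedulerapi"
)

func main() {
	if err := schedulerapi.Initialize(); err != nil {
		log.Fatalf("initializing scheduler api: %v", err)
	}

	router := mux.NewRouter()
	schedulerapi.SetupRoutes(router)

	log.Fatal(http.ListenAndServe(":8080", router))
}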

Types

type InputSplitter

type InputSplitter interface {
	Split(ctx context.Context, reader io.Reader, containerURL azblob.ContainerURL, jobNamePrefix string) (int, error)
}

InputSplitter defines the interface for types that split an input file

type InputSplitterByLine

type InputSplitterByLine struct {
	// contains filtered or unexported fields
}

InputSplitterByLine splits a large file into smaller files, line by line

func NewInputSplitterByLine

func NewInputSplitterByLine(linesPerFile int) InputSplitterByLine

NewInputSplitterByLine creates a new instance of InputSplitterByLine

func (InputSplitterByLine) Split

func (splitter InputSplitterByLine) Split(ctx context.Context, reader io.Reader, containerURL azblob.ContainerURL, jobNamePrefix string) (int, error)

Split creates smaller files from a large calculation input
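
As a usage sketch, the splitter could be driven as below; the helper name, the 100-lines-per-file value, the job name prefix, and both import paths are assumptions (the real module path of this package is elided in this listing, and the azblob import path varies by release):

package worker

import (
	"context"
	"io"

	// The azblob import path varies by release of azure-storage-blob-go.
	"github.com/Azure/azure-storage-blob-go/azblob"

	// Placeholder import path; substitute the real module path of schedulerapi.
	"example.com/scheduler/schedulerapi"
)

// splitJobInput splits the input read from r into blobs of at most 100 lines
// each, uploads them to containerURL under the "calc-123" job name prefix,
// and returns how many files were created.
func splitJobInput(ctx context.Context, containerURL azblob.ContainerURL, r io.Reader) (int, error) {
	splitter := schedulerapi.NewInputSplitterByLine(100)
	return splitter.Split(ctx, r, containerURL, "calc-123")
}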

type Job

type Job struct {
	ID                string                         `json:"id"`
	Status            string                         `json:"status"`
	StartTime         *time.Time                     `json:"startTime,omitempty"`
	CompletionTime    *time.Time                     `json:"completionTime,omitempty"`
	Active            int                            `json:"active,omitempty"`
	Succeeded         int                            `json:"succeeded,omitempty"`
	Failed            int                            `json:"failed,omitempty"`
	Parallelism       int                            `json:"parallelism,omitempty"`
	Completions       int                            `json:"completions,omitempty"`
	StorageContainer  string                         `json:"storageContainer,omitempty"`
	StorageBlobPrefix string                         `json:"storageBlobPrefix,omitempty"`
	ExecutionLocation scheduler.JobExecutionLocation `json:"executionLocation"`
}

Job defines a work item
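
For reference, marshaling a Job illustrates the JSON shape implied by the struct tags; the field values and the schedulerapi import path below are illustrative only:

package main

import (
	"encoding/json"
	"fmt"
	"time"

	// Placeholder import path; substitute the real module path of schedulerapi.
	"example.com/scheduler/schedulerapi"
)

func main() {
	start := time.Date(2018, 11, 25, 10, 0, 0, 0, time.UTC)
	job := schedulerapi.Job{
		ID:               "calc-123",
		Status:           "Running",
		StartTime:        &start,
		Active:           2,
		Parallelism:      4,
		Completions:      4,
		StorageContainer: "jobs",
	}
	b, _ := json.MarshalIndent(job, "", "  ")
	fmt.Println(string(b)) // fields tagged omitempty are omitted while zero
}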

func NewJob

func NewJob(job, finalizerJob *batchv1.Job) Job

NewJob creates a Job from an existing K8s job and its finalizer job

type JobCreateRequest

type JobCreateRequest struct {
	ID                string `json:"id"`
	StorageContainer  string `json:"storageContainer,omitempty"`
	StorageBlobPrefix string `json:"storageBlobPrefix,omitempty"`
}

JobCreateRequest defines a request to create a job whose input files already exist in storage
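
As a client-side sketch, such a request could be posted as JSON; the /jobs path, the server address, and the local struct mirroring the documented fields are assumptions, not taken from this package:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// jobCreateRequest mirrors the documented JobCreateRequest fields and tags.
type jobCreateRequest struct {
	ID                string `json:"id"`
	StorageContainer  string `json:"storageContainer,omitempty"`
	StorageBlobPrefix string `json:"storageBlobPrefix,omitempty"`
}

func main() {
	body, _ := json.Marshal(jobCreateRequest{
		ID:                "calc-123",
		StorageContainer:  "jobs",
		StorageBlobPrefix: "calc-123/input",
	})

	// The route path is an assumption; consult SetupRoutes for the real one.
	resp, err := http.Post("http://localhost:8080/jobs", "application/json", bytes.NewReader(body))
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}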

type Route

type Route struct {
	Name        string
	Method      string
	Pattern     string
	HandlerFunc http.HandlerFunc
}

Route represents an API route and its handler
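
A hedged sketch of how a table of Route values might be registered on a gorilla/mux router, written as it could appear inside the package; this is illustrative and not the package's actual SetupRoutes implementation:

package schedulerapi

import (
	"net/http"

	"github.com/gorilla/mux"
)

// registerRoutes is an illustrative helper that wires a route table onto the router.
func registerRoutes(router *mux.Router, routes []Route) {
	for _, r := range routes {
		router.HandleFunc(r.Pattern, r.HandlerFunc).Methods(r.Method).Name(r.Name)
	}
}

// exampleRoutes is a placeholder route table; the path and handler are made up.
var exampleRoutes = []Route{
	{
		Name:    "CreateJob",
		Method:  http.MethodPost,
		Pattern: "/jobs",
		HandlerFunc: func(w http.ResponseWriter, r *http.Request) {
			w.WriteHeader(http.StatusAccepted)
		},
	},
}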
