bulk

package
v2.0.1+incompatible Latest Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Oct 1, 2019 License: MIT Imports: 11 Imported by: 0

README

Bulk 2.0 API

back

The bulk package is an implementation of Salesforce APIs centered on Bulk 2.0 operations. These operations include:

  • Creating a job
  • Upload job data
  • Close or Abort a job
  • Delete a job
  • Get all jobs
  • Get job info
  • Get job successful records
  • Get job failed records
  • Get job unprocessed records

As a reference, see Salesforce API documentation

Examples

The following are examples to access the APIs. It is assumed that a sfdc session has been created.

Creating a Job
	resource, err := bulk.NewResource(session)
	if err != nil {
		fmt.Printf("Bulk Resource Error %s\n", err.Error())
		return
	}

	jobOpts := bulk.Options{
		ColumnDelimiter: bulk.Pipe,
		Operation:       bulk.Insert,
		Object:          "Account",
	}
	job, err := resource.CreateJob(jobOpts)
	if err != nil {
		fmt.Printf("Job Create Error %s\n", err.Error())
		return
	}
Uploading Job Data
	fields := []string{
		"Name",
		"FirstName__c",
		"LastName__c",
		"Site",
	}
	formatter, err := bulk.NewFormatter(job, fields)
	if err != nil {
		fmt.Printf("Formatter Error %s\n", err.Error())
		return
	}

	failedRecord := &bulkRecord{
		fields: map[string]interface{}{
			"FirstName__c": "TallFailing",
			"LastName__c":  "PersonFailing",
			"Site":         "MySite",
		},
	}
	successRecord := &bulkRecord{
		fields: map[string]interface{}{
			"Name":         "Tall Person Success",
			"FirstName__c": "TallSuccess",
			"LastName__c":  "PersonSuccess",
			"Site":         "MySite",
		},
	}
	err = formatter.Add(failedRecord, successRecord)
	if err != nil {
		fmt.Printf("Formatter Record Error %s\n", err.Error())
		return
	}

	err = job.Upload(formatter.Reader())
	if err != nil {
		fmt.Printf("Job Upload Error %s\n", err.Error())
		return
	}
Close or Abort Job
	response, err := job.Close()
	if err != nil {
		fmt.Printf("Job Close Error %s\n", err.Error())
		return
	}
	fmt.Println("Bulk Job Closed")
	fmt.Println("-------------------")
	fmt.Printf("%+v\n", response)
Delete a Job
	err := job.Delete()
	if err != nil {
		fmt.Printf("Job Delete Error %s\n", err.Error())
		return
	}
Get All Jobs
	parameters := bulk.Parameters{
		IsPkChunkingEnabled: false,
		JobType:             bulk.V2Ingest,
	}
	jobs, err := resource.AllJobs(parameters)
	if err != nil {
		fmt.Printf("All Jobs Error %s\n", err.Error())
		return
	}
	fmt.Println("All Jobs")
	fmt.Println("-------------------")
	fmt.Printf("%+v\n\n", jobs)
Get Job Info
	info, err := job.Info()
	if err != nil {
		fmt.Printf("Job Info Error %s\n", err.Error())
		return
	}
	fmt.Println("Bulk Job Information")
	fmt.Println("-------------------")
	fmt.Printf("%+v\n", info)
Get Job Successful Records
	info, err = job.Info()
	if err != nil {
		fmt.Printf("Job Info Error %s\n", err.Error())
		return
	}

	if (info.NumberRecordsProcessed - info.NumberRecordsFailed) > 0 {
		successRecords, err := job.SuccessfulRecords()
		if err != nil {
			fmt.Printf("Job Success Records Error %s\n", err.Error())
			return
		}
		fmt.Println("Successful Record(s)")
		fmt.Println("-------------------")
		for _, successRecord := range successRecords {
			fmt.Printf("%+v\n\n", successRecord)
		}

	}
Get Job Failed Records
	info, err = job.Info()
	if err != nil {
		fmt.Printf("Job Info Error %s\n", err.Error())
		return
	}

	if info.NumberRecordsFailed > 0 {
		failedRecords, err := job.FailedRecords()
		if err != nil {
			fmt.Printf("Job Failed Records Error %s\n", err.Error())
			return
		}
		fmt.Println("Failed Record(s)")
		fmt.Println("-------------------")
		for _, failedRecord := range failedRecords {
			fmt.Printf("%+v\n\n", failedRecord)
		}
	}
Get Job Unprocessed Records
	info, err = job.Info()
	if err != nil {
		fmt.Printf("Job Info Error %s\n", err.Error())
		return
	}

	unprocessedRecords, err := job.UnprocessedRecords()
	if err != nil {
		fmt.Printf("Job Unprocessed Records Error %s\n", err.Error())
		return
	}
	fmt.Println("Unprocessed Record(s)")
	fmt.Println("-------------------")
	for _, unprocessedRecord := range unprocessedRecords {
		fmt.Printf("%+v\n\n", unprocessedRecord)
	}

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

This section is empty.

Types

type ColumnDelimiter

type ColumnDelimiter string

ColumnDelimiter is the column delimiter used for CSV job data.

const (
	// Backquote is the (`) character.
	Backquote ColumnDelimiter = "BACKQUOTE"
	// Caret is the (^) character.
	Caret ColumnDelimiter = "CARET"
	// Comma is the (,) character.
	Comma ColumnDelimiter = "COMMA"
	// Pipe is the (|) character.
	Pipe ColumnDelimiter = "PIPE"
	// SemiColon is the (;) character.
	SemiColon ColumnDelimiter = "SEMICOLON"
	// Tab is the (\t) character.
	Tab ColumnDelimiter = "TAB"
)

type ContentType

type ContentType string

ContentType is the format of the data being processed.

const CSV ContentType = "CSV"

CSV is the supported content data type.

type FailedRecord

type FailedRecord struct {
	Error string
	JobRecord
}

FailedRecord indicates why the record failed and the data of the record.

type Formatter

type Formatter struct {
	// contains filtered or unexported fields
}

Formatter is the object that will add records for the bulk uploader.

func NewFormatter

func NewFormatter(job *Job, fields []string) (*Formatter, error)

NewFormatter creates a new formatter using the job and the list of fields.

func (*Formatter) Add

func (f *Formatter) Add(records ...Record) error

Add will place a record in the bulk uploader.

func (*Formatter) Reader

func (f *Formatter) Reader() *strings.Reader

Reader will return a reader of the bulk uploader field record body.

type Info

type Info struct {
	Response
	ApexProcessingTime      int    `json:"apexProcessingTime"`
	APIActiveProcessingTime int    `json:"apiActiveProcessingTime"`
	NumberRecordsFailed     int    `json:"numberRecordsFailed"`
	NumberRecordsProcessed  int    `json:"numberRecordsProcessed"`
	Retries                 int    `json:"retries"`
	TotalProcessingTime     int    `json:"totalProcessingTime"`
	ErrorMessage            string `json:"errorMessage"`
}

Info is the response to the job information API.

type Job

type Job struct {
	// contains filtered or unexported fields
}

Job is the bulk job.

func (*Job) Abort

func (j *Job) Abort() (Response, error)

Abort will abort the current job.

func (*Job) Close

func (j *Job) Close() (Response, error)

Close will close the current job.

func (*Job) Delete

func (j *Job) Delete() error

Delete will delete the current job.

func (*Job) FailedRecords

func (j *Job) FailedRecords() ([]FailedRecord, error)

FailedRecords returns the failed records for the job.

func (*Job) Info

func (j *Job) Info() (Info, error)

Info returns the current job information.

func (*Job) SuccessfulRecords

func (j *Job) SuccessfulRecords() ([]SuccessfulRecord, error)

SuccessfulRecords returns the successful records for the job.

func (*Job) UnprocessedRecords

func (j *Job) UnprocessedRecords() ([]UnprocessedRecord, error)

UnprocessedRecords returns the unprocessed records for the job.

func (*Job) Upload

func (j *Job) Upload(body io.Reader) error

Upload will upload data to processing.

type JobRecord

type JobRecord struct {
	ID string
	UnprocessedRecord
}

JobRecord is the record for the job. Includes the Salesforce ID along with the fields.

type JobType

type JobType string

JobType is the bulk job type.

const (
	// BigObjects is the big objects job.
	BigObjects JobType = "BigObjectIngest"
	// Classic is the bulk job 1.0.
	Classic JobType = "Classic"
	// V2Ingest is the bulk job 2.0.
	V2Ingest JobType = "V2Ingest"
)

type Jobs

type Jobs struct {
	// contains filtered or unexported fields
}

Jobs presents the response from the all jobs request.

func (*Jobs) Done

func (j *Jobs) Done() bool

Done indicates whether there are more jobs to get.

func (*Jobs) Next

func (j *Jobs) Next() (*Jobs, error)

Next will retrieve the next batch of job information.

func (*Jobs) Records

func (j *Jobs) Records() []Response

Records contains the information for each retrieved job.

type LineEnding

type LineEnding string

LineEnding is the line ending used for the CSV job data.

const (
	// Linefeed is the (\n) character.
	Linefeed LineEnding = "LF"
	// CarriageReturnLinefeed is the (\r\n) character.
	CarriageReturnLinefeed LineEnding = "CRLF"
)

type Operation

type Operation string

Operation is the processing operation for the job.

const (
	// Insert is the object operation for inserting records.
	Insert Operation = "insert"
	// Delete is the object operation for deleting records.
	Delete Operation = "delete"
	// Update is the object operation for updating records.
	Update Operation = "update"
	// Upsert is the object operation for upserting records.
	Upsert Operation = "upsert"
)

type Options

type Options struct {
	ColumnDelimiter     ColumnDelimiter `json:"columnDelimiter"`
	ContentType         ContentType     `json:"contentType"`
	ExternalIDFieldName string          `json:"externalIdFieldName"`
	LineEnding          LineEnding      `json:"lineEnding"`
	Object              string          `json:"object"`
	Operation           Operation       `json:"operation"`
}

Options are the options for the job.

ColumnDelimiter is the delimiter used for the CSV job. This field is optional.

ContentType is the content type for the job. This field is optional.

ExternalIDFieldName is the external ID field in the object being updated. Only needed for upsert operations. This field is required for upsert operations.

LineEnding is the line ending used for the CSV job data. This field is optional.

Object is the object type for the data being processed. This field is required.

Operation is the processing operation for the job. This field is required.

type Parameters

type Parameters struct {
	IsPkChunkingEnabled bool
	JobType             JobType
}

Parameters to query all of the bulk jobs.

IsPkChunkingEnabled will filter jobs with PK chunking enabled.

JobType will filter jobs based on job type.

type Record

type Record interface {
	Fields() map[string]interface{}
	InsertNull() bool
}

Record is the interface to the fields of the bulk uploader record.

type Resource

type Resource struct {
	// contains filtered or unexported fields
}

Resource is the structure that can be used to create bulk 2.0 jobs.

func NewResource

func NewResource(session session.ServiceFormatter) (*Resource, error)

NewResource creates a new bulk 2.0 REST resource. If the session is nil an error will be returned.

func (*Resource) AllJobs

func (r *Resource) AllJobs(parameters Parameters) (*Jobs, error)

AllJobs will retrieve all of the bulk 2.0 jobs.

func (*Resource) CreateJob

func (r *Resource) CreateJob(options Options) (*Job, error)

CreateJob will create a new bulk 2.0 job from the options that were passed. The Job that is returned can be used to upload object data to the Salesforce org.

type Response

type Response struct {
	APIVersion          float32 `json:"apiVersion"`
	ColumnDelimiter     string  `json:"columnDelimiter"`
	ConcurrencyMode     string  `json:"concurrencyMode"`
	ContentType         string  `json:"contentType"`
	ContentURL          string  `json:"contentUrl"`
	CreatedByID         string  `json:"createdById"`
	CreatedDate         string  `json:"createdDate"`
	ExternalIDFieldName string  `json:"externalIdFieldName"`
	ID                  string  `json:"id"`
	JobType             string  `json:"jobType"`
	LineEnding          string  `json:"lineEnding"`
	Object              string  `json:"object"`
	Operation           string  `json:"operation"`
	State               string  `json:"state"`
	SystemModstamp      string  `json:"systemModstamp"`
}

Response is the response to job APIs.

type State

type State string

State is the current state of processing for the job.

const (
	// Open the job has been created and job data can be uploaded to the job.
	Open State = "Open"
	// UpdateComplete all data for the job has been uploaded and the job is ready to be queued and processed.
	UpdateComplete State = "UploadComplete"
	// Aborted the job has been aborted.
	Aborted State = "Aborted"
	// JobComplete the job was processed by Salesforce.
	JobComplete State = "JobComplete"
	// Failed some records in the job failed.
	Failed State = "Failed"
)

type SuccessfulRecord

type SuccessfulRecord struct {
	Created bool
	JobRecord
}

SuccessfulRecord indicates whether the record was created and contains the data that was uploaded.

type UnprocessedRecord

type UnprocessedRecord struct {
	Fields map[string]string
}

UnprocessedRecord is the unprocessed records from the job.

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL