package bedrock

v0.0.0-...-d6c7bd1
Published: Apr 30, 2024 | License: Apache-2.0 | Imports: 14 | Imported by: 1

Documentation

Constants

const (
	ModelAnthorpicClaudeV2        = "anthropic.claude-v2"
	ModelAnthorpicClaudeInstantV1 = "anthropic.claude-instant-v1"
	ModelAmazonTitanTextLiteV1    = "amazon.titan-text-lite-v1"
	ModelAmazonTitanTextExpressV1 = "amazon.titan-text-express-v1"
	AI21LabsJurassic2UltraV1      = "ai21.j2-ultra-v1"
	AI21LabsJurassic2MidV1        = "ai21.j2-mid-v1"
	MetaLlama2Chat13BV1           = "meta.llama2-13b-chat-v1"
	CohereCommandTextV14          = "cohere.command-text-v14"
	CohereCommandLightTextV14     = "cohere.command-light-text-v14"
)

Variables

var (
	BedrockRegions = []string{"us-east-1", "us-west-2", "ap-southeast-1", "ap-northeast-1", "eu-central-1"}

	ErrBedrockNoActiveModel error
)
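Before building an integration it can be worth confirming that the requested region is one the package supports, since BedrockRegions is the exported list of supported regions. A minimal sketch; the import path is a placeholder and should be adjusted to wherever this bedrock package lives:

package main

import (
	"fmt"
	"slices"

	// NOTE: placeholder import path; adjust to this package's real module path.
	"example.com/yourproject/pkg/bedrock"
)

func main() {
	region := "eu-central-1"

	// BedrockRegions lists the regions this integration supports.
	if !slices.Contains(bedrock.BedrockRegions, region) {
		fmt.Printf("region %q is not supported by the Bedrock integration\n", region)
		return
	}
	fmt.Printf("region %q is supported\n", region)
}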
var (
	BedrockModelBody = map[string]map[string]interface{}{
		ModelAnthorpicClaudeV2: {
			"prompt":               "\n\nHuman: ",
			"max_tokens_to_sample": 300,
			"temperature":          modelTemperature,
			"top_k":                250,
			"top_p":                1,
			"stop_sequences":       []string{"\n\nHuman:"},
			"anthropic_version":    "bedrock-2023-05-31",
		},
		ModelAnthorpicClaudeInstantV1: {
			"prompt":               "\n\nHuman: ",
			"max_tokens_to_sample": 300,
			"temperature":          modelTemperature,
			"top_k":                250,
			"top_p":                1,
			"stop_sequences":       []string{"\n\nHuman:"},
			"anthropic_version":    "bedrock-2023-05-31",
		},
		ModelAmazonTitanTextLiteV1: {
			"inputText": "",
			"textGenerationConfig": map[string]interface{}{
				"maxTokenCount": 4096,
				"stopSequences": []string{},
				"temperature":   modelTemperature,
				"topP":          1,
			},
		},
		ModelAmazonTitanTextExpressV1: {
			"inputText": "",
			"textGenerationConfig": map[string]interface{}{
				"maxTokenCount": 8192,
				"stopSequences": []string{},
				"temperature":   modelTemperature,
				"topP":          1,
			},
		},
		AI21LabsJurassic2UltraV1: {
			"prompt":        "\n",
			"maxTokens":     200,
			"temperature":   modelTemperature,
			"topP":          1,
			"stopSequences": []string{},
			"countPenalty": map[string]int{
				"scale": 0,
			},
			"presencePenalty": map[string]int{
				"scale": 0,
			},
			"frequencyPenalty": map[string]int{
				"scale": 0,
			},
		},
		AI21LabsJurassic2MidV1: {
			"prompt":        "\n",
			"maxTokens":     200,
			"temperature":   modelTemperature,
			"topP":          1,
			"stopSequences": []string{},
			"countPenalty": map[string]int{
				"scale": 0,
			},
			"presencePenalty": map[string]int{
				"scale": 0,
			},
			"frequencyPenalty": map[string]int{
				"scale": 0,
			},
		},
		MetaLlama2Chat13BV1: {
			"prompt":      "",
			"max_gen_len": 2048,
			"temperature": modelTemperature,
			"top_p":       0.9,
		},
		CohereCommandTextV14: {
			"prompt":      "",
			"max_tokens":  200,
			"temperature": modelTemperature,
		},
		CohereCommandLightTextV14: {
			"prompt":      "",
			"max_tokens":  200,
			"temperature": modelTemperature,
		},
	}

	BedrockModelBodyInputKey = map[string]string{
		ModelAnthorpicClaudeV2:        "prompt",
		ModelAnthorpicClaudeInstantV1: "prompt",
		ModelAmazonTitanTextLiteV1:    "inputText",
		ModelAmazonTitanTextExpressV1: "inputText",
		AI21LabsJurassic2UltraV1:      "prompt",
		AI21LabsJurassic2MidV1:        "prompt",
		MetaLlama2Chat13BV1:           "prompt",
		CohereCommandTextV14:          "prompt",
		CohereCommandLightTextV14:     "prompt",
	}

	BedrockModelBodyInputSuffix = map[string]string{
		ModelAnthorpicClaudeV2:        "\n\nAssistant:",
		ModelAnthorpicClaudeInstantV1: "\n\nAssistant:",
		ModelAmazonTitanTextLiteV1:    "",
		ModelAmazonTitanTextExpressV1: "",
		AI21LabsJurassic2UltraV1:      "",
		AI21LabsJurassic2MidV1:        "",
		MetaLlama2Chat13BV1:           "",
		CohereCommandTextV14:          "",
		CohereCommandLightTextV14:     "",
	}
)
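Taken together, these maps describe how a raw request body appears to be assembled for a given model: start from the BedrockModelBody template, write the user text into the field named by BedrockModelBodyInputKey (keeping any prefix already present in the template, such as "\n\nHuman: " for the Claude models), append BedrockModelBodyInputSuffix, and JSON-encode the result. A sketch of that composition; the buildRequestBody helper and the import path are illustrative, not part of this package:

package main

import (
	"encoding/json"
	"fmt"

	// NOTE: placeholder import path; adjust to this package's real module path.
	"example.com/yourproject/pkg/bedrock"
)

// buildRequestBody is an illustrative helper: it copies the model's template,
// writes the user text into the prompt field (keeping the template's prefix)
// and appends the model's suffix.
func buildRequestBody(modelID, userText string) ([]byte, error) {
	template, ok := bedrock.BedrockModelBody[modelID]
	if !ok {
		return nil, fmt.Errorf("no request template for model %q", modelID)
	}

	// Shallow copy so the package-level template map is not mutated.
	body := make(map[string]interface{}, len(template))
	for k, v := range template {
		body[k] = v
	}

	key := bedrock.BedrockModelBodyInputKey[modelID]
	prefix, _ := body[key].(string) // e.g. "\n\nHuman: " for the Claude models
	body[key] = prefix + userText + bedrock.BedrockModelBodyInputSuffix[modelID]

	return json.Marshal(body)
}

func main() {
	payload, err := buildRequestBody(bedrock.ModelAnthorpicClaudeV2, "Summarise this alert in one sentence.")
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(string(payload))
}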

Functions

func CheckBedrockModelAvailability

func CheckBedrockModelAvailability() (bool, error)
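CheckBedrockModelAvailability takes no arguments, which makes it suitable as a pre-flight check before exposing the integration. A minimal sketch, assuming the boolean reports whether at least one Bedrock model is usable (the import path is a placeholder):

package main

import (
	"log"

	// NOTE: placeholder import path; adjust to this package's real module path.
	"example.com/yourproject/pkg/bedrock"
)

func main() {
	available, err := bedrock.CheckBedrockModelAvailability()
	if err != nil {
		log.Fatalf("failed to check Bedrock model availability: %v", err)
	}
	if !available {
		log.Println("no active Bedrock model found")
		return
	}
	log.Println("at least one Bedrock model is available")
}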

func ListBedrockModels

func ListBedrockModels(region string) ([]model.AddGenerativeAiIntegrationRequest, error)

ListBedrockModels fetches the enabled Bedrock models in the given region using IAM roles.
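A sketch of listing the enabled models in a supported region; the fields of model.AddGenerativeAiIntegrationRequest are not shown in this doc, so the result is only counted here (the import path is a placeholder):

package main

import (
	"fmt"
	"log"

	// NOTE: placeholder import path; adjust to this package's real module path.
	"example.com/yourproject/pkg/bedrock"
)

func main() {
	// The call relies on IAM roles, so the process needs an AWS identity
	// (instance profile, IRSA, etc.) permitted to list Bedrock models.
	requests, err := bedrock.ListBedrockModels("us-east-1")
	if err != nil {
		log.Fatalf("failed to list Bedrock models: %v", err)
	}
	fmt.Printf("found %d enabled Bedrock model(s) in us-east-1\n", len(requests))
}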

Types

type Bedrock

type Bedrock struct {
	common.GenerativeAiIntegrationCommon
	AWSAccessKey string `json:"aws_access_key" validate:"omitempty,min=16,max=128"`
	AWSSecretKey string `json:"aws_secret_key" validate:"omitempty,min=16,max=128"`
	AWSRegion    string `` /* 670-byte string literal not displayed */
	UseIAMRole   bool   `json:"use_iam_role"`
	ModelID      string `` /* 490-byte string literal not displayed */
}

func NewFromDBEntry

func NewFromDBEntry(ctx context.Context, config json.RawMessage) (*Bedrock, error)
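NewFromDBEntry hydrates a *Bedrock from a stored JSON configuration whose keys follow the struct tags above. A sketch with a placeholder import path; the "aws_region" and "model_id" key names are assumptions, since those struct tags are truncated in this doc:

package main

import (
	"context"
	"encoding/json"
	"log"

	// NOTE: placeholder import path; adjust to this package's real module path.
	"example.com/yourproject/pkg/bedrock"
)

func main() {
	// Key names follow the json tags on the Bedrock struct; "aws_region" and
	// "model_id" are assumptions because those tags are not displayed.
	config := json.RawMessage(`{
		"use_iam_role": true,
		"aws_region":   "us-east-1",
		"model_id":     "anthropic.claude-v2"
	}`)

	b, err := bedrock.NewFromDBEntry(context.Background(), config)
	if err != nil {
		log.Fatalf("failed to build Bedrock integration from config: %v", err)
	}
	log.Printf("using model %s in region %s", b.ModelID, b.AWSRegion)
}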

func (*Bedrock) DecryptSecret

func (b *Bedrock) DecryptSecret(aes encryption.AES) error

func (*Bedrock) EncryptSecret

func (b *Bedrock) EncryptSecret(aes encryption.AES) error

func (*Bedrock) Message

func (b *Bedrock) Message(ctx context.Context, message string, dataChan chan string) error
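Message streams the reply in fragments over dataChan rather than returning it, so a consumer typically reads the channel while the call runs. A sketch under the same assumptions as above (placeholder import path and assumed JSON keys); it signals completion through a separate channel because this doc does not state whether Message closes dataChan:

package main

import (
	"context"
	"encoding/json"
	"fmt"
	"log"

	// NOTE: placeholder import path; adjust to this package's real module path.
	"example.com/yourproject/pkg/bedrock"
)

func main() {
	// Placeholder config; see NewFromDBEntry above for the assumed key names.
	b, err := bedrock.NewFromDBEntry(context.Background(),
		json.RawMessage(`{"use_iam_role":true,"aws_region":"us-east-1","model_id":"anthropic.claude-v2"}`))
	if err != nil {
		log.Fatalf("failed to build Bedrock integration: %v", err)
	}

	dataChan := make(chan string)
	done := make(chan error, 1)

	// Run the request in the background; completion is signalled via done
	// because this doc does not say whether Message closes dataChan itself.
	go func() {
		done <- b.Message(context.Background(), "Explain this finding in one paragraph.", dataChan)
	}()

	for {
		select {
		case chunk := <-dataChan:
			fmt.Print(chunk) // streamed fragment of the model's reply
		case err := <-done:
			if err != nil {
				log.Fatalf("Bedrock request failed: %v", err)
			}
			fmt.Println()
			return
		}
	}
}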

func (*Bedrock) ValidateConfig

func (b *Bedrock) ValidateConfig(validate *validator.Validate) error

func (*Bedrock) VerifyAuth

func (b *Bedrock) VerifyAuth(ctx context.Context) error
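ValidateConfig and VerifyAuth split checking into a local struct validation and a live credentials check. A sketch of running both before saving an integration; the validator is assumed to be github.com/go-playground/validator/v10 (matching the *validator.Validate parameter), and the import path and JSON keys are placeholders as before:

package main

import (
	"context"
	"encoding/json"
	"log"

	"github.com/go-playground/validator/v10"

	// NOTE: placeholder import path; adjust to this package's real module path.
	"example.com/yourproject/pkg/bedrock"
)

func main() {
	// Placeholder config; see NewFromDBEntry above for the assumed key names.
	b, err := bedrock.NewFromDBEntry(context.Background(),
		json.RawMessage(`{"use_iam_role":true,"aws_region":"us-east-1","model_id":"anthropic.claude-v2"}`))
	if err != nil {
		log.Fatalf("failed to build Bedrock integration: %v", err)
	}

	// Local validation of the struct fields (tags such as omitempty/min/max).
	if err := b.ValidateConfig(validator.New()); err != nil {
		log.Fatalf("invalid Bedrock configuration: %v", err)
	}

	// Live check that the credentials (or IAM role) can actually reach Bedrock.
	if err := b.VerifyAuth(context.Background()); err != nil {
		log.Fatalf("Bedrock authentication failed: %v", err)
	}

	log.Println("Bedrock integration configured and authenticated")
}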

type BedrockResponse

type BedrockResponse struct {
	Completion           string `json:"completion"`
	StopReason           string `json:"stop_reason"`
	Stop                 string `json:"stop"`
	Generation           string `json:"generation"`
	PromptTokenCount     int    `json:"prompt_token_count"`
	GenerationTokenCount int    `json:"generation_token_count"`
	Completions          []struct {
		Data struct {
			Text string `json:"text"`
		} `json:"data"`
		FinishReason struct {
			Reason string `json:"reason"`
		} `json:"finishReason"`
	} `json:"completions"`
	Generations []struct {
		FinishReason string `json:"finish_reason"`
		ID           string `json:"id"`
		Text         string `json:"text"`
	} `json:"generations"`
	Results []struct {
		TokenCount       int    `json:"tokenCount"`
		OutputText       string `json:"outputText"`
		CompletionReason string `json:"completionReason"`
	} `json:"results"`
}
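BedrockResponse is a union of the response shapes the different model families return, so callers unmarshal into it and then read the field that matches the model in use: Completion for the Claude models, Results for the Titan models, Completions for Jurassic-2, Generation for Llama 2 and Generations for Cohere. A sketch of that dispatch; extractText and the import path are illustrative only:

package main

import (
	"encoding/json"
	"fmt"
	"log"

	// NOTE: placeholder import path; adjust to this package's real module path.
	"example.com/yourproject/pkg/bedrock"
)

// extractText is an illustrative helper that picks the populated field for
// the model family that produced the response.
func extractText(modelID string, raw []byte) (string, error) {
	var resp bedrock.BedrockResponse
	if err := json.Unmarshal(raw, &resp); err != nil {
		return "", err
	}

	switch modelID {
	case bedrock.ModelAnthorpicClaudeV2, bedrock.ModelAnthorpicClaudeInstantV1:
		return resp.Completion, nil
	case bedrock.ModelAmazonTitanTextLiteV1, bedrock.ModelAmazonTitanTextExpressV1:
		if len(resp.Results) > 0 {
			return resp.Results[0].OutputText, nil
		}
	case bedrock.AI21LabsJurassic2UltraV1, bedrock.AI21LabsJurassic2MidV1:
		if len(resp.Completions) > 0 {
			return resp.Completions[0].Data.Text, nil
		}
	case bedrock.MetaLlama2Chat13BV1:
		return resp.Generation, nil
	case bedrock.CohereCommandTextV14, bedrock.CohereCommandLightTextV14:
		if len(resp.Generations) > 0 {
			return resp.Generations[0].Text, nil
		}
	}
	return "", fmt.Errorf("no completion text found for model %q", modelID)
}

func main() {
	// Example Anthropic-style payload for illustration.
	raw := []byte(`{"completion":" Hello!","stop_reason":"stop_sequence","stop":"\n\nHuman:"}`)
	text, err := extractText(bedrock.ModelAnthorpicClaudeV2, raw)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(text)
}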
