Documentation ¶
Index ¶
Constants ¶
View Source
const (
	ModelLlama213BChatV1  = "meta.llama2-13b-chat-v1"
	ModelLlama270BChatV1  = "meta.llama2-70b-chat-v1"
	ModelTitanTextExpress = "amazon.titan-text-express-v1"
	ModelTitanTextLite    = "amazon.titan-text-lite-v1"
	ModelTitanEmbedText   = "amazon.titan-embed-text-v1"
)
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type AI ¶
type AI struct {
	Output       *bedrock.ListFoundationModelsOutput
	CustomOutput *bedrock.ListCustomModelsOutput
	// contains filtered or unexported fields
}
func NewAI ¶
NewAI gets a new AI which uses the default AWS configuration (i.e. ~/.aws/config and standard AWS env vars).
func (*AI) GenerateStream ¶
func (*AI) GetEmbedding ¶
type AWSEmbedModel ¶
type AWSSubModel ¶
type LlamaSubModel ¶
type LlamaSubModel struct{}
func (LlamaSubModel) HandleResponseChunk ¶
func (m LlamaSubModel) HandleResponseChunk(chunkBytes []byte) ([]byte, error)
func (LlamaSubModel) MakeBody ¶
func (m LlamaSubModel) MakeBody(req *aicli.GenerateRequest) ([]byte, error)
type TitanEmbedTextSubModel ¶
type TitanEmbedTextSubModel struct{}
func (TitanEmbedTextSubModel) HandleResponseEmbed ¶
func (m TitanEmbedTextSubModel) HandleResponseEmbed(body []byte) ([]float32, error)
func (TitanEmbedTextSubModel) MakeBodyEmbed ¶
func (m TitanEmbedTextSubModel) MakeBodyEmbed(req *aicli.EmbeddingRequest) ([]byte, error)
type TitanTextSubModel ¶
type TitanTextSubModel struct{}
func (TitanTextSubModel) HandleResponseChunk ¶
func (m TitanTextSubModel) HandleResponseChunk(chunkBytes []byte) ([]byte, error)
func (TitanTextSubModel) MakeBody ¶
func (m TitanTextSubModel) MakeBody(req *aicli.GenerateRequest) ([]byte, error)
Click to show internal directories.
Click to hide internal directories.