Documentation ¶
Index ¶
- Constants
- func AddFileMessage(conv *Conversation, path string) error
- func AddResource(conv *Conversation, path string, pathType string) error
- func AddURLReference(conv *Conversation, path string) error
- func EstimateMessageTokens(m openai.ChatCompletionMessage) (int, error)
- func GetModelProvider(name string) (string, error)
- func ManageRAG(conv *Conversation, userInput string) (string, []string, error)
- func MustCheckConnection(client *openai.Client)
- func MustLoadAPIKey(openai bool, anyscale bool) error
- type AnyscaleModel
- type Client
- func (c *Client) SendCompletionRequest(ctx context.Context, conv *Conversation, userPrompt string) (string, error)
- func (c *Client) SendStreamRequest(ctx context.Context, conv *Conversation, userPrompt string, ...)
- func (c *Client) SetModel(model string)
- func (c *Client) SetTemperature(temp float32)
- type Conversation
- type OpenAIModel
Constants ¶
const (
	Mistral7BInstruct   AnyscaleModel = "mistralai/Mistral-7B-Instruct-v0.1"
	Llama27bChat        AnyscaleModel = "meta-llama/Llama-2-7b-chat-hf"
	Llama213bChat       AnyscaleModel = "meta-llama/Llama-2-13b-chat-hf"
	Llama270bChat       AnyscaleModel = "meta-llama/Llama-2-70b-chat-hf"
	Mixtral8x7BInstruct AnyscaleModel = "mistralai/Mixtral-8x7B-Instruct-v0.1"
	CodeLlama34b        AnyscaleModel = "codellama/CodeLlama-34b-Instruct-hf"
	CodeLlama70b        AnyscaleModel = "codellama/CodeLlama-70b-Instruct-hf"

	GPT35Turbo       OpenAIModel = "gpt-3.5-turbo"
	GPT4TurboPreview OpenAIModel = "gpt-4-turbo-preview"
	GPT4Turbo        OpenAIModel = "gpt-4-turbo"
)
const (
	/*
		04/12/2024 Pricing:
		gpt-3.5-turbo-0125
			Input:  $0.50 / 1M tokens
			Output: $1.50 / 1M tokens
		gpt-4-turbo-2024-04-09
			Input:  $10.00 / 1M tokens
			Output: $30.00 / 1M tokens
		Mixtral-8x7B-Instruct-v0.1
			Input:  $0.50 / 1M tokens
			Output: $0.50 / 1M tokens
	*/
	DefaultMaxTokens = 100000 // $0.05, $0.15 | $1.00, $3.00
)
Variables ¶
This section is empty.
Functions ¶
func AddFileMessage ¶ added in v0.4.2
func AddFileMessage(conv *Conversation, path string) error
func AddResource ¶ added in v0.4.2
func AddResource(conv *Conversation, path string, pathType string) error
Generate a resource message based on the path and type; return the message to append to the conversation.
func AddURLReference ¶ added in v0.4.2
func AddURLReference(conv *Conversation, path string) error
func EstimateMessageTokens ¶ added in v0.4.0
Estimate the number of tokens in the message using the OpenAI tokenizer
func GetModelProvider ¶ added in v0.4.3
func ManageRAG ¶ added in v0.4.0
func ManageRAG(conv *Conversation, userInput string) (string, []string, error)
Determine if the user's input contains a resource command. There is usually some limit to the number of tokens.
func MustCheckConnection ¶
func MustLoadAPIKey ¶
Ensure we have the right env variables set for the given source
Types ¶
type AnyscaleModel ¶
type AnyscaleModel string
func IsAnyscaleModel ¶
func IsAnyscaleModel(name string) (AnyscaleModel, bool)
func (AnyscaleModel) String ¶
func (a AnyscaleModel) String() string
type Client ¶
func MustConnectAnyscale ¶
func MustConnectAnyscale(model AnyscaleModel, temperature float32) *Client
func MustConnectOpenAI ¶
func MustConnectOpenAI(model OpenAIModel, temperature float32) *Client
func (*Client) SendCompletionRequest ¶
func (c *Client) SendCompletionRequest(ctx context.Context, conv *Conversation, userPrompt string) (string, error)
Waits for the entire response to be returned. Adds the user's request, and the response, to the conversation.
func (*Client) SendStreamRequest ¶
func (c *Client) SendStreamRequest(ctx context.Context, conv *Conversation, userPrompt string, responseChan chan string, errChan chan error)
Stream the response as it comes in. Adds the user's request, and the response, to the conversation.
func (*Client) SetTemperature ¶
type Conversation ¶
type Conversation struct {
	Messages   []openai.ChatCompletionMessage
	TokenCount int
	MaxTokens  int
	RagEnabled bool
	*sync.Mutex
	// contains filtered or unexported fields
}
func NewConversation ¶
func NewConversation(systemPrompt string, maxTokens int, ragEnabled bool) *Conversation
Start a new conversation with the system prompt. A system prompt defines the initial context of the conversation. This includes the persona of the bot and any information that you want to provide to the model.
func (*Conversation) AddReference ¶ added in v0.4.2
func (c *Conversation) AddReference(id string, content string) error
Add a reference to the conversation
func (*Conversation) Append ¶
func (c *Conversation) Append(m openai.ChatCompletionMessage) error
func (*Conversation) SeedConversation ¶
func (c *Conversation) SeedConversation(requestResponseMap map[string]string)
type OpenAIModel ¶
type OpenAIModel string
func IsOpenAIModel ¶
func IsOpenAIModel(name string) (OpenAIModel, bool)
func (OpenAIModel) String ¶
func (o OpenAIModel) String() string