chatgpt

package module
v0.0.0-...-d663526
Published: Jan 25, 2024 License: MIT Imports: 10 Imported by: 0

README

Go-ChatGPT

Go-ChatGPT is an open-source Go client for ChatGPT, a large language model trained by OpenAI. With Go-ChatGPT, you can quickly and easily integrate ChatGPT's language processing capabilities into your Go applications.

Features

  • Easy-to-use Go client for ChatGPT
  • Sends text to ChatGPT and receives a response
  • Supports custom models and parameters
  • Supports GPT-3.5 and GPT-4 models

Installation

You can install Go-ChatGPT using Go modules:

go get github.com/hanzei/go-chatgpt

Getting Started

Get your API key from the OpenAI dashboard (https://platform.openai.com/account/api-keys) and export it as an environment variable, for example in your .bashrc or .zshrc:

export OPENAI_KEY=sk...

  1. In your Go code, import the Go-ChatGPT package:

    import (
        "github.com/hanzei/go-chatgpt"
    )
    
  2. Create a new ChatGPT client with your API key

    key := os.Getenv("OPENAI_KEY")
    
    client, err := chatgpt.NewClient(key)
    if err != nil {
    	log.Fatal(err)
    }
    
  2. Use the SimpleSend API to send text to ChatGPT and get a response.

     ctx := context.Background()

     res, err := client.SimpleSend(ctx, "Hello, how are you?")
     if err != nil {
     	// handle error
     }

    The SimpleSend method sends the given text to ChatGPT and returns the response; if the request fails, it returns a non-nil error. A complete example that reads the reply is sketched after these steps.

  3. To use a custom model or parameters, use the Send API.

     ctx := context.Background()

     res, err := client.Send(ctx, &chatgpt.ChatCompletionRequest{
     	Model: chatgpt.GPT35Turbo,
     	Messages: []chatgpt.ChatMessage{
     		{
     			Role:    chatgpt.ChatGPTModelRoleSystem,
     			Content: "Hey, explain GoLang to me in 2 sentences.",
     		},
     	},
     })
     if err != nil {
     	// handle error
     }
    
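Putting the steps together, a minimal program might look like the sketch below. It assumes the first choice of the response carries the reply in Choices[0].Message.Content, as described by the ChatResponse type in the documentation further down.

    package main

    import (
    	"context"
    	"fmt"
    	"log"
    	"os"

    	"github.com/hanzei/go-chatgpt"
    )

    func main() {
    	// Read the API key exported earlier.
    	key := os.Getenv("OPENAI_KEY")

    	client, err := chatgpt.NewClient(key)
    	if err != nil {
    		log.Fatal(err)
    	}

    	res, err := client.SimpleSend(context.Background(), "Hello, how are you?")
    	if err != nil {
    		log.Fatal(err)
    	}

    	// Print the first returned choice, if any.
    	if len(res.Choices) > 0 {
    		fmt.Println(res.Choices[0].Message.Content)
    	}
    }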

Contribute

If you want to contribute to this project, feel free to open a PR or an issue.

License

This package is licensed under the MIT license. See LICENSE for details.

Documentation

Index

Constants

This section is empty.

Variables

var (
	// ErrAPIKeyRequired is returned when the API Key is not provided
	ErrAPIKeyRequired = errors.New("API Key is required")

	// ErrInvalidModel is returned when the model is invalid
	ErrInvalidModel = errors.New("invalid model")

	// ErrNoMessages is returned when no messages are provided
	ErrNoMessages = errors.New("no messages provided")

	// ErrInvalidRole is returned when the role is invalid
	ErrInvalidRole = errors.New("invalid role. Only `user`, `system` and `assistant` are supported")

	// ErrInvalidTemperature is returned when the temperature is invalid
	ErrInvalidTemperature = errors.New("invalid temperature. 0<= temp <= 2")

	// ErrInvalidPresencePenalty is returned when the presence penalty is invalid
	ErrInvalidPresencePenalty = errors.New("invalid presence penalty. -2<= presence penalty <= 2")

	// ErrInvalidFrequencyPenalty is returned when the frequency penalty is invalid
	ErrInvalidFrequencyPenalty = errors.New("invalid frequency penalty. -2<= frequency penalty <= 2")
)
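
These sentinel errors are exported, so callers can test for them with the standard errors package. A minimal sketch, assuming the client and ctx from the Getting Started section above and assuming Send returns these values directly from its request validation:

res, err := client.Send(ctx, &chatgpt.ChatCompletionRequest{
	Model:    chatgpt.GPT35Turbo,
	Messages: nil, // deliberately empty to trigger validation
})
if err != nil {
	switch {
	case errors.Is(err, chatgpt.ErrNoMessages):
		// the request contained no messages
	case errors.Is(err, chatgpt.ErrInvalidModel):
		// the model is not one of the supported ChatGPTModel values
	default:
		// transport or API error
	}
}
_ = res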

Functions

This section is empty.

Types

type ChatCompletionRequest

type ChatCompletionRequest struct {
	// (Required)
	// ID of the model to use.
	Model ChatGPTModel `json:"model"`

	// Required
	// The messages to generate chat completions for
	Messages []ChatMessage `json:"messages"`

	// (Optional - default: 1)
	// What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.
	// We generally recommend altering this or top_p but not both.
	Temperature float64 `json:"temperature,omitempty"`

	// (Optional - default: 1)
	// An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.
	// We generally recommend altering this or temperature but not both.
	Top_P float64 `json:"top_p,omitempty"`

	// (Optional - default: 1)
	// How many chat completion choices to generate for each input message.
	N int `json:"n,omitempty"`

	// (Optional - default: infinite)
	// The maximum number of tokens allowed for the generated answer. By default,
	// the number of tokens the model can return will be (4096 - prompt tokens).
	MaxTokens int `json:"max_tokens,omitempty"`

	// (Optional - default: 0)
	// Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far,
	// increasing the model's likelihood to talk about new topics.
	PresencePenalty float64 `json:"presence_penalty,omitempty"`

	// (Optional - default: 0)
	// Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far,
	// decreasing the model's likelihood to repeat the same line verbatim.
	FrequencyPenalty float64 `json:"frequency_penalty,omitempty"`

	// (Optional)
	// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse
	User string `json:"user,omitempty"`
}
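
For illustration, a request that overrides a few of the optional fields could be built as below; the concrete values are arbitrary, and client/ctx are assumed to exist as in the Getting Started section.

req := &chatgpt.ChatCompletionRequest{
	Model: chatgpt.GPT4,
	Messages: []chatgpt.ChatMessage{
		{Role: chatgpt.ChatGPTModelRoleSystem, Content: "You are a terse assistant."},
		{Role: chatgpt.ChatGPTModelRoleUser, Content: "Summarize Go error handling in one sentence."},
	},
	Temperature: 0.2,              // more deterministic output
	MaxTokens:   256,              // cap the length of the answer
	User:        "example-user-1", // hypothetical end-user identifier
}

res, err := client.Send(ctx, req)
if err != nil {
	// handle error
}
_ = res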

type ChatGPTModel

type ChatGPTModel string
const (
	GPT35Turbo ChatGPTModel = "gpt-3.5-turbo"

	// Deprecated: Use gpt-3.5-turbo-0613 instead, model will discontinue on 09/13/2023
	GPT35Turbo0301 ChatGPTModel = "gpt-3.5-turbo-0301"

	GPT35Turbo0613    ChatGPTModel = "gpt-3.5-turbo-0613"
	GPT35Turbo16k     ChatGPTModel = "gpt-3.5-turbo-16k"
	GPT35Turbo16k0613 ChatGPTModel = "gpt-3.5-turbo-16k-0613"
	GPT4              ChatGPTModel = "gpt-4"

	// Deprecated: Use gpt-4-0613 instead, model will discontinue on 09/13/2023
	GPT4_0314 ChatGPTModel = "gpt-4-0314"

	GPT4_0613 ChatGPTModel = "gpt-4-0613"
	GPT4_32k  ChatGPTModel = "gpt-4-32k"

	// Deprecated: Use gpt-4-32k-0613 instead, model will discontinue on 09/13/2023
	GPT4_32k_0314 ChatGPTModel = "gpt-4-32k-0314"

	GPT4_32k_0613 ChatGPTModel = "gpt-4-32k-0613"
)

type ChatGPTModelRole

type ChatGPTModelRole string
const (
	ChatGPTModelRoleUser      ChatGPTModelRole = "user"
	ChatGPTModelRoleSystem    ChatGPTModelRole = "system"
	ChatGPTModelRoleAssistant ChatGPTModelRole = "assistant"
)

type ChatMessage

type ChatMessage struct {
	Role    ChatGPTModelRole `json:"role"`
	Content string           `json:"content"`
}

type ChatResponse

type ChatResponse struct {
	ID        string               `json:"id"`
	Object    string               `json:"object"`
	CreatedAt int64                `json:"created_at"`
	Choices   []ChatResponseChoice `json:"choices"`
	Usage     ChatResponseUsage    `json:"usage"`
}

type ChatResponseChoice

type ChatResponseChoice struct {
	Index        int         `json:"index"`
	Message      ChatMessage `json:"message"`
	FinishReason string      `json:"finish_reason"`
}

type ChatResponseUsage

type ChatResponseUsage struct {
	Prompt_Tokens     int `json:"prompt_tokens"`
	Completion_Tokens int `json:"completion_tokens"`
	Total_Tokens      int `json:"total_tokens"`
}

type Client

type Client struct {
	// contains filtered or unexported fields
}

func NewClient

func NewClient(apikey string) (*Client, error)

func NewClientWithConfig

func NewClientWithConfig(config *Config) (*Client, error)

func (*Client) CancelFineTuningJob

func (c *Client) CancelFineTuningJob(ctx context.Context, fineTuningJobID string) (*FineTuningJob, error)

CancelFineTuningJob implements https://platform.openai.com/docs/api-reference/fine-tuning/cancel.

func (*Client) CreateFineTuningRequest

func (c *Client) CreateFineTuningRequest(ctx context.Context, req FineTuningRequest) (*FineTuningResponse, error)

CreateFineTuningRequest implements https://platform.openai.com/docs/api-reference/fine-tuning/create.

func (*Client) DeleteFile

func (c *Client) DeleteFile(ctx context.Context, fileID string) (*DeleteFileResponse, error)

DeleteFile implements https://platform.openai.com/docs/api-reference/files/delete.

func (*Client) ListFiles

func (c *Client) ListFiles(ctx context.Context) (*FileList, error)

ListFiles implements https://platform.openai.com/docs/api-reference/files/list.

func (*Client) ListFineTuningEvents

func (c *Client) ListFineTuningEvents(ctx context.Context, fineTuningJobID string, opts *ListOptions) (*FineTuningEventsList, error)

ListFineTuningEvents implements https://platform.openai.com/docs/api-reference/fine-tuning/list-events.

func (*Client) ListFineTuningJobs

func (c *Client) ListFineTuningJobs(ctx context.Context, opts *ListOptions) (*FineTuningList, error)

ListFineTuningJobs implements https://platform.openai.com/docs/api-reference/fine-tuning/list.

func (*Client) RetrieveFile

func (c *Client) RetrieveFile(ctx context.Context, fileID string) (*File, error)

RetrieveFile implements https://platform.openai.com/docs/api-reference/files/retrieve.

func (*Client) RetrieveFileContent

func (c *Client) RetrieveFileContent(ctx context.Context, fileID string) (string, error)

RetrieveFileContent implements https://platform.openai.com/docs/api-reference/files/retrieve-contents.

func (*Client) RetrieveFineTuningJob

func (c *Client) RetrieveFineTuningJob(ctx context.Context, fineTuningJobID string) (*FineTuningJob, error)

RetrieveFineTuningJob implements https://platform.openai.com/docs/api-reference/fine-tuning/retrieve.

func (*Client) Send

func (c *Client) Send(ctx context.Context, req *ChatCompletionRequest) (*ChatResponse, error)

func (*Client) SimpleSend

func (c *Client) SimpleSend(ctx context.Context, message string) (*ChatResponse, error)

func (*Client) UploadFile

func (c *Client) UploadFile(ctx context.Context, file io.Reader, purpose FilePurpose) (*File, error)

UploadFile implements https://platform.openai.com/docs/api-reference/files/create.
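
As a rough sketch, uploading a local JSONL training file for fine-tuning might look like this; the file path is a placeholder, and client/ctx are assumed from the Getting Started section.

f, err := os.Open("training.jsonl") // placeholder path
if err != nil {
	log.Fatal(err)
}
defer f.Close()

file, err := client.UploadFile(ctx, f, chatgpt.FilePurposeFinetune)
if err != nil {
	log.Fatal(err)
}
fmt.Println("uploaded file ID:", file.ID)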

type Config

type Config struct {
	// Base URL for API requests.
	BaseURL string

	// API Key (Required)
	APIKey string

	// Organization ID (Optional)
	OrganizationID string
}
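
A client that talks to a non-default endpoint (for example an internal proxy) can be constructed with NewClientWithConfig. A minimal sketch, with a made-up base URL:

client, err := chatgpt.NewClientWithConfig(&chatgpt.Config{
	BaseURL:        "https://openai-proxy.example.internal/v1", // hypothetical proxy endpoint
	APIKey:         os.Getenv("OPENAI_KEY"),
	OrganizationID: "org-example", // optional
})
if err != nil {
	log.Fatal(err)
}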

type DeleteFileResponse

type DeleteFileResponse struct {
	ID      string `json:"id"`
	Object  string `json:"object"`
	Deleted bool   `json:"deleted"`
}

type File

type File struct {
	ID            string      `json:"id"`
	Object        string      `json:"object"`
	Bytes         int         `json:"bytes"`
	CreatedAt     int         `json:"created_at"`
	Filename      string      `json:"filename"`
	Purpose       FilePurpose `json:"purpose"`
	Status        FileStatus  `json:"status"`         // Deprecated
	StatusDetails string      `json:"status_details"` // Deprecated
}

type FileList

type FileList struct {
	Data   []File `json:"data"`
	Object string `json:"object"`
}

type FilePurpose

type FilePurpose string
const (
	FilePurposeFinetune         FilePurpose = "fine-tune"
	FilePurposeFinetuneResults  FilePurpose = "fine-tune-results"
	FilePurposeAssistants       FilePurpose = "assistants"
	FilePurposeAssistantsOutput FilePurpose = "assistants_output"
)

type FileStatus

type FileStatus string
const (
	FilestatusUploaded  FileStatus = "uploaded"
	FilestatusProcessed FileStatus = "processed"
	FilestatusError     FileStatus = "error"
)

type FineTuningEvent

type FineTuningEvent struct {
	Object    string `json:"object"`
	ID        string `json:"id"`
	CreatedAt int    `json:"created_at"`
	Level     string `json:"level"`
	Message   string `json:"message"`
	Data      any    `json:"data,omitempty"`
	Type      string `json:"type"`
}

type FineTuningEventsList

type FineTuningEventsList struct {
	Object  string            `json:"object"`
	Data    []FineTuningEvent `json:"data"`
	HasMore bool              `json:"has_more"`
}

TODO: Use generics to create an abstract List type.

type FineTuningJob

type FineTuningJob struct {
	ID        string `json:"id"`
	CreatedAt int    `json:"created_at"`
	Error     struct {
		Code    string `json:"code"`
		Message string `json:"message"`
		Param   string `json:"param,omitempty"`
	} `json:"error,omitempty"`
	FineTunedModel  string `json:"fine_tuned_model,omitempty"`
	FinishedAt      int    `json:"finished_at,omitempty"`
	Hyperparameters struct {
		NEpochs int `json:"n_epochs"`
	} `json:"hyperparameters,omitempty"`
	Model          string              `json:"model,omitempty"`
	Object         string              `json:"object"`
	OrganizationID string              `json:"organization_id"`
	ResultFiles    []string            `json:"result_files"`
	Status         FineTuningJobStatus `json:"status"`
	TrainedTokens  int                 `json:"trained_tokens,omitempty"`
	TrainingFile   string              `json:"training_file"`
	ValidationFile string              `json:"validation_file,omitempty"`
}

type FineTuningJobStatus

type FineTuningJobStatus string
const (
	FineTuningJobStatusValidatingFile FineTuningJobStatus = "validating_files"
	FineTuningJobStatusQueued         FineTuningJobStatus = "queued"
	FineTuningJobStatusRunning        FineTuningJobStatus = "running"
	FineTuningJobStatusSucceeded      FineTuningJobStatus = "succeeded"
	FineTuningJobStatusFailed         FineTuningJobStatus = "failed"
	FineTuningJobStatusCancelled      FineTuningJobStatus = "cancelled"
)

type FineTuningList

type FineTuningList struct {
	Object  string          `json:"object"`
	Data    []FineTuningJob `json:"data"`
	HasMore bool            `json:"has_more"`
}

TODO: Use generics to create an abstract List type.

type FineTuningRequest

type FineTuningRequest struct {
	Model           ChatGPTModel `json:"model"`
	TrainingFile    string       `json:"training_file"`
	Hyperparameters struct {
		NEpochs int `json:"n_epochs,omitempty"` // Optional
	} `json:"hyperparameters,omitempty"` // Optional
	Suffix         string `json:"suffix,omitempty"`          // Optional
	ValidationFile string `json:"validation_file,omitempty"` // Optional
}
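
Combined with UploadFile above, a fine-tuning job might be created and later checked roughly as follows; the training file ID is assumed to come from a prior UploadFile call.

ftRes, err := client.CreateFineTuningRequest(ctx, chatgpt.FineTuningRequest{
	Model:        chatgpt.GPT35Turbo,
	TrainingFile: file.ID,         // ID returned by UploadFile
	Suffix:       "my-experiment", // optional, appears in the fine-tuned model name
})
if err != nil {
	log.Fatal(err)
}

// Poll the job status later.
job, err := client.RetrieveFineTuningJob(ctx, ftRes.ID)
if err != nil {
	log.Fatal(err)
}
fmt.Println("status:", job.Status)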

type FineTuningResponse

type FineTuningResponse struct {
	Object         string `json:"object"`
	ID             string `json:"id"`
	Model          string `json:"model"`
	CreatedAt      int    `json:"created_at"`
	FineTunedModel any    `json:"fine_tuned_model"`
	OrganizationID string `json:"organization_id"`
	ResultFiles    []any  `json:"result_files"`
	Status         string `json:"status"`
	ValidationFile any    `json:"validation_file"`
	TrainingFile   string `json:"training_file"`
}

type ListOptions

type ListOptions struct {
	After *string // Identifier for the last event from the previous pagination request.
	Limit *int    // Number of events to retrieve. Defaults to 20.
}

func (*ListOptions) Encode

func (opts *ListOptions) Encode() string
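
A sketch of paging through fine-tuning jobs with ListOptions; the page size is arbitrary, and using the last job's ID as the After cursor is an assumption based on the field comment above.

limit := 10
var after *string

for {
	page, err := client.ListFineTuningJobs(ctx, &chatgpt.ListOptions{
		After: after,
		Limit: &limit,
	})
	if err != nil {
		log.Fatal(err)
	}
	for _, job := range page.Data {
		fmt.Println(job.ID, job.Status)
	}
	if !page.HasMore || len(page.Data) == 0 {
		break
	}
	last := page.Data[len(page.Data)-1].ID
	after = &last
}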
