Documentation ¶
Overview ¶
Package http implements an HTTP server.
Index ¶
- Constants
- func AbortErr(ctx *gin.Context, err error) bool
- func Call(name, args string) (string, error)
- func Chat(ctx *gin.Context)
- func ChatHandler(ctx *gin.Context)
- func ChatModel() string
- func CopyHeader(to, from http.Header)
- func CountVisionImagePrice(width int, height int, resolution VisionImageResolution) (int, error)
- func DownloadUserConfig(ctx *gin.Context)
- func DrawByDalleHandler(ctx *gin.Context)
- func DrawByLcmHandler(ctx *gin.Context)
- func DrawBySdxlturboHandler(ctx *gin.Context)
- func GetCurrentUser(ctx *gin.Context)
- func GetCurrentUserQuota(ctx *gin.Context)
- func GetUserInternalBill(ctx context.Context, user *config.UserConfig, billType db.BillingType) (bill *db.Billing, err error)
- func OneShotChatHandler(gctx *gin.Context)
- func OneapiProxyHandler(ctx *gin.Context)
- func OneshotChat(ctx context.Context, user *config.UserConfig, systemPrompt, userPrompt string) (answer string, err error)
- func PaymentHandler(c *gin.Context)
- func PaymentStaticHandler(c *gin.Context)
- func RamjetProxyHandler(ctx *gin.Context)
- func RegisterStatic(g gin.IRouter)
- func SaveLlmConservationHandler(ctx *gin.Context)
- func SetupHTTPCli() (err error)
- func Tiktoken() *tiktoken.Tiktoken
- func UploadFiles(ctx *gin.Context)
- func UploadUserConfig(ctx *gin.Context)
- type AzureCreateImageResponse
- type DrawImageByImageRequest
- type DrawImageByLcmRequest
- type DrawImageByLcmResponse
- type DrawImageBySdxlturboRequest
- type DrawImageBySdxlturboResponse
- type DrawImageByTextRequest
- type ExternalBillingUserResponse
- type ExternalBillingUserStatus
- type FrontendReq
- type FrontendReqMessage
- type LLMConservationReq
- type OneShotChatRequest
- type OpenaiChatReq
- type OpenaiChatReqTool
- type OpenaiChatReqToolFunction
- type OpenaiChatReqToolLocation
- type OpenaiChatReqToolParameters
- type OpenaiChatReqToolProperties
- type OpenaiChatReqToolUnit
- type OpenaiCompletionReq
- type OpenaiCompletionResp
- type OpenaiCompletionStreamResp
- type OpenaiCompletionStreamRespChoice
- type OpenaiCompletionStreamRespDelta
- type OpenaiCompletionStreamRespToolCall
- type OpenaiCreateImageRequest
- type OpenaiCreateImageResponse
- type OpenaiMessageRole
- type OpenaiReqMessage
- type OpenaiVisionMessageContent
- type OpenaiVisionMessageContentImageUrl
- type OpenaiVisionMessageContentType
- type UserQueryType
- type VisionImageResolution
Constants ¶
const ( // OpenaiMessageRoleSystem system message OpenaiMessageRoleSystem = "system" // OpenaiMessageRoleUser user message OpenaiMessageRoleUser = "user" // OpenaiMessageRoleAI ai message OpenaiMessageRoleAI = "assistant" )
const VisionTokenPrice = 5000
VisionTokenPrice is the vision token price ($ per 500000 tokens).
Variables ¶
This section is empty.
Functions ¶
func CountVisionImagePrice ¶
func CountVisionImagePrice(width int, height int, resolution VisionImageResolution) (int, error)
CountVisionImagePrice counts vision image tokens.
func DownloadUserConfig ¶
func DrawByDalleHandler ¶
func DrawByLcmHandler ¶
func DrawBySdxlturboHandler ¶
func GetCurrentUserQuota ¶
func GetUserInternalBill ¶
func GetUserInternalBill(ctx context.Context, user *config.UserConfig, billType db.BillingType) ( bill *db.Billing, err error)
GetUserInternalBill gets the user's internal bill.
func OneShotChatHandler ¶
OneShotChatHandler handles a one-shot chat request.
func OneapiProxyHandler ¶
OneapiProxyHandler proxies requests to the oneapi URL.
func OneshotChat ¶
func OneshotChat(ctx context.Context, user *config.UserConfig, systemPrompt, userPrompt string) (answer string, err error)
OneshotChat gets an AI response from gpt-3.5-turbo.
Args: ¶
- systemPrompt: system prompt
- userPrompt: user prompt
func PaymentHandler ¶
func PaymentStaticHandler ¶
func RamjetProxyHandler ¶
RamjetProxyHandler proxies requests to the ramjet URL.
func SaveLlmConservationHandler ¶
SaveLlmConservationHandler saves an LLM conversation.
func Tiktoken ¶
func Tiktoken() *tiktoken.Tiktoken
Tiktoken returns a tiktoken instance; it may be nil if not found.
func UploadUserConfig ¶
Types ¶
type AzureCreateImageResponse ¶
type AzureCreateImageResponse struct { Created int64 `json:"created"` Data []struct { RevisedPrompt string `json:"revised_prompt"` Url string `json:"url"` } `json:"data"` }
AzureCreateImageResponse is the response returned from the Azure image API.
type DrawImageByImageRequest ¶
type DrawImageByImageRequest struct { Prompt string `json:"prompt" binding:"required,min=1"` Model string `json:"model" binding:"required,min=1"` ImageBase64 string `json:"image_base64" binding:"required,min=1"` }
DrawImageByImageRequest is a request to draw an image from an image and a prompt.
type DrawImageByLcmRequest ¶
type DrawImageByLcmRequest struct { // Data consist of 6 strings: // 1. prompt, // 2. base64 encoded image with fixed prefix "data:image/png;base64," // 3. steps // 4. cfg // 5. sketch strength // 6. seed Data [6]any `json:"data"` FnIndex int `json:"fn_index"` }
DrawImageByLcmRequest is a request to draw an image from an image and a prompt using LCM.
type DrawImageByLcmResponse ¶
type DrawImageByLcmResponse struct { // Data base64 encoded image with fixed prefix "data:image/png;base64," Data []string `json:"data"` IsGenerating bool `json:"is_generating"` Duration float64 `json:"duration"` AverageDuration float64 `json:"average_duration"` }
DrawImageByLcmResponse is the response to drawing an image from an image and a prompt using LCM.
type DrawImageBySdxlturboResponse ¶
type DrawImageBySdxlturboResponse struct {
B64Images []string `json:"images"`
}
type DrawImageByTextRequest ¶
type DrawImageByTextRequest struct { Prompt string `json:"prompt" binding:"required,min=1"` Model string `json:"model" binding:"required,min=1"` }
DrawImageByTextRequest is a request to draw an image from a text prompt.
type ExternalBillingUserResponse ¶
type ExternalBillingUserResponse struct { Data struct { Status ExternalBillingUserStatus `json:"status"` RemainQuota db.Price `json:"remain_quota"` } `json:"data"` }
ExternalBillingUserResponse is the response returned from the external billing API.
func GetUserExternalBillingQuota ¶
func GetUserExternalBillingQuota(ctx context.Context, user *config.UserConfig) ( externalBalanceResp *ExternalBillingUserResponse, err error)
GetUserExternalBillingQuota gets the user's external billing quota.
type ExternalBillingUserStatus ¶
type ExternalBillingUserStatus int
ExternalBillingUserStatus user status
const ( // ExternalBillingUserStatusActive active ExternalBillingUserStatusActive ExternalBillingUserStatus = 1 )
type FrontendReq ¶
type FrontendReq struct { Model string `json:"model"` MaxTokens uint `json:"max_tokens"` Messages []FrontendReqMessage `json:"messages,omitempty"` PresencePenalty float64 `json:"presence_penalty"` FrequencyPenalty float64 `json:"frequency_penalty"` Stream bool `json:"stream"` Temperature float64 `json:"temperature"` TopP float64 `json:"top_p"` N int `json:"n"` // LaiskyExtra some special config for laisky LaiskyExtra *struct { ChatSwitch struct { // DisableHttpsCrawler disable https crawler DisableHttpsCrawler bool `json:"disable_https_crawler"` // EnableGoogleSearch enable google search EnableGoogleSearch bool `json:"enable_google_search"` } `json:"chat_switch"` } `json:"laisky_extra,omitempty"` }
FrontendReq request from frontend
func (*FrontendReq) PromptTokens ¶
func (r *FrontendReq) PromptTokens() (n int)
PromptTokens counts prompt tokens.
type FrontendReqMessage ¶
type FrontendReqMessage struct { Role OpenaiMessageRole `json:"role"` Content string `json:"content"` // Files send files with message Files []frontendReqMessageFiles `json:"files"` }
FrontendReqMessage request message from frontend
type LLMConservationReq ¶
type LLMConservationReq struct { Model string `json:"model" binding:"required,min=1"` MaxTokens uint `json:"max_tokens" binding:"required,min=1"` Messages []FrontendReqMessage `json:"messages" binding:"required,min=1"` Response string `json:"response" binding:"required,min=1"` }
type OneShotChatRequest ¶
type OneShotChatRequest struct { SystemPrompt string `json:"system_prompt"` UserPrompt string `json:"user_prompt" binding:"required,min=1"` }
OneShotChatRequest request to one-shot chat api
type OpenaiChatReq ¶
type OpenaiChatReq[T string | []OpenaiVisionMessageContent] struct { Model string `json:"model"` MaxTokens uint `json:"max_tokens"` Messages []OpenaiReqMessage[T] `json:"messages,omitempty"` PresencePenalty float64 `json:"presence_penalty"` FrequencyPenalty float64 `json:"frequency_penalty"` Stream bool `json:"stream"` Temperature float64 `json:"temperature"` TopP float64 `json:"top_p"` N int `json:"n"` Tools []OpenaiChatReqTool `json:"tools,omitempty"` }
OpenaiChatReq request to openai chat api
type OpenaiChatReqTool ¶
type OpenaiChatReqTool struct { Type string `json:"type"` Function OpenaiChatReqToolFunction `json:"function"` Parameters OpenaiChatReqToolParameters `json:"parameters"` }
OpenaiChatReqTool define tools
{ "type": "function", "function": { "name": "get_current_weather", "description": "Get the current weather in a given location", "parameters": { "type": "object", "properties": { "location": { "type": "string", "description": "The city and state, e.g. San Francisco, CA" }, "unit": { "type": "string", "enum": [ "celsius", "fahrenheit" ] } }, "required": [ "location" ] } } }
func ToolsRequest ¶
func ToolsRequest() []OpenaiChatReqTool
type OpenaiChatReqToolParameters ¶
type OpenaiChatReqToolParameters struct { Type string `json:"type"` Properties OpenaiChatReqToolProperties `json:"properties"` Required []string `json:"required"` }
type OpenaiChatReqToolProperties ¶
type OpenaiChatReqToolProperties struct { Location OpenaiChatReqToolLocation `json:"location"` Unit OpenaiChatReqToolUnit `json:"unit"` }
type OpenaiChatReqToolUnit ¶
type OpenaiCompletionReq ¶
type OpenaiCompletionReq struct { Model string `json:"model"` MaxTokens uint `json:"max_tokens"` PresencePenalty float64 `json:"presence_penalty"` FrequencyPenalty float64 `json:"frequency_penalty"` Stream bool `json:"stream"` Temperature float64 `json:"temperature"` TopP float64 `json:"top_p"` N int `json:"n"` Prompt string `json:"prompt,omitempty"` }
OpenaiCompletionReq is a request to the OpenAI completion API.
type OpenaiCompletionResp ¶
type OpenaiCompletionResp struct { ID string `json:"id"` Object string `json:"object"` Model string `json:"model"` Usage struct { PromptTokens int `json:"prompt_tokens"` CompletionTokens int `json:"completion_tokens"` TotalTokens int `json:"total_tokens"` } `json:"usage"` Choices []struct { Message struct { Role string `json:"role"` Content string `json:"content"` } FinishReason string `json:"finish_reason"` Index int `json:"index"` } `json:"choices"` }
OpenaiCompletionResp is the response returned from the OpenAI chat API (nolint: lll).
https://platform.openai.com/docs/guides/chat/response-format
{ "id": "chatcmpl-6p9XYPYSTTRi0xEviKjjilqrWU2Ve", "object": "chat.completion", "created": 1677649420, "model": "gpt-3.5-turbo", "usage": {"prompt_tokens": 56, "completion_tokens": 31, "total_tokens": 87}, "choices": [ { "message": { "role": "assistant", "content": "The 2020 World Series was played in Arlington, Texas at the Globe Life Field, which was the new home stadium for the Texas Rangers."}, "finish_reason": "stop", "index": 0 } ] }
type OpenaiCompletionStreamResp ¶
type OpenaiCompletionStreamResp struct { ID string `json:"id"` Object string `json:"object"` Created int64 `json:"created"` Model string `json:"model"` Choices []OpenaiCompletionStreamRespChoice `json:"choices"` }
OpenaiCompletionStreamResp is a stream chunk returned from the OpenAI chat API.
{ "id":"chatcmpl-6tCPrEY0j5l157mOIddZi4I0tIFhv", "object":"chat.completion.chunk", "created":1678613787, "model":"gpt-3.5-turbo-0301", "choices":[{"delta":{"role":"assistant"}, "index":0, "finish_reason":null}] }
type OpenaiCompletionStreamRespChoice ¶
type OpenaiCompletionStreamRespChoice struct { Delta OpenaiCompletionStreamRespDelta `json:"delta"` Index int `json:"index"` FinishReason string `json:"finish_reason"` }
type OpenaiCompletionStreamRespDelta ¶
type OpenaiCompletionStreamRespDelta struct { Role OpenaiMessageRole `json:"role"` Content string `json:"content"` ToolCalls []OpenaiCompletionStreamRespToolCall `json:"tool_calls,omitempty"` }
type OpenaiCompletionStreamRespToolCall ¶
type OpenaiCompletionStreamRespToolCall struct { ID string `json:"id"` Type string `json:"type"` Function struct { Name string `json:"name"` Arguments string `json:"arguments"` } `json:"function"` }
OpenaiCompletionStreamRespToolCall tool call
{ "id": "call_abc123", "type": "function", "function": { "name": "get_current_weather", "arguments": "{\n\"location\": \"Boston, MA\"\n}" } }
type OpenaiCreateImageRequest ¶
type OpenaiCreateImageRequest struct { Model string `json:"model,omitempty"` Prompt string `json:"prompt"` N int `json:"n"` Size string `json:"size"` Quality string `json:"quality,omitempty"` ResponseFormat string `json:"response_format,omitempty"` Style string `json:"style,omitempty"` }
OpenaiCreateImageRequest request to openai image api
func NewOpenaiCreateImageRequest ¶
func NewOpenaiCreateImageRequest(prompt string) *OpenaiCreateImageRequest
NewOpenaiCreateImageRequest create new request
type OpenaiCreateImageResponse ¶
type OpenaiCreateImageResponse struct { Created int64 `json:"created"` Data []struct { Url string `json:"url"` B64Json string `json:"b64_json"` } `json:"data"` }
OpenaiCreateImageResponse is the response returned from the OpenAI image API.
type OpenaiReqMessage ¶
type OpenaiReqMessage[T string | []OpenaiVisionMessageContent] struct { Role OpenaiMessageRole `json:"role"` Content T `json:"content"` }
OpenaiReqMessage request message to openai chat api
chat completion message and vision message have different content
type OpenaiVisionMessageContent ¶
type OpenaiVisionMessageContent struct { Type OpenaiVisionMessageContentType `json:"type"` Text string `json:"text,omitempty"` ImageUrl *OpenaiVisionMessageContentImageUrl `json:"image_url,omitempty"` }
OpenaiVisionMessageContent vision message content
type OpenaiVisionMessageContentImageUrl ¶
type OpenaiVisionMessageContentImageUrl struct { URL string `json:"url"` Detail VisionImageResolution `json:"detail,omitempty"` }
OpenaiVisionMessageContentImageUrl image url
type OpenaiVisionMessageContentType ¶
type OpenaiVisionMessageContentType string
OpenaiVisionMessageContentType vision message content type
const ( // OpenaiVisionMessageContentTypeText text OpenaiVisionMessageContentTypeText OpenaiVisionMessageContentType = "text" // OpenaiVisionMessageContentTypeImageUrl image url OpenaiVisionMessageContentTypeImageUrl OpenaiVisionMessageContentType = "image_url" )
type UserQueryType ¶
type UserQueryType string
UserQueryType user query type
const ( // UserQueryTypeSearch search by embeddings chunks UserQueryTypeSearch UserQueryType = "search" // UserQueryTypeScan scan by map-reduce UserQueryTypeScan UserQueryType = "scan" )
type VisionImageResolution ¶
type VisionImageResolution string
VisionImageResolution image resolution
const ( // VisionImageResolutionLow low resolution VisionImageResolutionLow VisionImageResolution = "low" // VisionImageResolutionHigh high resolution VisionImageResolutionHigh VisionImageResolution = "high" )