http

package
v0.0.0-...-51f28c1 Latest Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Apr 27, 2024 License: MIT Imports: 56 Imported by: 0

Documentation

Overview

Package http implements an HTTP server.

Index

Constants

View Source
const (
	// OpenaiMessageRoleSystem system message
	OpenaiMessageRoleSystem = "system"
	// OpenaiMessageRoleUser user message
	OpenaiMessageRoleUser = "user"
	// OpenaiMessageRoleAI ai message
	OpenaiMessageRoleAI = "assistant"
)
View Source
const VisionTokenPrice = 5000

VisionTokenPrice vision token price ($/500000)

Variables

This section is empty.

Functions

func AbortErr

func AbortErr(ctx *gin.Context, err error) bool

AbortErr abort with error

func Call

func Call(name, args string) (string, error)

func Chat

func Chat(ctx *gin.Context)

Chat render chat page

func ChatHandler

func ChatHandler(ctx *gin.Context)

ChatHandler handle api request

func ChatModel

func ChatModel() string

ChatModel return chat model

func CopyHeader

func CopyHeader(to, from http.Header)

CopyHeader copy header from `from` to `to`

func CountVisionImagePrice

func CountVisionImagePrice(width int, height int, resolution VisionImageResolution) (int, error)

CountVisionImagePrice counts the token price of a vision image at the given width, height, and resolution

https://openai.com/pricing

func DownloadUserConfig

func DownloadUserConfig(ctx *gin.Context)

func DrawByDalleHandler

func DrawByDalleHandler(ctx *gin.Context)

func DrawByLcmHandler

func DrawByLcmHandler(ctx *gin.Context)

func DrawBySdxlturboHandler

func DrawBySdxlturboHandler(ctx *gin.Context)

func GetCurrentUser

func GetCurrentUser(ctx *gin.Context)

GetCurrentUser get current user

func GetCurrentUserQuota

func GetCurrentUserQuota(ctx *gin.Context)

func GetUserInternalBill

func GetUserInternalBill(ctx context.Context,
	user *config.UserConfig, billType db.BillingType) (
	bill *db.Billing, err error)

GetUserInternalBill get user internal bill

func OneShotChatHandler

func OneShotChatHandler(gctx *gin.Context)

OneShotChatHandler handle one shot chat request

func OneapiProxyHandler

func OneapiProxyHandler(ctx *gin.Context)

func OneshotChat

func OneshotChat(ctx context.Context, user *config.UserConfig, systemPrompt, userPrompt string) (answer string, err error)

OneshotChat gets an AI response from gpt-3.5-turbo

Args:

  • systemPrompt: system prompt
  • userPrompt: user prompt

func PaymentHandler

func PaymentHandler(c *gin.Context)

func PaymentStaticHandler

func PaymentStaticHandler(c *gin.Context)

func RamjetProxyHandler

func RamjetProxyHandler(ctx *gin.Context)

RamjetProxyHandler proxy to ramjet url

func RegisterStatic

func RegisterStatic(g gin.IRouter)

RegisterStatic register static files

func SaveLlmConservationHandler

func SaveLlmConservationHandler(ctx *gin.Context)

SaveLlmConservationHandler saves an LLM conversation (note: "Conservation" in the identifier appears to be a typo for "Conversation")

func SetupHTTPCli

func SetupHTTPCli() (err error)

SetupHTTPCli setup http client

func Tiktoken

func Tiktoken() *tiktoken.Tiktoken

Tiktoken return tiktoken, could be nil if not found

func UploadFiles

func UploadFiles(ctx *gin.Context)

UploadFiles upload files

func UploadUserConfig

func UploadUserConfig(ctx *gin.Context)

Types

type AzureCreateImageResponse

type AzureCreateImageResponse struct {
	Created int64 `json:"created"`
	Data    []struct {
		RevisedPrompt string `json:"revised_prompt"`
		Url           string `json:"url"`
	} `json:"data"`
}

AzureCreateImageResponse return from azure image api

type DrawImageByImageRequest

type DrawImageByImageRequest struct {
	Prompt      string `json:"prompt" binding:"required,min=1"`
	Model       string `json:"model" binding:"required,min=1"`
	ImageBase64 string `json:"image_base64" binding:"required,min=1"`
}

DrawImageByImageRequest draw image by image and prompt

type DrawImageByLcmRequest

type DrawImageByLcmRequest struct {
	// Data consist of 6 strings:
	//  1. prompt,
	//  2. base64 encoded image with fixed prefix "data:image/png;base64,"
	//  3. steps
	//  4. cfg
	//  5. sketch strength
	//  6. seed
	Data    [6]any `json:"data"`
	FnIndex int    `json:"fn_index"`
}

DrawImageByLcmRequest draw image by image and prompt with lcm

type DrawImageByLcmResponse

type DrawImageByLcmResponse struct {
	// Data base64 encoded image with fixed prefix "data:image/png;base64,"
	Data            []string `json:"data"`
	IsGenerating    bool     `json:"is_generating"`
	Duration        float64  `json:"duration"`
	AverageDuration float64  `json:"average_duration"`
}

DrawImageByLcmResponse draw image by image and prompt with lcm

type DrawImageBySdxlturboRequest

type DrawImageBySdxlturboRequest struct {
	Model string `json:"model" binding:"required,min=1"`
	// Text prompt
	Text           string `json:"text" binding:"required,min=1"`
	NegativePrompt string `json:"negative_prompt"`
	ImageB64       string `json:"image"`
	// N how many images to generate
	N int `json:"n"`
}

type DrawImageBySdxlturboResponse

type DrawImageBySdxlturboResponse struct {
	B64Images []string `json:"images"`
}

type DrawImageByTextRequest

type DrawImageByTextRequest struct {
	Prompt string `json:"prompt" binding:"required,min=1"`
	Model  string `json:"model" binding:"required,min=1"`
}

DrawImageByTextRequest draw image by text and prompt

type ExternalBillingUserResponse

type ExternalBillingUserResponse struct {
	Data struct {
		Status      ExternalBillingUserStatus `json:"status"`
		RemainQuota db.Price                  `json:"remain_quota"`
	} `json:"data"`
}

ExternalBillingUserResponse return from external billing api

func GetUserExternalBillingQuota

func GetUserExternalBillingQuota(ctx context.Context, user *config.UserConfig) (
	externalBalanceResp *ExternalBillingUserResponse, err error)

GetUserExternalBillingQuota get user external billing quota

type ExternalBillingUserStatus

type ExternalBillingUserStatus int

ExternalBillingUserStatus user status

const (
	// ExternalBillingUserStatusActive active
	ExternalBillingUserStatusActive ExternalBillingUserStatus = 1
)

type FrontendReq

type FrontendReq struct {
	Model            string               `json:"model"`
	MaxTokens        uint                 `json:"max_tokens"`
	Messages         []FrontendReqMessage `json:"messages,omitempty"`
	PresencePenalty  float64              `json:"presence_penalty"`
	FrequencyPenalty float64              `json:"frequency_penalty"`
	Stream           bool                 `json:"stream"`
	Temperature      float64              `json:"temperature"`
	TopP             float64              `json:"top_p"`
	N                int                  `json:"n"`

	// LaiskyExtra some special config for laisky
	LaiskyExtra *struct {
		ChatSwitch struct {
			// DisableHttpsCrawler disable https crawler
			DisableHttpsCrawler bool `json:"disable_https_crawler"`
			// EnableGoogleSearch enable google search
			EnableGoogleSearch bool `json:"enable_google_search"`
		} `json:"chat_switch"`
	} `json:"laisky_extra,omitempty"`
}

FrontendReq request from frontend

func (*FrontendReq) PromptTokens

func (r *FrontendReq) PromptTokens() (n int)

PromptTokens count prompt tokens

type FrontendReqMessage

type FrontendReqMessage struct {
	Role    OpenaiMessageRole `json:"role"`
	Content string            `json:"content"`
	// Files send files with message
	Files []frontendReqMessageFiles `json:"files"`
}

FrontendReqMessage request message from frontend

type LLMConservationReq

type LLMConservationReq struct {
	Model     string               `json:"model" binding:"required,min=1"`
	MaxTokens uint                 `json:"max_tokens" binding:"required,min=1"`
	Messages  []FrontendReqMessage `json:"messages" binding:"required,min=1"`
	Response  string               `json:"response" binding:"required,min=1"`
}

type OneShotChatRequest

type OneShotChatRequest struct {
	SystemPrompt string `json:"system_prompt"`
	UserPrompt   string `json:"user_prompt" binding:"required,min=1"`
}

OneShotChatRequest request to one-shot chat api

type OpenaiChatReq

type OpenaiChatReq[T string | []OpenaiVisionMessageContent] struct {
	Model            string                `json:"model"`
	MaxTokens        uint                  `json:"max_tokens"`
	Messages         []OpenaiReqMessage[T] `json:"messages,omitempty"`
	PresencePenalty  float64               `json:"presence_penalty"`
	FrequencyPenalty float64               `json:"frequency_penalty"`
	Stream           bool                  `json:"stream"`
	Temperature      float64               `json:"temperature"`
	TopP             float64               `json:"top_p"`
	N                int                   `json:"n"`
	Tools            []OpenaiChatReqTool   `json:"tools,omitempty"`
}

OpenaiChatReq request to openai chat api

type OpenaiChatReqTool

type OpenaiChatReqTool struct {
	Type       string                      `json:"type"`
	Function   OpenaiChatReqToolFunction   `json:"function"`
	Parameters OpenaiChatReqToolParameters `json:"parameters"`
}

OpenaiChatReqTool define tools

{
	"type": "function",
	"function": {
	  "name": "get_current_weather",
	  "description": "Get the current weather in a given location",
	  "parameters": {
		"type": "object",
		"properties": {
		  "location": {
			"type": "string",
			"description": "The city and state, e.g. San Francisco, CA"
		  },
		  "unit": {
			"type": "string",
			"enum": [
			  "celsius",
			  "fahrenheit"
			]
		  }
		},
		"required": [
		  "location"
		]
	  }
	}
}

func ToolsRequest

func ToolsRequest() []OpenaiChatReqTool

type OpenaiChatReqToolFunction

type OpenaiChatReqToolFunction struct {
	Name        string `json:"name"`
	Description string `json:"description"`
}

type OpenaiChatReqToolLocation

type OpenaiChatReqToolLocation struct {
	Type        string `json:"type"`
	Description string `json:"description"`
}

type OpenaiChatReqToolParameters

type OpenaiChatReqToolParameters struct {
	Type       string                      `json:"type"`
	Properties OpenaiChatReqToolProperties `json:"properties"`
	Required   []string                    `json:"required"`
}

type OpenaiChatReqToolProperties

type OpenaiChatReqToolProperties struct {
	Location OpenaiChatReqToolLocation `json:"location"`
	Unit     OpenaiChatReqToolUnit     `json:"unit"`
}

type OpenaiChatReqToolUnit

type OpenaiChatReqToolUnit struct {
	Type string   `json:"type"`
	Enum []string `json:"enum"`
}

type OpenaiCompletionReq

type OpenaiCompletionReq struct {
	Model            string  `json:"model"`
	MaxTokens        uint    `json:"max_tokens"`
	PresencePenalty  float64 `json:"presence_penalty"`
	FrequencyPenalty float64 `json:"frequency_penalty"`
	Stream           bool    `json:"stream"`
	Temperature      float64 `json:"temperature"`
	TopP             float64 `json:"top_p"`
	N                int     `json:"n"`
	Prompt           string  `json:"prompt,omitempty"`
}

OpenaiCompletionReq request to openai chat api

type OpenaiCompletionResp

type OpenaiCompletionResp struct {
	ID     string `json:"id"`
	Object string `json:"object"`
	Model  string `json:"model"`
	Usage  struct {
		PromptTokens     int `json:"prompt_tokens"`
		CompletionTokens int `json:"completion_tokens"`
		TotalTokens      int `json:"total_tokens"`
	} `json:"usage"`
	Choices []struct {
		Message struct {
			Role    string `json:"role"`
			Content string `json:"content"`
		}
		FinishReason string `json:"finish_reason"`
		Index        int    `json:"index"`
	} `json:"choices"`
}

nolint: lll OpenaiCompletionResp return from openai chat api

https://platform.openai.com/docs/guides/chat/response-format

{
	"id": "chatcmpl-6p9XYPYSTTRi0xEviKjjilqrWU2Ve",
	"object": "chat.completion",
	"created": 1677649420,
	"model": "gpt-3.5-turbo",
	"usage": {"prompt_tokens": 56, "completion_tokens": 31, "total_tokens": 87},
	"choices": [
	  {
	   "message": {
		 "role": "assistant",
		 "content": "The 2020 World Series was played in Arlington, Texas at the Globe Life Field, which was the new home stadium for the Texas Rangers."},
	   "finish_reason": "stop",
	   "index": 0
	  }
	 ]
   }

type OpenaiCompletionStreamResp

type OpenaiCompletionStreamResp struct {
	ID      string                             `json:"id"`
	Object  string                             `json:"object"`
	Created int64                              `json:"created"`
	Model   string                             `json:"model"`
	Choices []OpenaiCompletionStreamRespChoice `json:"choices"`
}

OpenaiCompletionStreamResp stream chunk return from openai chat api

{
    "id":"chatcmpl-6tCPrEY0j5l157mOIddZi4I0tIFhv",
    "object":"chat.completion.chunk",
    "created":1678613787,
    "model":"gpt-3.5-turbo-0301",
    "choices":[{"delta":{"role":"assistant"}, "index":0, "finish_reason":null}]
}

type OpenaiCompletionStreamRespChoice

type OpenaiCompletionStreamRespChoice struct {
	Delta        OpenaiCompletionStreamRespDelta `json:"delta"`
	Index        int                             `json:"index"`
	FinishReason string                          `json:"finish_reason"`
}

type OpenaiCompletionStreamRespDelta

type OpenaiCompletionStreamRespDelta struct {
	Role      OpenaiMessageRole                    `json:"role"`
	Content   string                               `json:"content"`
	ToolCalls []OpenaiCompletionStreamRespToolCall `json:"tool_calls,omitempty"`
}

type OpenaiCompletionStreamRespToolCall

type OpenaiCompletionStreamRespToolCall struct {
	ID       string `json:"id"`
	Type     string `json:"type"`
	Function struct {
		Name      string `json:"name"`
		Arguments string `json:"arguments"`
	} `json:"function"`
}

OpenaiCompletionStreamRespToolCall tool call

{
	"id": "call_abc123",
	"type": "function",
	"function": {
	  "name": "get_current_weather",
	  "arguments": "{\n\"location\": \"Boston, MA\"\n}"
	}
}

type OpenaiCreateImageRequest

type OpenaiCreateImageRequest struct {
	Model          string `json:"model,omitempty"`
	Prompt         string `json:"prompt"`
	N              int    `json:"n"`
	Size           string `json:"size"`
	Quality        string `json:"quality,omitempty"`
	ResponseFormat string `json:"response_format,omitempty"`
	Style          string `json:"style,omitempty"`
}

OpenaiCreateImageRequest request to openai image api

func NewOpenaiCreateImageRequest

func NewOpenaiCreateImageRequest(prompt string) *OpenaiCreateImageRequest

NewOpenaiCreateImageRequest create new request

type OpenaiCreateImageResponse

type OpenaiCreateImageResponse struct {
	Created int64 `json:"created"`
	Data    []struct {
		Url     string `json:"url"`
		B64Json string `json:"b64_json"`
	} `json:"data"`
}

OpenaiCreateImageResponse return from openai image api

type OpenaiMessageRole

type OpenaiMessageRole string

OpenaiMessageRole message role

func (OpenaiMessageRole) String

func (r OpenaiMessageRole) String() string

String return string

type OpenaiReqMessage

type OpenaiReqMessage[T string | []OpenaiVisionMessageContent] struct {
	Role    OpenaiMessageRole `json:"role"`
	Content T                 `json:"content"`
}

OpenaiReqMessage request message to openai chat api

chat completion message and vision message have different content

type OpenaiVisionMessageContent

type OpenaiVisionMessageContent struct {
	Type     OpenaiVisionMessageContentType      `json:"type"`
	Text     string                              `json:"text,omitempty"`
	ImageUrl *OpenaiVisionMessageContentImageUrl `json:"image_url,omitempty"`
}

OpenaiVisionMessageContent vision message content

type OpenaiVisionMessageContentImageUrl

type OpenaiVisionMessageContentImageUrl struct {
	URL    string                `json:"url"`
	Detail VisionImageResolution `json:"detail,omitempty"`
}

OpenaiVisionMessageContentImageUrl image url

type OpenaiVisionMessageContentType

type OpenaiVisionMessageContentType string

OpenaiVisionMessageContentType vision message content type

const (
	// OpenaiVisionMessageContentTypeText text
	OpenaiVisionMessageContentTypeText OpenaiVisionMessageContentType = "text"
	// OpenaiVisionMessageContentTypeImageUrl image url
	OpenaiVisionMessageContentTypeImageUrl OpenaiVisionMessageContentType = "image_url"
)

type UserQueryType

type UserQueryType string

UserQueryType user query type

const (
	// UserQueryTypeSearch search by embeddings chunks
	UserQueryTypeSearch UserQueryType = "search"
	// UserQueryTypeScan scan by map-reduce
	UserQueryTypeScan UserQueryType = "scan"
)

type VisionImageResolution

type VisionImageResolution string

VisionImageResolution image resolution

const (
	// VisionImageResolutionLow low resolution
	VisionImageResolutionLow VisionImageResolution = "low"
	// VisionImageResolutionHigh high resolution
	VisionImageResolutionHigh VisionImageResolution = "high"
)

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL