package dto

import (
	"encoding/json"
	"fmt"

	"github.com/QuantumNous/new-api/types"
)

const (
	ResponsesOutputTypeImageGenerationCall = "image_generation_call"
)
// SimpleResponse carries only the usage and error portions of a response.
type SimpleResponse struct {
	Usage `json:"usage"`
	Error any `json:"error"`
}

// GetOpenAIError extracts an OpenAIError from the dynamically typed error field.
func (s *SimpleResponse) GetOpenAIError() *types.OpenAIError {
	return GetOpenAIError(s.Error)
}
type TextResponse struct {
	Id string `json:"id"`
	Object string `json:"object"`
	Created int64 `json:"created"`
	Model string `json:"model"`
	Choices []OpenAITextResponseChoice `json:"choices"`
	Usage `json:"usage"`
}

type OpenAITextResponseChoice struct {
	Index int `json:"index"`
	Message `json:"message"`
	FinishReason string `json:"finish_reason"`
}

type OpenAITextResponse struct {
	Id string `json:"id"`
	Model string `json:"model"`
	Object string `json:"object"`
	Created any `json:"created"`
	Choices []OpenAITextResponseChoice `json:"choices"`
	Error any `json:"error,omitempty"`
	Usage `json:"usage"`
}

// GetOpenAIError extracts an OpenAIError from the dynamically typed error field.
func (o *OpenAITextResponse) GetOpenAIError() *types.OpenAIError {
	return GetOpenAIError(o.Error)
}
type OpenAIEmbeddingResponseItem struct {
	Object string `json:"object"`
	Index int `json:"index"`
	Embedding []float64 `json:"embedding"`
}

type OpenAIEmbeddingResponse struct {
	Object string `json:"object"`
	Data []OpenAIEmbeddingResponseItem `json:"data"`
	Model string `json:"model"`
	Usage `json:"usage"`
}

type FlexibleEmbeddingResponseItem struct {
	Object string `json:"object"`
	Index int `json:"index"`
	// Embedding is left untyped so differently encoded vectors (for example a
	// float array or a base64 string) can be passed through unchanged.
	Embedding any `json:"embedding"`
}

type FlexibleEmbeddingResponse struct {
	Object string `json:"object"`
	Data []FlexibleEmbeddingResponseItem `json:"data"`
	Model string `json:"model"`
	Usage `json:"usage"`
}
type ChatCompletionsStreamResponseChoice struct {
	Delta ChatCompletionsStreamResponseChoiceDelta `json:"delta,omitempty"`
	Logprobs *any `json:"logprobs"`
	FinishReason *string `json:"finish_reason"`
	Index int `json:"index"`
}

type ChatCompletionsStreamResponseChoiceDelta struct {
	Content *string `json:"content,omitempty"`
	ReasoningContent *string `json:"reasoning_content,omitempty"`
	Reasoning *string `json:"reasoning,omitempty"`
	Role string `json:"role,omitempty"`
	ToolCalls []ToolCallResponse `json:"tool_calls,omitempty"`
}

// SetContentString sets the delta content to the given string.
func (c *ChatCompletionsStreamResponseChoiceDelta) SetContentString(s string) {
	c.Content = &s
}

// GetContentString returns the delta content, or "" if it is unset.
func (c *ChatCompletionsStreamResponseChoiceDelta) GetContentString() string {
	if c.Content == nil {
		return ""
	}
	return *c.Content
}

// GetReasoningContent returns ReasoningContent if present, falling back to
// Reasoning, and "" when neither field is set.
func (c *ChatCompletionsStreamResponseChoiceDelta) GetReasoningContent() string {
	if c.ReasoningContent == nil && c.Reasoning == nil {
		return ""
	}
	if c.ReasoningContent != nil {
		return *c.ReasoningContent
	}
	return *c.Reasoning
}

// SetReasoningContent sets the reasoning_content field only.
func (c *ChatCompletionsStreamResponseChoiceDelta) SetReasoningContent(s string) {
	c.ReasoningContent = &s
	//c.Reasoning = &s
}
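
// The delta accessors above are nil-safe, so relay code can concatenate
// streamed text without extra nil checks. A minimal sketch (chunks is assumed
// to be a slice of already decoded stream responses, not something defined here):
//
//	var content, reasoning strings.Builder
//	for _, chunk := range chunks {
//		for _, choice := range chunk.Choices {
//			content.WriteString(choice.Delta.GetContentString())
//			reasoning.WriteString(choice.Delta.GetReasoningContent())
//		}
//	}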
type ToolCallResponse struct {
	// Index is non-nil only in chat completion chunk objects.
	Index *int `json:"index,omitempty"`
	ID string `json:"id,omitempty"`
	Type any `json:"type"`
	Function FunctionResponse `json:"function"`
}

func (c *ToolCallResponse) SetIndex(i int) {
	c.Index = &i
}

type FunctionResponse struct {
	Description string `json:"description,omitempty"`
	Name string `json:"name,omitempty"`
	// Parameters holds the function schema on requests; Arguments carries the
	// JSON-encoded call arguments on responses.
	Parameters any `json:"parameters,omitempty"` // request
	Arguments string `json:"arguments"` // response
}
type ChatCompletionsStreamResponse struct {
	Id string `json:"id"`
	Object string `json:"object"`
	Created int64 `json:"created"`
	Model string `json:"model"`
	SystemFingerprint *string `json:"system_fingerprint"`
	Choices []ChatCompletionsStreamResponseChoice `json:"choices"`
	Usage *Usage `json:"usage"`
}

// IsFinished reports whether the first choice carries a non-empty finish_reason.
func (c *ChatCompletionsStreamResponse) IsFinished() bool {
	if len(c.Choices) == 0 {
		return false
	}
	return c.Choices[0].FinishReason != nil && *c.Choices[0].FinishReason != ""
}

// IsToolCall reports whether the first choice's delta contains tool calls.
func (c *ChatCompletionsStreamResponse) IsToolCall() bool {
	if len(c.Choices) == 0 {
		return false
	}
	return len(c.Choices[0].Delta.ToolCalls) > 0
}

// GetFirstToolCall returns the first tool call of the first choice, or nil.
func (c *ChatCompletionsStreamResponse) GetFirstToolCall() *ToolCallResponse {
	if c.IsToolCall() {
		return &c.Choices[0].Delta.ToolCalls[0]
	}
	return nil
}

// ClearToolCalls blanks the ID, Type and Function.Name of every streamed tool
// call, leaving only the argument deltas.
func (c *ChatCompletionsStreamResponse) ClearToolCalls() {
	if !c.IsToolCall() {
		return
	}
	for choiceIdx := range c.Choices {
		for callIdx := range c.Choices[choiceIdx].Delta.ToolCalls {
			c.Choices[choiceIdx].Delta.ToolCalls[callIdx].ID = ""
			c.Choices[choiceIdx].Delta.ToolCalls[callIdx].Type = nil
			c.Choices[choiceIdx].Delta.ToolCalls[callIdx].Function.Name = ""
		}
	}
}

// Copy returns a shallow copy: the Choices slice is duplicated, but nested
// slices such as Delta.ToolCalls still share their backing arrays.
func (c *ChatCompletionsStreamResponse) Copy() *ChatCompletionsStreamResponse {
	choices := make([]ChatCompletionsStreamResponseChoice, len(c.Choices))
	copy(choices, c.Choices)
	return &ChatCompletionsStreamResponse{
		Id: c.Id,
		Object: c.Object,
		Created: c.Created,
		Model: c.Model,
		SystemFingerprint: c.SystemFingerprint,
		Choices: choices,
		Usage: c.Usage,
	}
}

// GetSystemFingerprint returns the system fingerprint, or "" if it is unset.
func (c *ChatCompletionsStreamResponse) GetSystemFingerprint() string {
	if c.SystemFingerprint == nil {
		return ""
	}
	return *c.SystemFingerprint
}

// SetSystemFingerprint sets the system fingerprint.
func (c *ChatCompletionsStreamResponse) SetSystemFingerprint(s string) {
	c.SystemFingerprint = &s
}
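
// Usage sketch for the stream helpers above, assuming payload holds the JSON
// body of a single SSE "data:" line (variable names are illustrative only):
//
//	var chunk ChatCompletionsStreamResponse
//	if err := json.Unmarshal(payload, &chunk); err != nil {
//		return err
//	}
//	if call := chunk.GetFirstToolCall(); call != nil {
//		// accumulate call.Function.Arguments fragments
//	}
//	if chunk.IsFinished() {
//		// flush buffered content; chunk.Usage may carry the final token counts
//	}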
type ChatCompletionsStreamResponseSimple struct {
	Choices []ChatCompletionsStreamResponseChoice `json:"choices"`
	Usage *Usage `json:"usage"`
}

type CompletionsStreamResponse struct {
	Choices []struct {
		Text string `json:"text"`
		FinishReason string `json:"finish_reason"`
	} `json:"choices"`
}
// Usage aggregates token accounting across API styles: the chat completions
// fields (prompt_tokens / completion_tokens) and the Responses API fields
// (input_tokens / output_tokens), plus provider-specific extras.
type Usage struct {
	PromptTokens int `json:"prompt_tokens"`
	CompletionTokens int `json:"completion_tokens"`
	TotalTokens int `json:"total_tokens"`
	PromptCacheHitTokens int `json:"prompt_cache_hit_tokens,omitempty"`
	PromptTokensDetails InputTokenDetails `json:"prompt_tokens_details"`
	CompletionTokenDetails OutputTokenDetails `json:"completion_tokens_details"`
	InputTokens int `json:"input_tokens"`
	OutputTokens int `json:"output_tokens"`
	InputTokensDetails *InputTokenDetails `json:"input_tokens_details"`
	// Claude prompt-cache creation tokens, split by cache TTL (5 minutes / 1 hour).
	ClaudeCacheCreation5mTokens int `json:"claude_cache_creation_5_m_tokens"`
	ClaudeCacheCreation1hTokens int `json:"claude_cache_creation_1_h_tokens"`
	// OpenRouter-specific cost field.
	Cost any `json:"cost,omitempty"`
}
type OpenAIVideoResponse struct {
	Id string `json:"id" example:"file-abc123"`
	Object string `json:"object" example:"file"`
	Bytes int64 `json:"bytes" example:"120000"`
	CreatedAt int64 `json:"created_at" example:"1677610602"`
	ExpiresAt int64 `json:"expires_at" example:"1677614202"`
	Filename string `json:"filename" example:"mydata.jsonl"`
	Purpose string `json:"purpose" example:"fine-tune"`
}
type InputTokenDetails struct {
	CachedTokens int `json:"cached_tokens"`
	CachedCreationTokens int `json:"-"`
	TextTokens int `json:"text_tokens"`
	AudioTokens int `json:"audio_tokens"`
	ImageTokens int `json:"image_tokens"`
}

type OutputTokenDetails struct {
	TextTokens int `json:"text_tokens"`
	AudioTokens int `json:"audio_tokens"`
	ReasoningTokens int `json:"reasoning_tokens"`
}
type OpenAIResponsesResponse struct {
	ID string `json:"id"`
	Object string `json:"object"`
	CreatedAt int `json:"created_at"`
	Status string `json:"status"`
	Error any `json:"error,omitempty"`
	IncompleteDetails *IncompleteDetails `json:"incomplete_details,omitempty"`
	Instructions string `json:"instructions"`
	MaxOutputTokens int `json:"max_output_tokens"`
	Model string `json:"model"`
	Output []ResponsesOutput `json:"output"`
	ParallelToolCalls bool `json:"parallel_tool_calls"`
	PreviousResponseID string `json:"previous_response_id"`
	Reasoning *Reasoning `json:"reasoning"`
	Store bool `json:"store"`
	Temperature float64 `json:"temperature"`
	ToolChoice string `json:"tool_choice"`
	Tools []map[string]any `json:"tools"`
	TopP float64 `json:"top_p"`
	Truncation string `json:"truncation"`
	Usage *Usage `json:"usage"`
	User json.RawMessage `json:"user"`
	Metadata json.RawMessage `json:"metadata"`
}
// GetOpenAIError extracts an OpenAIError from the dynamically typed error field.
func (o *OpenAIResponsesResponse) GetOpenAIError() *types.OpenAIError {
	return GetOpenAIError(o.Error)
}

// HasImageGenerationCall reports whether any output item is an image_generation_call.
func (o *OpenAIResponsesResponse) HasImageGenerationCall() bool {
	if len(o.Output) == 0 {
		return false
	}
	for _, output := range o.Output {
		if output.Type == ResponsesOutputTypeImageGenerationCall {
			return true
		}
	}
	return false
}

// GetQuality returns the quality of the first image_generation_call output, or "".
func (o *OpenAIResponsesResponse) GetQuality() string {
	if len(o.Output) == 0 {
		return ""
	}
	for _, output := range o.Output {
		if output.Type == ResponsesOutputTypeImageGenerationCall {
			return output.Quality
		}
	}
	return ""
}

// GetSize returns the size of the first image_generation_call output, or "".
func (o *OpenAIResponsesResponse) GetSize() string {
	if len(o.Output) == 0 {
		return ""
	}
	for _, output := range o.Output {
		if output.Type == ResponsesOutputTypeImageGenerationCall {
			return output.Size
		}
	}
	return ""
}
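
// The three helpers above can be combined when inspecting image output from
// the Responses API. A hedged sketch (resp is assumed to be an already decoded
// OpenAIResponsesResponse):
//
//	if resp.HasImageGenerationCall() {
//		quality, size := resp.GetQuality(), resp.GetSize()
//		// e.g. resolve image pricing or logging by quality and size
//		_, _ = quality, size
//	}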
type IncompleteDetails struct {
	Reasoning string `json:"reasoning"`
}

type ResponsesOutput struct {
	Type string `json:"type"`
	ID string `json:"id"`
	Status string `json:"status"`
	Role string `json:"role"`
	Content []ResponsesOutputContent `json:"content"`
	Quality string `json:"quality"`
	Size string `json:"size"`
}

type ResponsesOutputContent struct {
	Type string `json:"type"`
	Text string `json:"text"`
	Annotations []any `json:"annotations"`
}
const (
	BuildInToolWebSearchPreview = "web_search_preview"
	BuildInToolFileSearch = "file_search"
)

const (
	BuildInCallWebSearchCall = "web_search_call"
)

const (
	ResponsesOutputTypeItemAdded = "response.output_item.added"
	ResponsesOutputTypeItemDone = "response.output_item.done"
)
// ResponsesStreamResponse models a single streaming event from /v1/responses.
type ResponsesStreamResponse struct {
	Type string `json:"type"`
	Response *OpenAIResponsesResponse `json:"response,omitempty"`
	Delta string `json:"delta,omitempty"`
	Item *ResponsesOutput `json:"item,omitempty"`
}
// GetOpenAIError extracts an OpenAIError from a dynamically typed error value.
// It accepts typed errors, maps produced by JSON unmarshalling, and plain strings.
func GetOpenAIError(errorField any) *types.OpenAIError {
	if errorField == nil {
		return nil
	}
	switch err := errorField.(type) {
	case types.OpenAIError:
		return &err
	case *types.OpenAIError:
		return err
	case map[string]interface{}:
		// handle a map produced by JSON unmarshalling
		openaiErr := &types.OpenAIError{}
		if errType, ok := err["type"].(string); ok {
			openaiErr.Type = errType
		}
		if errMsg, ok := err["message"].(string); ok {
			openaiErr.Message = errMsg
		}
		if errParam, ok := err["param"].(string); ok {
			openaiErr.Param = errParam
		}
		if errCode, ok := err["code"]; ok {
			openaiErr.Code = errCode
		}
		return openaiErr
	case string:
		// handle a plain string error
		return &types.OpenAIError{
			Type: "error",
			Message: err,
		}
	default:
		// unknown type: fall back to its string representation
		return &types.OpenAIError{
			Type: "unknown_error",
			Message: fmt.Sprintf("%v", err),
		}
	}
}
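
// Usage sketch: GetOpenAIError lets callers treat error payloads of different
// shapes uniformly. Assuming body holds a raw upstream JSON response
// (illustrative only, not part of this package's API):
//
//	var resp SimpleResponse
//	if err := json.Unmarshal(body, &resp); err == nil {
//		if oaiErr := resp.GetOpenAIError(); oaiErr != nil {
//			// the upstream reported an error, whether the "error" field was
//			// an object or a plain string
//		}
//	}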