| 123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278 |
- package dto
- import (
- "encoding/json"
- "one-api/types"
- )
// SimpleResponse is a minimal upstream reply: token usage plus an
// optional error object. The embedded Usage is flattened into "usage".
type SimpleResponse struct {
	Usage `json:"usage"`
	// NOTE(review): other types in this file use *types.OpenAIError;
	// presumably OpenAIError is a package-local equivalent — confirm.
	Error *OpenAIError `json:"error"`
}
// TextResponse is a non-streaming completion response in OpenAI format;
// the embedded Usage serializes under the "usage" key.
type TextResponse struct {
	Id      string                     `json:"id"`
	Object  string                     `json:"object"`
	Created int64                      `json:"created"`
	Model   string                     `json:"model"`
	Choices []OpenAITextResponseChoice `json:"choices"`
	Usage   `json:"usage"`
}
// OpenAITextResponseChoice is one choice of a non-streaming completion;
// the embedded Message serializes under the "message" key.
type OpenAITextResponseChoice struct {
	Index        int    `json:"index"`
	Message      `json:"message"`
	FinishReason string `json:"finish_reason"`
}
// OpenAITextResponse mirrors TextResponse but is more tolerant of
// upstream variations: Created is decoded as any (so non-int64 values
// still parse — presumably some providers send a string; confirm), and
// an error object may be attached.
type OpenAITextResponse struct {
	Id      string                     `json:"id"`
	Model   string                     `json:"model"`
	Object  string                     `json:"object"`
	Created any                        `json:"created"`
	Choices []OpenAITextResponseChoice `json:"choices"`
	Error   *types.OpenAIError         `json:"error,omitempty"`
	Usage   `json:"usage"`
}
// OpenAIEmbeddingResponseItem is a single embedding vector in an
// embeddings response.
type OpenAIEmbeddingResponseItem struct {
	Object    string    `json:"object"`
	Index     int       `json:"index"`
	Embedding []float64 `json:"embedding"`
}
// OpenAIEmbeddingResponse is an OpenAI-format embeddings response with
// float64 vectors; the embedded Usage serializes under "usage".
type OpenAIEmbeddingResponse struct {
	Object string                        `json:"object"`
	Data   []OpenAIEmbeddingResponseItem `json:"data"`
	Model  string                        `json:"model"`
	Usage  `json:"usage"`
}
// FlexibleEmbeddingResponseItem is like OpenAIEmbeddingResponseItem but
// accepts any embedding payload (e.g. a base64 string instead of a float
// array — presumably for encoding_format variants; confirm with callers).
type FlexibleEmbeddingResponseItem struct {
	Object    string `json:"object"`
	Index     int    `json:"index"`
	Embedding any    `json:"embedding"`
}
// FlexibleEmbeddingResponse is an embeddings response whose items carry
// an arbitrarily-typed embedding payload.
type FlexibleEmbeddingResponse struct {
	Object string                          `json:"object"`
	Data   []FlexibleEmbeddingResponseItem `json:"data"`
	Model  string                          `json:"model"`
	Usage  `json:"usage"`
}
// ChatCompletionsStreamResponseChoice is one choice inside a streaming
// chat completion chunk.
type ChatCompletionsStreamResponseChoice struct {
	Delta ChatCompletionsStreamResponseChoiceDelta `json:"delta,omitempty"`
	// Logprobs and FinishReason are pointers so JSON null round-trips.
	Logprobs     *any    `json:"logprobs"`
	FinishReason *string `json:"finish_reason"`
	Index        int     `json:"index"`
}
// ChatCompletionsStreamResponseChoiceDelta carries the incremental
// payload of a streaming choice; pointer fields distinguish "absent"
// from "empty string". Reasoning duplicates ReasoningContent under an
// alternate key (see SetReasoningContent, which sets both).
type ChatCompletionsStreamResponseChoiceDelta struct {
	Content          *string            `json:"content,omitempty"`
	ReasoningContent *string            `json:"reasoning_content,omitempty"`
	Reasoning        *string            `json:"reasoning,omitempty"`
	Role             string             `json:"role,omitempty"`
	ToolCalls        []ToolCallResponse `json:"tool_calls,omitempty"`
}
- func (c *ChatCompletionsStreamResponseChoiceDelta) SetContentString(s string) {
- c.Content = &s
- }
- func (c *ChatCompletionsStreamResponseChoiceDelta) GetContentString() string {
- if c.Content == nil {
- return ""
- }
- return *c.Content
- }
- func (c *ChatCompletionsStreamResponseChoiceDelta) GetReasoningContent() string {
- if c.ReasoningContent == nil && c.Reasoning == nil {
- return ""
- }
- if c.ReasoningContent != nil {
- return *c.ReasoningContent
- }
- return *c.Reasoning
- }
- func (c *ChatCompletionsStreamResponseChoiceDelta) SetReasoningContent(s string) {
- c.ReasoningContent = &s
- c.Reasoning = &s
- }
// ToolCallResponse describes a single tool/function call emitted by the
// model, in either a full response or a stream chunk.
type ToolCallResponse struct {
	// Index is not nil only in chat completion chunk object
	Index *int   `json:"index,omitempty"`
	ID    string `json:"id,omitempty"`
	// Type is any rather than string — presumably some upstreams send a
	// non-string value here; confirm before tightening.
	Type     any              `json:"type"`
	Function FunctionResponse `json:"function"`
}
- func (c *ToolCallResponse) SetIndex(i int) {
- c.Index = &i
- }
// FunctionResponse is the function payload of a tool call. Parameters is
// populated on requests (tool definitions); Arguments on responses.
type FunctionResponse struct {
	Description string `json:"description,omitempty"`
	Name        string `json:"name,omitempty"`
	// call function with arguments in JSON format
	Parameters any    `json:"parameters,omitempty"` // request
	Arguments  string `json:"arguments"`            // response
}
// ChatCompletionsStreamResponse is one SSE chunk of a streaming chat
// completion. Usage is a pointer so chunks without token accounting
// serialize it as null rather than zeros.
type ChatCompletionsStreamResponse struct {
	Id                string                                `json:"id"`
	Object            string                                `json:"object"`
	Created           int64                                 `json:"created"`
	Model             string                                `json:"model"`
	SystemFingerprint *string                               `json:"system_fingerprint"`
	Choices           []ChatCompletionsStreamResponseChoice `json:"choices"`
	Usage             *Usage                                `json:"usage"`
}
- func (c *ChatCompletionsStreamResponse) IsToolCall() bool {
- if len(c.Choices) == 0 {
- return false
- }
- return len(c.Choices[0].Delta.ToolCalls) > 0
- }
- func (c *ChatCompletionsStreamResponse) GetFirstToolCall() *ToolCallResponse {
- if c.IsToolCall() {
- return &c.Choices[0].Delta.ToolCalls[0]
- }
- return nil
- }
- func (c *ChatCompletionsStreamResponse) Copy() *ChatCompletionsStreamResponse {
- choices := make([]ChatCompletionsStreamResponseChoice, len(c.Choices))
- copy(choices, c.Choices)
- return &ChatCompletionsStreamResponse{
- Id: c.Id,
- Object: c.Object,
- Created: c.Created,
- Model: c.Model,
- SystemFingerprint: c.SystemFingerprint,
- Choices: choices,
- Usage: c.Usage,
- }
- }
- func (c *ChatCompletionsStreamResponse) GetSystemFingerprint() string {
- if c.SystemFingerprint == nil {
- return ""
- }
- return *c.SystemFingerprint
- }
- func (c *ChatCompletionsStreamResponse) SetSystemFingerprint(s string) {
- c.SystemFingerprint = &s
- }
// ChatCompletionsStreamResponseSimple is a trimmed stream chunk carrying
// only choices and optional usage (no id/model/created metadata).
type ChatCompletionsStreamResponseSimple struct {
	Choices []ChatCompletionsStreamResponseChoice `json:"choices"`
	Usage   *Usage                                `json:"usage"`
}
// CompletionsStreamResponse is a legacy text-completions stream chunk;
// only the choice text and finish reason are decoded.
type CompletionsStreamResponse struct {
	Choices []struct {
		Text         string `json:"text"`
		FinishReason string `json:"finish_reason"`
	} `json:"choices"`
}
// Usage aggregates token accounting. It carries both the chat-style
// fields (prompt/completion) and the Responses-API style fields
// (input/output) so one struct serves both endpoints.
type Usage struct {
	PromptTokens     int `json:"prompt_tokens"`
	CompletionTokens int `json:"completion_tokens"`
	TotalTokens      int `json:"total_tokens"`
	// NOTE(review): presumably a provider-specific cache-hit counter
	// (DeepSeek-style key) — confirm against callers.
	PromptCacheHitTokens   int                `json:"prompt_cache_hit_tokens,omitempty"`
	PromptTokensDetails    InputTokenDetails  `json:"prompt_tokens_details"`
	CompletionTokenDetails OutputTokenDetails `json:"completion_tokens_details"`
	InputTokens            int                `json:"input_tokens"`
	OutputTokens           int                `json:"output_tokens"`
	InputTokensDetails     *InputTokenDetails `json:"input_tokens_details"`
	// OpenRouter Params
	Cost any `json:"cost,omitempty"`
}
// InputTokenDetails breaks down prompt/input tokens by kind.
type InputTokenDetails struct {
	CachedTokens int `json:"cached_tokens"`
	// CachedCreationTokens is internal-only: the "-" tag excludes it
	// from JSON entirely.
	CachedCreationTokens int `json:"-"`
	TextTokens           int `json:"text_tokens"`
	AudioTokens          int `json:"audio_tokens"`
	ImageTokens          int `json:"image_tokens"`
}
// OutputTokenDetails breaks down completion/output tokens by kind.
type OutputTokenDetails struct {
	TextTokens      int `json:"text_tokens"`
	AudioTokens     int `json:"audio_tokens"`
	ReasoningTokens int `json:"reasoning_tokens"`
}
// OpenAIResponsesResponse is the full response object of the OpenAI
// /v1/responses API. User and Metadata are kept as raw JSON so their
// upstream shape round-trips untouched.
type OpenAIResponsesResponse struct {
	ID                string              `json:"id"`
	Object            string              `json:"object"`
	CreatedAt         int                 `json:"created_at"`
	Status            string              `json:"status"`
	Error             *types.OpenAIError  `json:"error,omitempty"`
	IncompleteDetails *IncompleteDetails  `json:"incomplete_details,omitempty"`
	Instructions      string              `json:"instructions"`
	MaxOutputTokens   int                 `json:"max_output_tokens"`
	Model             string              `json:"model"`
	Output            []ResponsesOutput   `json:"output"`
	ParallelToolCalls bool                `json:"parallel_tool_calls"`
	PreviousResponseID string             `json:"previous_response_id"`
	// NOTE(review): Reasoning is a project-declared type not defined in
	// this chunk — see its declaration for field semantics.
	Reasoning   *Reasoning       `json:"reasoning"`
	Store       bool             `json:"store"`
	Temperature float64          `json:"temperature"`
	ToolChoice  string           `json:"tool_choice"`
	Tools       []map[string]any `json:"tools"`
	TopP        float64          `json:"top_p"`
	Truncation  string           `json:"truncation"`
	Usage       *Usage           `json:"usage"`
	User        json.RawMessage  `json:"user"`
	Metadata    json.RawMessage  `json:"metadata"`
}
// IncompleteDetails explains why a /v1/responses run ended incomplete.
type IncompleteDetails struct {
	Reasoning string `json:"reasoning"`
}
// ResponsesOutput is one output item of a /v1/responses response
// (e.g. a message or a built-in tool call).
type ResponsesOutput struct {
	Type    string                   `json:"type"`
	ID      string                   `json:"id"`
	Status  string                   `json:"status"`
	Role    string                   `json:"role"`
	Content []ResponsesOutputContent `json:"content"`
}
// ResponsesOutputContent is one content part of a /v1/responses output
// item. Annotations is left loosely typed because its element shape
// varies by annotation type. ([]interface{} replaced with the equivalent
// []any for consistency with the rest of this file.)
type ResponsesOutputContent struct {
	Type        string `json:"type"`
	Text        string `json:"text"`
	Annotations []any  `json:"annotations"`
}
// Built-in tool names accepted by the /v1/responses API.
// (The "BuildIn" spelling is kept for backward compatibility.)
const (
	BuildInToolWebSearchPreview = "web_search_preview"
	BuildInToolFileSearch       = "file_search"
)
// Built-in call item types appearing in /v1/responses output.
const (
	BuildInCallWebSearchCall = "web_search_call"
)
// Stream event types emitted for /v1/responses output items.
const (
	ResponsesOutputTypeItemAdded = "response.output_item.added"
	ResponsesOutputTypeItemDone  = "response.output_item.done"
)
// ResponsesStreamResponse handles /v1/responses streaming events; which
// of the optional fields is populated depends on the event Type.
type ResponsesStreamResponse struct {
	Type     string                   `json:"type"`
	Response *OpenAIResponsesResponse `json:"response,omitempty"`
	Delta    string                   `json:"delta,omitempty"`
	Item     *ResponsesOutput         `json:"item,omitempty"`
}
|