
fix(response): tools have only a few parameters to handle, use a map

Xyfacai, 5 months ago
commit c674c3561a

+ 42 - 39
dto/openai_request.go

@@ -603,26 +603,29 @@ type WebSearchOptions struct {
 	UserLocation      json.RawMessage `json:"user_location,omitempty"`
 }
 
+// https://platform.openai.com/docs/api-reference/responses/create
 type OpenAIResponsesRequest struct {
-	Model              string               `json:"model"`
-	Input              json.RawMessage      `json:"input,omitempty"`
-	Include            json.RawMessage      `json:"include,omitempty"`
-	Instructions       json.RawMessage      `json:"instructions,omitempty"`
-	MaxOutputTokens    uint                 `json:"max_output_tokens,omitempty"`
-	Metadata           json.RawMessage      `json:"metadata,omitempty"`
-	ParallelToolCalls  bool                 `json:"parallel_tool_calls,omitempty"`
-	PreviousResponseID string               `json:"previous_response_id,omitempty"`
-	Reasoning          *Reasoning           `json:"reasoning,omitempty"`
-	ServiceTier        string               `json:"service_tier,omitempty"`
-	Store              bool                 `json:"store,omitempty"`
-	Stream             bool                 `json:"stream,omitempty"`
-	Temperature        float64              `json:"temperature,omitempty"`
-	Text               json.RawMessage      `json:"text,omitempty"`
-	ToolChoice         json.RawMessage      `json:"tool_choice,omitempty"`
-	Tools              []ResponsesToolsCall `json:"tools,omitempty"`
-	TopP               float64              `json:"top_p,omitempty"`
-	Truncation         string               `json:"truncation,omitempty"`
-	User               string               `json:"user,omitempty"`
+	Model              string           `json:"model"`
+	Input              json.RawMessage  `json:"input,omitempty"`
+	Include            json.RawMessage  `json:"include,omitempty"`
+	Instructions       json.RawMessage  `json:"instructions,omitempty"`
+	MaxOutputTokens    uint             `json:"max_output_tokens,omitempty"`
+	Metadata           json.RawMessage  `json:"metadata,omitempty"`
+	ParallelToolCalls  bool             `json:"parallel_tool_calls,omitempty"`
+	PreviousResponseID string           `json:"previous_response_id,omitempty"`
+	Reasoning          *Reasoning       `json:"reasoning,omitempty"`
+	ServiceTier        string           `json:"service_tier,omitempty"`
+	Store              bool             `json:"store,omitempty"`
+	Stream             bool             `json:"stream,omitempty"`
+	Temperature        float64          `json:"temperature,omitempty"`
+	Text               json.RawMessage  `json:"text,omitempty"`
+	ToolChoice         json.RawMessage  `json:"tool_choice,omitempty"`
+	Tools              []map[string]any `json:"tools,omitempty"` // few parameters need handling here; MCP tools carry too many uncertain fields, so use a map
+	TopP               float64          `json:"top_p,omitempty"`
+	Truncation         string           `json:"truncation,omitempty"`
+	User               string           `json:"user,omitempty"`
+	MaxToolCalls       uint             `json:"max_tool_calls,omitempty"`
+	Prompt             json.RawMessage  `json:"prompt,omitempty"`
 }
 
 type Reasoning struct {
@@ -630,23 +633,23 @@ type Reasoning struct {
 	Summary string `json:"summary,omitempty"`
 }
 
-type ResponsesToolsCall struct {
-	Type string `json:"type"`
-	// Web Search
-	UserLocation      json.RawMessage `json:"user_location,omitempty"`
-	SearchContextSize string          `json:"search_context_size,omitempty"`
-	// File Search
-	VectorStoreIds []string        `json:"vector_store_ids,omitempty"`
-	MaxNumResults  uint            `json:"max_num_results,omitempty"`
-	Filters        json.RawMessage `json:"filters,omitempty"`
-	// Computer Use
-	DisplayWidth  uint   `json:"display_width,omitempty"`
-	DisplayHeight uint   `json:"display_height,omitempty"`
-	Environment   string `json:"environment,omitempty"`
-	// Function
-	Name        string          `json:"name,omitempty"`
-	Description string          `json:"description,omitempty"`
-	Parameters  json.RawMessage `json:"parameters,omitempty"`
-	Function    json.RawMessage `json:"function,omitempty"`
-	Container   json.RawMessage `json:"container,omitempty"`
-}
+//type ResponsesToolsCall struct {
+//	Type string `json:"type"`
+//	// Web Search
+//	UserLocation      json.RawMessage `json:"user_location,omitempty"`
+//	SearchContextSize string          `json:"search_context_size,omitempty"`
+//	// File Search
+//	VectorStoreIds []string        `json:"vector_store_ids,omitempty"`
+//	MaxNumResults  uint            `json:"max_num_results,omitempty"`
+//	Filters        json.RawMessage `json:"filters,omitempty"`
+//	// Computer Use
+//	DisplayWidth  uint   `json:"display_width,omitempty"`
+//	DisplayHeight uint   `json:"display_height,omitempty"`
+//	Environment   string `json:"environment,omitempty"`
+//	// Function
+//	Name        string          `json:"name,omitempty"`
+//	Description string          `json:"description,omitempty"`
+//	Parameters  json.RawMessage `json:"parameters,omitempty"`
+//	Function    json.RawMessage `json:"function,omitempty"`
+//	Container   json.RawMessage `json:"container,omitempty"`
+//}
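
A minimal sketch, not part of this commit, of what the switch to []map[string]any buys: json.Unmarshal keeps every key of every tool object, so a web-search tool with a known field and an MCP tool with fields the relay never models round-trip through the same slice. The model name and tool payloads below are made-up examples.

package main

import (
	"encoding/json"
	"fmt"
)

// openAIResponsesRequest is a cut-down copy of dto.OpenAIResponsesRequest,
// keeping only the fields relevant to this sketch.
type openAIResponsesRequest struct {
	Model string           `json:"model"`
	Tools []map[string]any `json:"tools,omitempty"`
}

func main() {
	raw := []byte(`{
		"model": "gpt-4.1",
		"tools": [
			{"type": "web_search_preview", "search_context_size": "low"},
			{"type": "mcp", "server_label": "deepwiki", "server_url": "https://example.com/mcp"}
		]
	}`)

	var req openAIResponsesRequest
	if err := json.Unmarshal(raw, &req); err != nil {
		panic(err)
	}

	// Every key of every tool survives, including fields that had no
	// counterpart in the removed ResponsesToolsCall struct.
	for _, tool := range req.Tools {
		fmt.Println(tool["type"], "-", len(tool), "keys")
	}

	out, _ := json.Marshal(req)
	fmt.Println(string(out)) // the tool objects are passed through unchanged
}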

+ 22 - 22
dto/openai_response.go

@@ -200,28 +200,28 @@ type OutputTokenDetails struct {
 }
 
 type OpenAIResponsesResponse struct {
-	ID                 string               `json:"id"`
-	Object             string               `json:"object"`
-	CreatedAt          int                  `json:"created_at"`
-	Status             string               `json:"status"`
-	Error              *types.OpenAIError   `json:"error,omitempty"`
-	IncompleteDetails  *IncompleteDetails   `json:"incomplete_details,omitempty"`
-	Instructions       string               `json:"instructions"`
-	MaxOutputTokens    int                  `json:"max_output_tokens"`
-	Model              string               `json:"model"`
-	Output             []ResponsesOutput    `json:"output"`
-	ParallelToolCalls  bool                 `json:"parallel_tool_calls"`
-	PreviousResponseID string               `json:"previous_response_id"`
-	Reasoning          *Reasoning           `json:"reasoning"`
-	Store              bool                 `json:"store"`
-	Temperature        float64              `json:"temperature"`
-	ToolChoice         string               `json:"tool_choice"`
-	Tools              []ResponsesToolsCall `json:"tools"`
-	TopP               float64              `json:"top_p"`
-	Truncation         string               `json:"truncation"`
-	Usage              *Usage               `json:"usage"`
-	User               json.RawMessage      `json:"user"`
-	Metadata           json.RawMessage      `json:"metadata"`
+	ID                 string             `json:"id"`
+	Object             string             `json:"object"`
+	CreatedAt          int                `json:"created_at"`
+	Status             string             `json:"status"`
+	Error              *types.OpenAIError `json:"error,omitempty"`
+	IncompleteDetails  *IncompleteDetails `json:"incomplete_details,omitempty"`
+	Instructions       string             `json:"instructions"`
+	MaxOutputTokens    int                `json:"max_output_tokens"`
+	Model              string             `json:"model"`
+	Output             []ResponsesOutput  `json:"output"`
+	ParallelToolCalls  bool               `json:"parallel_tool_calls"`
+	PreviousResponseID string             `json:"previous_response_id"`
+	Reasoning          *Reasoning         `json:"reasoning"`
+	Store              bool               `json:"store"`
+	Temperature        float64            `json:"temperature"`
+	ToolChoice         string             `json:"tool_choice"`
+	Tools              []map[string]any   `json:"tools"`
+	TopP               float64            `json:"top_p"`
+	Truncation         string             `json:"truncation"`
+	Usage              *Usage             `json:"usage"`
+	User               json.RawMessage    `json:"user"`
+	Metadata           json.RawMessage    `json:"metadata"`
 }
 
 type IncompleteDetails struct {

+ 1 - 1
relay/channel/openai/relay_responses.go

@@ -42,7 +42,7 @@ func OaiResponsesHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http
 	usage.TotalTokens = responsesResponse.Usage.TotalTokens
 	// parse Tools usage
 	for _, tool := range responsesResponse.Tools {
-		info.ResponsesUsageInfo.BuiltInTools[tool.Type].CallCount++
+		info.ResponsesUsageInfo.BuiltInTools[common.Interface2String(tool["type"])].CallCount++
 	}
 	return &usage, nil
 }
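
The counter lookup above keys BuiltInTools by the string form of tool["type"]; the entries themselves are registered from the request in GenRelayInfoResponses (next hunk). A standalone sketch of that conversion, with interfaceToString as a hypothetical stand-in for the project's common.Interface2String helper:

package main

import "fmt"

// interfaceToString is a hypothetical stand-in for common.Interface2String:
// json.Unmarshal stores the "type" field of a map[string]any as an untyped
// interface value, so it has to be converted back into a string key.
func interfaceToString(v any) string {
	s, _ := v.(string) // "" when the key is absent or the value is not a string
	return s
}

func main() {
	tool := map[string]any{"type": "web_search_preview"}
	fmt.Println(interfaceToString(tool["type"]))    // web_search_preview
	fmt.Println(interfaceToString(tool["missing"])) // prints an empty line
}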

+ 8 - 6
relay/common/relay_info.go

@@ -180,16 +180,18 @@ func GenRelayInfoResponses(c *gin.Context, req *dto.OpenAIResponsesRequest) *Rel
 	}
 	if len(req.Tools) > 0 {
 		for _, tool := range req.Tools {
-			info.ResponsesUsageInfo.BuiltInTools[tool.Type] = &BuildInToolInfo{
-				ToolName:  tool.Type,
+			toolType := common.Interface2String(tool["type"])
+			info.ResponsesUsageInfo.BuiltInTools[toolType] = &BuildInToolInfo{
+				ToolName:  toolType,
 				CallCount: 0,
 			}
-			switch tool.Type {
+			switch toolType {
 			case dto.BuildInToolWebSearchPreview:
-				if tool.SearchContextSize == "" {
-					tool.SearchContextSize = "medium"
+				searchContextSize := common.Interface2String(tool["search_context_size"])
+				if searchContextSize == "" {
+					searchContextSize = "medium"
 				}
-				info.ResponsesUsageInfo.BuiltInTools[tool.Type].SearchContextSize = tool.SearchContextSize
+				info.ResponsesUsageInfo.BuiltInTools[toolType].SearchContextSize = searchContextSize
 			}
 		}
 	}
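
A small sketch of the defaulting added above, assuming dto.BuildInToolWebSearchPreview has the value "web_search_preview" (the constant is defined in dto; its literal value is assumed here): when the caller omits search_context_size, the usage info records "medium".

package main

import "fmt"

// buildInToolWebSearchPreview stands in for dto.BuildInToolWebSearchPreview;
// the literal value is an assumption made for this sketch.
const buildInToolWebSearchPreview = "web_search_preview"

// searchContextSize mirrors the defaulting in the hunk above.
func searchContextSize(tool map[string]any) string {
	size, _ := tool["search_context_size"].(string)
	if size == "" {
		size = "medium" // default recorded when the request omits the field
	}
	return size
}

func main() {
	fmt.Println(searchContextSize(map[string]any{"type": buildInToolWebSearchPreview})) // medium
	fmt.Println(searchContextSize(map[string]any{
		"type":                buildInToolWebSearchPreview,
		"search_context_size": "high",
	})) // high
}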