
Merge pull request #710 from hubutui/main

Fix temperature not being set to 0 due to json omitempty
Calcium-Ion 11 months ago
parent
commit 6cc9c36a22
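
With a plain float64 field tagged json:"temperature,omitempty", an explicit temperature of 0 is indistinguishable from the field's zero value, so encoding/json drops it from the serialized request; changing the field to *float64 keeps an explicit 0 while nil still means "not set". A minimal Go sketch of that difference, assuming standard encoding/json behavior (the struct names below are illustrative, not the project's DTOs):

package main

import (
	"encoding/json"
	"fmt"
)

// Illustrative structs only, not the project's actual DTOs.
type valueTemp struct {
	Temperature float64 `json:"temperature,omitempty"`
}

type pointerTemp struct {
	Temperature *float64 `json:"temperature,omitempty"`
}

func main() {
	zero := 0.0

	a, _ := json.Marshal(valueTemp{Temperature: 0})
	fmt.Println(string(a)) // {}  -- omitempty drops the explicit 0

	b, _ := json.Marshal(pointerTemp{Temperature: &zero})
	fmt.Println(string(b)) // {"temperature":0}  -- a non-nil pointer survives omitempty

	c, _ := json.Marshal(pointerTemp{Temperature: nil})
	fmt.Println(string(c)) // {}  -- nil still means "not set"
}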

+ 1 - 1
dto/openai_request.go

@@ -23,7 +23,7 @@ type GeneralOpenAIRequest struct {
 	MaxTokens           uint            `json:"max_tokens,omitempty"`
 	MaxCompletionTokens uint            `json:"max_completion_tokens,omitempty"`
 	ReasoningEffort     string          `json:"reasoning_effort,omitempty"`
-	Temperature         float64         `json:"temperature,omitempty"`
+	Temperature         *float64        `json:"temperature,omitempty"`
 	TopP                float64         `json:"top_p,omitempty"`
 	TopK                int             `json:"top_k,omitempty"`
 	Stop                any             `json:"stop,omitempty"`

+ 1 - 1
relay/channel/aws/dto.go

@@ -10,7 +10,7 @@ type AwsClaudeRequest struct {
 	System           string                 `json:"system,omitempty"`
 	Messages         []claude.ClaudeMessage `json:"messages"`
 	MaxTokens        uint                   `json:"max_tokens,omitempty"`
-	Temperature      float64                `json:"temperature,omitempty"`
+	Temperature      *float64               `json:"temperature,omitempty"`
 	TopP             float64                `json:"top_p,omitempty"`
 	TopK             int                    `json:"top_k,omitempty"`
 	StopSequences    []string               `json:"stop_sequences,omitempty"`

+ 1 - 1
relay/channel/baidu/dto.go

@@ -12,7 +12,7 @@ type BaiduMessage struct {
 
 type BaiduChatRequest struct {
 	Messages        []BaiduMessage `json:"messages"`
-	Temperature     float64        `json:"temperature,omitempty"`
+	Temperature     *float64       `json:"temperature,omitempty"`
 	TopP            float64        `json:"top_p,omitempty"`
 	PenaltyScore    float64        `json:"penalty_score,omitempty"`
 	Stream          bool           `json:"stream,omitempty"`

+ 1 - 1
relay/channel/claude/dto.go

@@ -50,7 +50,7 @@ type ClaudeRequest struct {
 	MaxTokens         uint            `json:"max_tokens,omitempty"`
 	MaxTokensToSample uint            `json:"max_tokens_to_sample,omitempty"`
 	StopSequences     []string        `json:"stop_sequences,omitempty"`
-	Temperature       float64         `json:"temperature,omitempty"`
+	Temperature       *float64        `json:"temperature,omitempty"`
 	TopP              float64         `json:"top_p,omitempty"`
 	TopK              int             `json:"top_k,omitempty"`
 	//ClaudeMetadata    `json:"metadata,omitempty"`

+ 1 - 1
relay/channel/cloudflare/dto.go

@@ -9,7 +9,7 @@ type CfRequest struct {
 	Prompt      string        `json:"prompt,omitempty"`
 	Raw         bool          `json:"raw,omitempty"`
 	Stream      bool          `json:"stream,omitempty"`
-	Temperature float64       `json:"temperature,omitempty"`
+	Temperature *float64      `json:"temperature,omitempty"`
 }
 
 type CfAudioResponse struct {

+ 1 - 1
relay/channel/gemini/dto.go

@@ -71,7 +71,7 @@ type GeminiChatTool struct {
 }
 
 type GeminiChatGenerationConfig struct {
-	Temperature      float64  `json:"temperature,omitempty"`
+	Temperature      *float64 `json:"temperature,omitempty"`
 	TopP             float64  `json:"topP,omitempty"`
 	TopK             float64  `json:"topK,omitempty"`
 	MaxOutputTokens  uint     `json:"maxOutputTokens,omitempty"`

+ 9 - 9
relay/channel/ollama/dto.go

@@ -6,7 +6,7 @@ type OllamaRequest struct {
 	Model            string         `json:"model,omitempty"`
 	Messages         []dto.Message  `json:"messages,omitempty"`
 	Stream           bool           `json:"stream,omitempty"`
-	Temperature      float64        `json:"temperature,omitempty"`
+	Temperature      *float64       `json:"temperature,omitempty"`
 	Seed             float64        `json:"seed,omitempty"`
 	Topp             float64        `json:"top_p,omitempty"`
 	TopK             int            `json:"top_k,omitempty"`
@@ -18,14 +18,14 @@ type OllamaRequest struct {
 }
 
 type Options struct {
-	Seed             int     `json:"seed,omitempty"`
-	Temperature      float64 `json:"temperature,omitempty"`
-	TopK             int     `json:"top_k,omitempty"`
-	TopP             float64 `json:"top_p,omitempty"`
-	FrequencyPenalty float64 `json:"frequency_penalty,omitempty"`
-	PresencePenalty  float64 `json:"presence_penalty,omitempty"`
-	NumPredict       int     `json:"num_predict,omitempty"`
-	NumCtx           int     `json:"num_ctx,omitempty"`
+	Seed             int      `json:"seed,omitempty"`
+	Temperature      *float64 `json:"temperature,omitempty"`
+	TopK             int      `json:"top_k,omitempty"`
+	TopP             float64  `json:"top_p,omitempty"`
+	FrequencyPenalty float64  `json:"frequency_penalty,omitempty"`
+	PresencePenalty  float64  `json:"presence_penalty,omitempty"`
+	NumPredict       int      `json:"num_predict,omitempty"`
+	NumCtx           int      `json:"num_ctx,omitempty"`
 }
 
 type OllamaEmbeddingRequest struct {

+ 1 - 1
relay/channel/palm/dto.go

@@ -18,7 +18,7 @@ type PaLMPrompt struct {
 
 type PaLMChatRequest struct {
 	Prompt         PaLMPrompt `json:"prompt"`
-	Temperature    float64    `json:"temperature,omitempty"`
+	Temperature    *float64   `json:"temperature,omitempty"`
 	CandidateCount int        `json:"candidateCount,omitempty"`
 	TopP           float64    `json:"topP,omitempty"`
 	TopK           uint       `json:"topK,omitempty"`

+ 1 - 3
relay/channel/tencent/relay-tencent.go

@@ -39,9 +39,7 @@ func requestOpenAI2Tencent(a *Adaptor, request dto.GeneralOpenAIRequest) *Tencen
 	if request.TopP != 0 {
 		req.TopP = &request.TopP
 	}
-	if request.Temperature != 0 {
-		req.Temperature = &request.Temperature
-	}
+	req.Temperature = request.Temperature
 	return &req
 }
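
With both sides of this assignment now *float64, the zero check is no longer needed: a nil pointer propagates as "unset" and is omitted downstream, while a pointer to 0 is forwarded as an explicit temperature of 0. A minimal sketch of the decode side under the same assumption about encoding/json (the inbound struct here is illustrative, not the project's request type):

package main

import (
	"encoding/json"
	"fmt"
)

// Illustrative inbound shape only, not the project's request type.
type inbound struct {
	Temperature *float64 `json:"temperature,omitempty"`
}

func main() {
	var explicitZero, omitted inbound
	_ = json.Unmarshal([]byte(`{"temperature": 0}`), &explicitZero)
	_ = json.Unmarshal([]byte(`{}`), &omitted)

	fmt.Println(explicitZero.Temperature != nil) // true  -- caller asked for 0, pointer is non-nil
	fmt.Println(omitted.Temperature == nil)      // true  -- caller left the field unset
}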
 

+ 1 - 1
relay/channel/vertex/dto.go

@@ -9,7 +9,7 @@ type VertexAIClaudeRequest struct {
 	MaxTokens        int                    `json:"max_tokens,omitempty"`
 	StopSequences    []string               `json:"stop_sequences,omitempty"`
 	Stream           bool                   `json:"stream,omitempty"`
-	Temperature      float64                `json:"temperature,omitempty"`
+	Temperature      *float64               `json:"temperature,omitempty"`
 	TopP             float64                `json:"top_p,omitempty"`
 	TopK             int                    `json:"top_k,omitempty"`
 	Tools            []claude.Tool          `json:"tools,omitempty"`

+ 5 - 5
relay/channel/xunfei/dto.go

@@ -13,11 +13,11 @@ type XunfeiChatRequest struct {
 	} `json:"header"`
 	Parameter struct {
 		Chat struct {
-			Domain      string  `json:"domain,omitempty"`
-			Temperature float64 `json:"temperature,omitempty"`
-			TopK        int     `json:"top_k,omitempty"`
-			MaxTokens   uint    `json:"max_tokens,omitempty"`
-			Auditing    bool    `json:"auditing,omitempty"`
+			Domain      string   `json:"domain,omitempty"`
+			Temperature *float64 `json:"temperature,omitempty"`
+			TopK        int      `json:"top_k,omitempty"`
+			MaxTokens   uint     `json:"max_tokens,omitempty"`
+			Auditing    bool     `json:"auditing,omitempty"`
 		} `json:"chat"`
 	} `json:"parameter"`
 	Payload struct {

+ 1 - 1
relay/channel/zhipu/dto.go

@@ -12,7 +12,7 @@ type ZhipuMessage struct {
 
 type ZhipuRequest struct {
 	Prompt      []ZhipuMessage `json:"prompt"`
-	Temperature float64        `json:"temperature,omitempty"`
+	Temperature *float64       `json:"temperature,omitempty"`
 	TopP        float64        `json:"top_p,omitempty"`
 	RequestId   string         `json:"request_id,omitempty"`
 	Incremental bool           `json:"incremental,omitempty"`