
fix: make defaultTemperature required in getModelParams to prevent silent temperature overrides (#11218)

* fix: DeepSeek temperature defaulting to 0 instead of 0.3

Pass defaultTemperature: DEEP_SEEK_DEFAULT_TEMPERATURE to getModelParams() in
DeepSeekHandler.getModel() to ensure the correct default temperature (0.3)
is used when no user configuration is provided.

Closes #11194

* refactor: make defaultTemperature required in getModelParams

Make the defaultTemperature parameter required in getModelParams() instead
of defaulting to 0. This prevents providers with their own non-zero default
temperature (like DeepSeek's 0.3) from being silently overridden by the
implicit 0 default.

Every provider now explicitly declares its temperature default, making the
temperature resolution chain clear:
  user setting → model default → provider default
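
A hedged TypeScript sketch of that chain (illustrative only: the
modelDefaultTemperature argument stands in for a hypothetical per-model
default and is not an actual ModelInfo field):

  // Resolution order: user setting, then model default, then provider default.
  function resolveTemperature(
    settings: { modelTemperature?: number | null },
    modelDefaultTemperature: number | undefined, // assumption, for illustration
    defaultTemperature: number, // provider default, now a required argument
  ): number {
    return settings.modelTemperature ?? modelDefaultTemperature ?? defaultTemperature
  }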

---------

Co-authored-by: Roo Code <[email protected]>
Co-authored-by: daniel-lxs <[email protected]>
roomote[bot] 6 days ago
parent commit 0e5407aa76

+ 15 - 1
src/api/providers/__tests__/deepseek.spec.ts

@@ -25,7 +25,7 @@ vi.mock("@ai-sdk/deepseek", () => ({
 
 import type { Anthropic } from "@anthropic-ai/sdk"
 
-import { deepSeekDefaultModelId, type ModelInfo } from "@roo-code/types"
+import { deepSeekDefaultModelId, DEEP_SEEK_DEFAULT_TEMPERATURE, type ModelInfo } from "@roo-code/types"
 
 import type { ApiHandlerOptions } from "../../../shared/api"
 
@@ -155,6 +155,20 @@ describe("DeepSeekHandler", () => {
 			expect(model).toHaveProperty("temperature")
 			expect(model).toHaveProperty("maxTokens")
 		})
+
+		it("should use DEEP_SEEK_DEFAULT_TEMPERATURE as the default temperature", () => {
+			const model = handler.getModel()
+			expect(model.temperature).toBe(DEEP_SEEK_DEFAULT_TEMPERATURE)
+		})
+
+		it("should respect user-provided temperature over DEEP_SEEK_DEFAULT_TEMPERATURE", () => {
+			const handlerWithTemp = new DeepSeekHandler({
+				...mockOptions,
+				modelTemperature: 0.9,
+			})
+			const model = handlerWithTemp.getModel()
+			expect(model.temperature).toBe(0.9)
+		})
 	})
 
 	describe("createMessage", () => {

+ 7 - 1
src/api/providers/anthropic-vertex.ts

@@ -231,7 +231,13 @@ export class AnthropicVertexHandler extends BaseProvider implements SingleComple
 			}
 		}
 
-		const params = getModelParams({ format: "anthropic", modelId: id, model: info, settings: this.options })
+		const params = getModelParams({
+			format: "anthropic",
+			modelId: id,
+			model: info,
+			settings: this.options,
+			defaultTemperature: 0,
+		})
 
 		// Build betas array for request headers
 		const betas: string[] = []

+ 1 - 0
src/api/providers/anthropic.ts

@@ -358,6 +358,7 @@ export class AnthropicHandler extends BaseProvider implements SingleCompletionHa
 			modelId: id,
 			model: info,
 			settings: this.options,
+			defaultTemperature: 0,
 		})
 
 		// The `:thinking` suffix indicates that the model is a "Hybrid"

+ 7 - 1
src/api/providers/cerebras.ts

@@ -49,7 +49,13 @@ export class CerebrasHandler extends BaseProvider implements SingleCompletionHan
 	override getModel(): { id: string; info: ModelInfo; maxTokens?: number; temperature?: number } {
 		const id = (this.options.apiModelId ?? cerebrasDefaultModelId) as CerebrasModelId
 		const info = cerebrasModels[id as keyof typeof cerebrasModels] || cerebrasModels[cerebrasDefaultModelId]
-		const params = getModelParams({ format: "openai", modelId: id, model: info, settings: this.options })
+		const params = getModelParams({
+			format: "openai",
+			modelId: id,
+			model: info,
+			settings: this.options,
+			defaultTemperature: CEREBRAS_DEFAULT_TEMPERATURE,
+		})
 		return { id, info, ...params }
 	}
 

+ 1 - 0
src/api/providers/deepinfra.ts

@@ -47,6 +47,7 @@ export class DeepInfraHandler extends RouterProvider implements SingleCompletion
 			modelId: id,
 			model: info,
 			settings: this.options,
+			defaultTemperature: 0,
 		})
 
 		return { id, info, ...params }

+ 7 - 1
src/api/providers/deepseek.ts

@@ -43,7 +43,13 @@ export class DeepSeekHandler extends BaseProvider implements SingleCompletionHan
 	override getModel(): { id: string; info: ModelInfo; maxTokens?: number; temperature?: number } {
 		const id = this.options.apiModelId ?? deepSeekDefaultModelId
 		const info = deepSeekModels[id as keyof typeof deepSeekModels] || deepSeekModels[deepSeekDefaultModelId]
-		const params = getModelParams({ format: "openai", modelId: id, model: info, settings: this.options })
+		const params = getModelParams({
+			format: "openai",
+			modelId: id,
+			model: info,
+			settings: this.options,
+			defaultTemperature: DEEP_SEEK_DEFAULT_TEMPERATURE,
+		})
 		return { id, info, ...params }
 	}
 

+ 7 - 1
src/api/providers/doubao.ts

@@ -64,7 +64,13 @@ export class DoubaoHandler extends OpenAiHandler {
 	override getModel() {
 		const id = this.options.apiModelId ?? doubaoDefaultModelId
 		const info = doubaoModels[id as keyof typeof doubaoModels] || doubaoModels[doubaoDefaultModelId]
-		const params = getModelParams({ format: "openai", modelId: id, model: info, settings: this.options })
+		const params = getModelParams({
+			format: "openai",
+			modelId: id,
+			model: info,
+			settings: this.options,
+			defaultTemperature: 0,
+		})
 		return { id, info, ...params }
 	}
 

+ 7 - 1
src/api/providers/mistral.ts

@@ -55,7 +55,13 @@ export class MistralHandler extends BaseProvider implements SingleCompletionHand
 	override getModel(): { id: string; info: ModelInfo; maxTokens?: number; temperature?: number } {
 		const id = (this.options.apiModelId ?? mistralDefaultModelId) as MistralModelId
 		const info = mistralModels[id as keyof typeof mistralModels] || mistralModels[mistralDefaultModelId]
-		const params = getModelParams({ format: "openai", modelId: id, model: info, settings: this.options })
+		const params = getModelParams({
+			format: "openai",
+			modelId: id,
+			model: info,
+			settings: this.options,
+			defaultTemperature: 0,
+		})
 		return { id, info, ...params }
 	}
 

+ 7 - 1
src/api/providers/moonshot.ts

@@ -29,7 +29,13 @@ export class MoonshotHandler extends OpenAICompatibleHandler {
 	override getModel() {
 		const id = this.options.apiModelId ?? moonshotDefaultModelId
 		const info = moonshotModels[id as keyof typeof moonshotModels] || moonshotModels[moonshotDefaultModelId]
-		const params = getModelParams({ format: "openai", modelId: id, model: info, settings: this.options })
+		const params = getModelParams({
+			format: "openai",
+			modelId: id,
+			model: info,
+			settings: this.options,
+			defaultTemperature: 0,
+		})
 		return { id, info, ...params }
 	}
 

+ 7 - 1
src/api/providers/openai.ts

@@ -282,7 +282,13 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 	override getModel() {
 		const id = this.options.openAiModelId ?? ""
 		const info: ModelInfo = this.options.openAiCustomModelInfo ?? openAiModelInfoSaneDefaults
-		const params = getModelParams({ format: "openai", modelId: id, model: info, settings: this.options })
+		const params = getModelParams({
+			format: "openai",
+			modelId: id,
+			model: info,
+			settings: this.options,
+			defaultTemperature: 0,
+		})
 		return { id, info, ...params }
 	}
 

+ 1 - 0
src/api/providers/requesty.ts

@@ -89,6 +89,7 @@ export class RequestyHandler extends BaseProvider implements SingleCompletionHan
 			modelId: id,
 			model: info,
 			settings: this.options,
+			defaultTemperature: 0,
 		})
 
 		return { id, info, ...params }

+ 1 - 0
src/api/providers/unbound.ts

@@ -70,6 +70,7 @@ export class UnboundHandler extends RouterProvider implements SingleCompletionHa
 			modelId: id,
 			model: info,
 			settings: this.options,
+			defaultTemperature: 0,
 		})
 
 		return { id, info, ...params }

+ 9 - 1
src/api/transform/__tests__/model-params.spec.ts

@@ -17,16 +17,19 @@ describe("getModelParams", () => {
 	const anthropicParams = {
 		modelId: "test",
 		format: "anthropic" as const,
+		defaultTemperature: 0,
 	}
 
 	const openaiParams = {
 		modelId: "test",
 		format: "openai" as const,
+		defaultTemperature: 0,
 	}
 
 	const openrouterParams = {
 		modelId: "test",
 		format: "openrouter" as const,
+		defaultTemperature: 0,
 	}
 
 	describe("Basic functionality", () => {
@@ -48,11 +51,12 @@ describe("getModelParams", () => {
 			})
 		})
 
-		it("should use default temperature of 0 when no defaultTemperature is provided", () => {
+		it("should use the provided defaultTemperature when no user or model temperature is set", () => {
 			const result = getModelParams({
 				...anthropicParams,
 				settings: {},
 				model: baseModel,
+				defaultTemperature: 0,
 			})
 
 			expect(result.temperature).toBe(0)
@@ -193,6 +197,7 @@ describe("getModelParams", () => {
 				format: "openrouter" as const,
 				settings: {},
 				model: baseModel,
+				defaultTemperature: 0,
 			})
 
 			expect(result.maxTokens).toBe(ANTHROPIC_DEFAULT_MAX_TOKENS)
@@ -214,6 +219,7 @@ describe("getModelParams", () => {
 				format: "openrouter" as const,
 				settings: {},
 				model: baseModel,
+				defaultTemperature: 0,
 			})
 
 			expect(result.maxTokens).toBeUndefined()
@@ -374,6 +380,7 @@ describe("getModelParams", () => {
 					format: "gemini" as const,
 					settings: { modelMaxTokens: 2000, modelMaxThinkingTokens: 50 },
 					model,
+					defaultTemperature: 0,
 				}),
 			).toEqual({
 				format: "gemini",
@@ -400,6 +407,7 @@ describe("getModelParams", () => {
 					format: "openrouter" as const,
 					settings: { modelMaxTokens: 4000 },
 					model,
+					defaultTemperature: 0,
 				}),
 			).toEqual({
 				format: "openrouter",

+ 2 - 2
src/api/transform/model-params.ts

@@ -33,7 +33,7 @@ type GetModelParamsOptions<T extends Format> = {
 	modelId: string
 	model: ModelInfo
 	settings: ProviderSettings
-	defaultTemperature?: number
+	defaultTemperature: number
 }
 
 type BaseModelParams = {
@@ -77,7 +77,7 @@ export function getModelParams({
 	modelId,
 	model,
 	settings,
-	defaultTemperature = 0,
+	defaultTemperature,
 }: GetModelParamsOptions<Format>): ModelParams {
 	const {
 		modelMaxTokens: customMaxTokens,
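
With defaultTemperature now a required property of GetModelParamsOptions,
any call site that omits it fails to type-check instead of silently falling
back to 0. A hedged before/after sketch (compiler message paraphrased):

  // Before this change: compiled, and the implicit default of 0 applied.
  getModelParams({ format: "openai", modelId: id, model: info, settings: this.options })
  // Now: rejected, since 'defaultTemperature' is missing from the argument
  // object; each provider must state its own default explicitly.
  getModelParams({ format: "openai", modelId: id, model: info, settings: this.options, defaultTemperature: 0 })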