
Discards temperature setting on o3-mini for Unbound (#1836)

* Discards temperature setting on o3-mini for Unbound

* Adds changeset

---------

Co-authored-by: Pugazhendhi <[email protected]>
pugazhendhi-m 9 months ago
Parent
Commit
e4c398d888

+ 7 - 0
.changeset/twenty-spoons-shout.md

@@ -0,0 +1,7 @@
+---
+"roo-cline": patch
+---
+
+Adds a function that applies the temperature setting based on the model id.
+This is needed because openai/o3-mini does not support the temperature parameter, which causes requests to fail.
+This update allows users to run o3-mini on Unbound without issues.
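
For reference, a minimal sketch of the pattern this commit applies (not the actual source; the standalone `buildCompletionParams` helper and its signature are invented for illustration, while the `openai/o3-mini` prefix check mirrors the diff below):

```ts
import OpenAI from "openai"

// Illustrative sketch: build chat-completion params and include `temperature`
// only for models that accept it. openai/o3-mini rejects the parameter, so the
// key is omitted entirely rather than sent with a default value.
function buildCompletionParams(
	modelId: string,
	messages: OpenAI.Chat.ChatCompletionMessageParam[],
	temperature?: number,
): OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming {
	const params: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming = {
		model: modelId.split("/")[1], // "openai/o3-mini" -> "o3-mini"
		messages,
	}

	// Skip the parameter for the o3-mini family; same check as the handler below.
	if (!modelId.startsWith("openai/o3-mini")) {
		params.temperature = temperature ?? 0
	}

	return params
}
```

For `openai/o3-mini` the resulting object has no `temperature` key at all, which is exactly what the new test below asserts with `not.toHaveProperty("temperature")`.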

+ 32 - 0
src/api/providers/__tests__/unbound.test.ts

@@ -246,6 +246,38 @@ describe("UnboundHandler", () => {
 			)
 			expect(mockCreate.mock.calls[0][0]).not.toHaveProperty("max_tokens")
 		})
+
+		it("should not set temperature for openai/o3-mini", async () => {
+			mockCreate.mockClear()
+
+			const openaiOptions = {
+				apiModelId: "openai/o3-mini",
+				unboundApiKey: "test-key",
+				unboundModelId: "openai/o3-mini",
+				unboundModelInfo: {
+					maxTokens: undefined,
+					contextWindow: 128000,
+					supportsPromptCache: true,
+					inputPrice: 0.01,
+					outputPrice: 0.03,
+				},
+			}
+			const openaiHandler = new UnboundHandler(openaiOptions)
+
+			await openaiHandler.completePrompt("Test prompt")
+			expect(mockCreate).toHaveBeenCalledWith(
+				expect.objectContaining({
+					model: "o3-mini",
+					messages: [{ role: "user", content: "Test prompt" }],
+				}),
+				expect.objectContaining({
+					headers: expect.objectContaining({
+						"X-Unbound-Metadata": expect.stringContaining("roo-code"),
+					}),
+				}),
+			)
+			expect(mockCreate.mock.calls[0][0]).not.toHaveProperty("temperature")
+		})
 	})
 
 	describe("getModel", () => {

+ 30 - 21
src/api/providers/unbound.ts

@@ -25,6 +25,10 @@ export class UnboundHandler extends BaseProvider implements SingleCompletionHand
 		this.client = new OpenAI({ baseURL, apiKey })
 	}
 
+	private supportsTemperature(): boolean {
+		return !this.getModel().id.startsWith("openai/o3-mini")
+	}
+
 	override async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
 		// Convert Anthropic messages to OpenAI format
 		const openAiMessages: OpenAI.Chat.ChatCompletionMessageParam[] = [
@@ -78,28 +82,30 @@ export class UnboundHandler extends BaseProvider implements SingleCompletionHand
 			maxTokens = this.getModel().info.maxTokens
 		}
 
+		const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming = {
+			model: this.getModel().id.split("/")[1],
+			max_tokens: maxTokens,
+			messages: openAiMessages,
+			stream: true,
+		}
+
+		if (this.supportsTemperature()) {
+			requestOptions.temperature = this.options.modelTemperature ?? 0
+		}
+
 		const { data: completion, response } = await this.client.chat.completions
-			.create(
-				{
-					model: this.getModel().id.split("/")[1],
-					max_tokens: maxTokens,
-					temperature: this.options.modelTemperature ?? 0,
-					messages: openAiMessages,
-					stream: true,
-				},
-				{
-					headers: {
-						"X-Unbound-Metadata": JSON.stringify({
-							labels: [
-								{
-									key: "app",
-									value: "roo-code",
-								},
-							],
-						}),
-					},
+			.create(requestOptions, {
+				headers: {
+					"X-Unbound-Metadata": JSON.stringify({
+						labels: [
+							{
+								key: "app",
+								value: "roo-code",
+							},
+						],
+					}),
 				},
-			)
+			})
 			.withResponse()
 
 		for await (const chunk of completion) {
@@ -150,7 +156,10 @@ export class UnboundHandler extends BaseProvider implements SingleCompletionHand
 			const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming = {
 				model: this.getModel().id.split("/")[1],
 				messages: [{ role: "user", content: prompt }],
-				temperature: this.options.modelTemperature ?? 0,
+			}
+
+			if (this.supportsTemperature()) {
+				requestOptions.temperature = this.options.modelTemperature ?? 0
 			}
 
 			if (this.getModel().id.startsWith("anthropic/")) {
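
One possible generalization, not part of this commit, would be to track temperature support as a set of model-id prefixes rather than a single hard-coded check, so future models that reject the parameter can be added in one place. A hedged sketch, assuming the same `getModel().id` shape used above:

```ts
// Hypothetical variant: keep the models that reject `temperature` in one list.
const NO_TEMPERATURE_PREFIXES = ["openai/o3-mini"]

function supportsTemperature(modelId: string): boolean {
	return !NO_TEMPERATURE_PREFIXES.some((prefix) => modelId.startsWith(prefix))
}

// Usage mirroring the handler code above:
// if (supportsTemperature(this.getModel().id)) {
//     requestOptions.temperature = this.options.modelTemperature ?? 0
// }
```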