Browse Source

Merge pull request #698 from RooVetGit/more_o3_mini_fixes

Make o3-mini work in glama
Matt Rubens 11 months ago
parent
commit
3d67133435
1 changed file with 30 additions and 21 deletions
  1. 30 21
      src/api/providers/glama.ts

+ 30 - 21
src/api/providers/glama.ts

@@ -72,28 +72,30 @@ export class GlamaHandler implements ApiHandler, SingleCompletionHandler {
 			maxTokens = 8_192
 		}
 
+		const requestOptions: OpenAI.Chat.ChatCompletionCreateParams = {
+			model: this.getModel().id,
+			max_tokens: maxTokens,
+			messages: openAiMessages,
+			stream: true,
+		}
+
+		if (this.supportsTemperature()) {
+			requestOptions.temperature = 0
+		}
+
 		const { data: completion, response } = await this.client.chat.completions
-			.create(
-				{
-					model: this.getModel().id,
-					max_tokens: maxTokens,
-					temperature: 0,
-					messages: openAiMessages,
-					stream: true,
-				},
-				{
-					headers: {
-						"X-Glama-Metadata": JSON.stringify({
-							labels: [
-								{
-									key: "app",
-									value: "vscode.rooveterinaryinc.roo-cline",
-								},
-							],
-						}),
-					},
+			.create(requestOptions, {
+				headers: {
+					"X-Glama-Metadata": JSON.stringify({
+						labels: [
+							{
+								key: "app",
+								value: "vscode.rooveterinaryinc.roo-cline",
+							},
+						],
+					}),
 				},
-			)
+			})
 			.withResponse()
 
 		const completionRequestId = response.headers.get("x-completion-request-id")
@@ -148,6 +150,10 @@ export class GlamaHandler implements ApiHandler, SingleCompletionHandler {
 		}
 	}
 
+	private supportsTemperature(): boolean {
+		return !this.getModel().id.startsWith("openai/o3-mini")
+	}
+
 	getModel(): { id: string; info: ModelInfo } {
 		const modelId = this.options.glamaModelId
 		const modelInfo = this.options.glamaModelInfo
@@ -164,7 +170,10 @@ export class GlamaHandler implements ApiHandler, SingleCompletionHandler {
 			const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming = {
 				model: this.getModel().id,
 				messages: [{ role: "user", content: prompt }],
-				temperature: 0,
+			}
+
+			if (this.supportsTemperature()) {
+				requestOptions.temperature = 0
 			}
 
 			if (this.getModel().id.startsWith("anthropic/")) {