Default to using native tools when supported on openrouter (#9878)

Matt Rubens · 1 month ago · commit 2eae32104e

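The gist of the change: when a model's OpenRouter `supported_parameters` list includes `"tools"`, the parsed `ModelInfo` now also carries `defaultToolProtocol: "native"`. A minimal sketch of that logic, for illustration only (`deriveToolDefaults` is a hypothetical helper; the actual code is inline in `parseOpenRouterModel`, shown in the diff below):

```typescript
// Hypothetical helper sketching the new default-protocol behavior.
// `supportedParameters` comes from the OpenRouter models endpoint and may be undefined.
function deriveToolDefaults(supportedParameters?: string[]) {
	const supportsNativeTools = supportedParameters ? supportedParameters.includes("tools") : undefined

	return {
		supportsNativeTools,
		// Only default to the native tool protocol when the model advertises tool support.
		defaultToolProtocol: supportsNativeTools ? ("native" as const) : undefined,
	}
}

// deriveToolDefaults(["tools", "max_tokens"]) -> { supportsNativeTools: true,  defaultToolProtocol: "native" }
// deriveToolDefaults(["max_tokens"])          -> { supportsNativeTools: false, defaultToolProtocol: undefined }
// deriveToolDefaults(undefined)               -> { supportsNativeTools: undefined, defaultToolProtocol: undefined }
```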
src/api/providers/fetchers/__tests__/openrouter.spec.ts (+52 -0)

@@ -30,6 +30,7 @@ describe("OpenRouter API", () => {
 				supportsReasoningEffort: false,
 				supportsNativeTools: true,
 				supportedParameters: ["max_tokens", "temperature", "reasoning", "include_reasoning"],
+				defaultToolProtocol: "native",
 			})
 
 			expect(models["anthropic/claude-3.7-sonnet:thinking"]).toEqual({
@@ -47,6 +48,7 @@ describe("OpenRouter API", () => {
 				supportsReasoningEffort: true,
 				supportsNativeTools: true,
 				supportedParameters: ["max_tokens", "temperature", "reasoning", "include_reasoning"],
+				defaultToolProtocol: "native",
 			})
 
 			expect(models["google/gemini-2.5-flash-preview-05-20"].maxTokens).toEqual(65535)
@@ -390,5 +392,55 @@ describe("OpenRouter API", () => {
 			expect(textResult.maxTokens).toBe(64000)
 			expect(imageResult.maxTokens).toBe(64000)
 		})
+
+		it("sets defaultToolProtocol to native when model supports native tools", () => {
+			const mockModel = {
+				name: "Tools Model",
+				description: "Model with native tool support",
+				context_length: 128000,
+				max_completion_tokens: 8192,
+				pricing: {
+					prompt: "0.000003",
+					completion: "0.000015",
+				},
+			}
+
+			const resultWithTools = parseOpenRouterModel({
+				id: "test/tools-model",
+				model: mockModel,
+				inputModality: ["text"],
+				outputModality: ["text"],
+				maxTokens: 8192,
+				supportedParameters: ["tools", "max_tokens", "temperature"],
+			})
+
+			expect(resultWithTools.supportsNativeTools).toBe(true)
+			expect(resultWithTools.defaultToolProtocol).toBe("native")
+		})
+
+		it("does not set defaultToolProtocol when model does not support native tools", () => {
+			const mockModel = {
+				name: "No Tools Model",
+				description: "Model without native tool support",
+				context_length: 128000,
+				max_completion_tokens: 8192,
+				pricing: {
+					prompt: "0.000003",
+					completion: "0.000015",
+				},
+			}
+
+			const resultWithoutTools = parseOpenRouterModel({
+				id: "test/no-tools-model",
+				model: mockModel,
+				inputModality: ["text"],
+				outputModality: ["text"],
+				maxTokens: 8192,
+				supportedParameters: ["max_tokens", "temperature"],
+			})
+
+			expect(resultWithoutTools.supportsNativeTools).toBe(false)
+			expect(resultWithoutTools.defaultToolProtocol).toBeUndefined()
+		})
 	})
 })

src/api/providers/fetchers/openrouter.ts (+5 -1)

@@ -207,6 +207,8 @@ export const parseOpenRouterModel = ({
 
 	const supportsPromptCache = typeof cacheReadsPrice !== "undefined" // some models support caching but don't charge a cacheWritesPrice, e.g. GPT-5
 
+	const supportsNativeTools = supportedParameters ? supportedParameters.includes("tools") : undefined
+
 	const modelInfo: ModelInfo = {
 		maxTokens: maxTokens || Math.ceil(model.context_length * 0.2),
 		contextWindow: model.context_length,
@@ -218,8 +220,10 @@ export const parseOpenRouterModel = ({
 		cacheReadsPrice,
 		description: model.description,
 		supportsReasoningEffort: supportedParameters ? supportedParameters.includes("reasoning") : undefined,
-		supportsNativeTools: supportedParameters ? supportedParameters.includes("tools") : undefined,
+		supportsNativeTools,
 		supportedParameters: supportedParameters ? supportedParameters.filter(isModelParameter) : undefined,
+		// Default to native tool protocol when native tools are supported
+		defaultToolProtocol: supportsNativeTools ? ("native" as const) : undefined,
 	}
 
 	if (OPEN_ROUTER_REASONING_BUDGET_MODELS.has(id)) {