Browse Source

fix: use anthropic protocol for token counting when using anthropic models via Vercel AI Gateway (#7433)

- Added condition in getApiProtocol to return 'anthropic' for vercel-ai-gateway when modelId starts with 'anthropic/'
- Added tests for Vercel AI Gateway provider protocol detection

This ensures proper token counting for Anthropic models accessed through Vercel AI Gateway, as Anthropic and OpenAI count tokens differently (Anthropic excludes cache tokens from input count, OpenAI includes them).
Commit ff1f4f0398 by Daniel, 4 months ago

+ 20 - 0
packages/types/src/__tests__/provider-settings.test.ts

@@ -39,6 +39,26 @@ describe("getApiProtocol", () => {
 		})
 	})
 
+	describe("Vercel AI Gateway provider", () => {
+		it("should return 'anthropic' for vercel-ai-gateway provider with anthropic models", () => {
+			expect(getApiProtocol("vercel-ai-gateway", "anthropic/claude-3-opus")).toBe("anthropic")
+			expect(getApiProtocol("vercel-ai-gateway", "anthropic/claude-3.5-sonnet")).toBe("anthropic")
+			expect(getApiProtocol("vercel-ai-gateway", "ANTHROPIC/claude-sonnet-4")).toBe("anthropic")
+			expect(getApiProtocol("vercel-ai-gateway", "anthropic/claude-opus-4.1")).toBe("anthropic")
+		})
+
+		it("should return 'openai' for vercel-ai-gateway provider with non-anthropic models", () => {
+			expect(getApiProtocol("vercel-ai-gateway", "openai/gpt-4")).toBe("openai")
+			expect(getApiProtocol("vercel-ai-gateway", "google/gemini-pro")).toBe("openai")
+			expect(getApiProtocol("vercel-ai-gateway", "meta/llama-3")).toBe("openai")
+			expect(getApiProtocol("vercel-ai-gateway", "mistral/mixtral")).toBe("openai")
+		})
+
+		it("should return 'openai' for vercel-ai-gateway provider without model", () => {
+			expect(getApiProtocol("vercel-ai-gateway")).toBe("openai")
+		})
+	})
+
 	describe("Other providers", () => {
 		it("should return 'openai' for non-anthropic providers regardless of model", () => {
 			expect(getApiProtocol("openrouter", "claude-3-opus")).toBe("openai")

+ 5 - 0
packages/types/src/provider-settings.ts

@@ -453,6 +453,11 @@ export const getApiProtocol = (provider: ProviderName | undefined, modelId?: str
 		return "anthropic"
 	}
 
+	// Vercel AI Gateway uses anthropic protocol for anthropic models
+	if (provider && provider === "vercel-ai-gateway" && modelId && modelId.toLowerCase().startsWith("anthropic/")) {
+		return "anthropic"
+	}
+
 	return "openai"
 }