@@ -23,12 +23,22 @@ describe("OpenRouter API", () => {
 		const models = await getOpenRouterModels()

-		expect(
-			Object.entries(models)
-				.filter(([_, model]) => model.supportsPromptCache)
-				.map(([id, _]) => id)
-				.sort(),
-		).toEqual(Array.from(OPEN_ROUTER_PROMPT_CACHING_MODELS).sort())
+		const openRouterSupportedCaching = Object.entries(models)
+			.filter(([_, model]) => model.supportsPromptCache)
+			.map(([id, _]) => id)
+
+		const ourCachingModels = Array.from(OPEN_ROUTER_PROMPT_CACHING_MODELS)
+
+		// Verify all our caching models are actually supported by OpenRouter
+		for (const modelId of ourCachingModels) {
+			expect(openRouterSupportedCaching).toContain(modelId)
+		}
+
+		// Verify we have all supported models except intentionally excluded ones
+		const excludedModels = new Set(["google/gemini-2.5-pro-preview"]) // Excluded due to lag issue (#4487)
+		const expectedCachingModels = openRouterSupportedCaching.filter((id) => !excludedModels.has(id)).sort()
+
+		expect(ourCachingModels.sort()).toEqual(expectedCachingModels)

 		expect(
 			Object.entries(models)