
feat: add MiniMax-M2-Stable model and enable prompt caching (#9072)

Co-authored-by: Roo Code <[email protected]>
Co-authored-by: Daniel <[email protected]>
roomote[bot] 1 month ago
Parent commit: bf048492fb

packages/types/src/providers/minimax.ts (+19 -5)

@@ -1,8 +1,9 @@
 import type { ModelInfo } from "../model.js"
 
 // Minimax
-// https://www.minimax.io/platform/document/text_api_intro
-// https://www.minimax.io/platform/document/pricing
+// https://platform.minimax.io/docs/guides/pricing
+// https://platform.minimax.io/docs/api-reference/text-openai-api
+// https://platform.minimax.io/docs/api-reference/text-anthropic-api
 export type MinimaxModelId = keyof typeof minimaxModels
 export const minimaxDefaultModelId: MinimaxModelId = "MiniMax-M2"
 
@@ -11,15 +12,28 @@ export const minimaxModels = {
 		maxTokens: 16_384,
 		contextWindow: 192_000,
 		supportsImages: false,
-		supportsPromptCache: false,
+		supportsPromptCache: true,
 		inputPrice: 0.3,
 		outputPrice: 1.2,
-		cacheWritesPrice: 0,
-		cacheReadsPrice: 0,
+		cacheWritesPrice: 0.375,
+		cacheReadsPrice: 0.03,
 		preserveReasoning: true,
 		description:
 			"MiniMax M2, a model born for Agents and code, featuring Top-tier Coding Capabilities, Powerful Agentic Performance, and Ultimate Cost-Effectiveness & Speed.",
 	},
+	"MiniMax-M2-Stable": {
+		maxTokens: 16_384,
+		contextWindow: 192_000,
+		supportsImages: false,
+		supportsPromptCache: true,
+		inputPrice: 0.3,
+		outputPrice: 1.2,
+		cacheWritesPrice: 0.375,
+		cacheReadsPrice: 0.03,
+		preserveReasoning: true,
+		description:
+			"MiniMax M2 Stable (High Concurrency, Commercial Use), a model born for Agents and code, featuring Top-tier Coding Capabilities, Powerful Agentic Performance, and Ultimate Cost-Effectiveness & Speed.",
+	},
 } as const satisfies Record<string, ModelInfo>
 
 export const MINIMAX_DEFAULT_TEMPERATURE = 1.0
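
Not part of the change itself: a minimal TypeScript sketch of how the new cache pricing fields could feed a per-request cost estimate, assuming the ModelInfo prices are expressed per million tokens; the estimateCost helper and the token counts are hypothetical.

import { minimaxModels } from "./minimax.js"

// Hypothetical helper: estimate request cost in USD, assuming prices are per 1M tokens.
function estimateCost(
	modelId: keyof typeof minimaxModels,
	usage: { input: number; output: number; cacheWrites: number; cacheReads: number },
): number {
	const info = minimaxModels[modelId]
	const perMillion = (tokens: number, price?: number) => (tokens / 1_000_000) * (price ?? 0)
	return (
		perMillion(usage.input, info.inputPrice) +
		perMillion(usage.output, info.outputPrice) +
		perMillion(usage.cacheWrites, info.cacheWritesPrice) +
		perMillion(usage.cacheReads, info.cacheReadsPrice)
	)
}

// Example: cache reads ($0.03/M) cost a tenth of fresh input ($0.3/M), so repeated
// prompts against "MiniMax-M2-Stable" get noticeably cheaper once the cache is warm.
estimateCost("MiniMax-M2-Stable", { input: 1_000, output: 500, cacheWrites: 2_000, cacheReads: 50_000 })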

src/api/providers/__tests__/minimax.spec.ts (+34 -2)

@@ -82,7 +82,25 @@ describe("MiniMaxHandler", () => {
 			expect(model.info).toEqual(minimaxModels[testModelId])
 			expect(model.info.contextWindow).toBe(192_000)
 			expect(model.info.maxTokens).toBe(16_384)
-			expect(model.info.supportsPromptCache).toBe(false)
+			expect(model.info.supportsPromptCache).toBe(true)
+			expect(model.info.cacheWritesPrice).toBe(0.375)
+			expect(model.info.cacheReadsPrice).toBe(0.03)
+		})
+
+		it("should return MiniMax-M2-Stable model with correct configuration", () => {
+			const testModelId: MinimaxModelId = "MiniMax-M2-Stable"
+			const handlerWithModel = new MiniMaxHandler({
+				apiModelId: testModelId,
+				minimaxApiKey: "test-minimax-api-key",
+			})
+			const model = handlerWithModel.getModel()
+			expect(model.id).toBe(testModelId)
+			expect(model.info).toEqual(minimaxModels[testModelId])
+			expect(model.info.contextWindow).toBe(192_000)
+			expect(model.info.maxTokens).toBe(16_384)
+			expect(model.info.supportsPromptCache).toBe(true)
+			expect(model.info.cacheWritesPrice).toBe(0.375)
+			expect(model.info.cacheReadsPrice).toBe(0.03)
 		})
 	})
 
@@ -269,9 +287,23 @@ describe("MiniMaxHandler", () => {
 			expect(model.maxTokens).toBe(16_384)
 			expect(model.contextWindow).toBe(192_000)
 			expect(model.supportsImages).toBe(false)
-			expect(model.supportsPromptCache).toBe(false)
+			expect(model.supportsPromptCache).toBe(true)
+			expect(model.inputPrice).toBe(0.3)
+			expect(model.outputPrice).toBe(1.2)
+			expect(model.cacheWritesPrice).toBe(0.375)
+			expect(model.cacheReadsPrice).toBe(0.03)
+		})
+
+		it("should correctly configure MiniMax-M2-Stable model properties", () => {
+			const model = minimaxModels["MiniMax-M2-Stable"]
+			expect(model.maxTokens).toBe(16_384)
+			expect(model.contextWindow).toBe(192_000)
+			expect(model.supportsImages).toBe(false)
+			expect(model.supportsPromptCache).toBe(true)
 			expect(model.inputPrice).toBe(0.3)
 			expect(model.outputPrice).toBe(1.2)
+			expect(model.cacheWritesPrice).toBe(0.375)
+			expect(model.cacheReadsPrice).toBe(0.03)
 		})
 	})
 })
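
For reference, selecting the new model from application code mirrors what the tests above exercise; a sketch, assuming the handler is exported from the provider module next to the test file and using a placeholder API key.

import { MiniMaxHandler } from "../minimax"

// Sketch only: instantiate the handler with the new stable model id, as the tests do.
const handler = new MiniMaxHandler({
	apiModelId: "MiniMax-M2-Stable",
	minimaxApiKey: "<your-minimax-api-key>", // placeholder
})

const { id, info } = handler.getModel()
// id === "MiniMax-M2-Stable"; info.supportsPromptCache === true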