// openrouter.test.ts
// npx jest src/api/providers/__tests__/openrouter.test.ts
import axios from "axios"
import { Anthropic } from "@anthropic-ai/sdk"
import OpenAI from "openai"
import { OpenRouterHandler } from "../openrouter"
import { ApiHandlerOptions, ModelInfo } from "../../../shared/api"
// Mock dependencies
// Jest hoists these mock registrations above the imports, so the handler under
// test always receives the mocked OpenAI client and axios — no real network I/O.
jest.mock("openai")
jest.mock("axios")
// `delay` resolves immediately so any retry/backoff path doesn't slow the suite.
jest.mock("delay", () => jest.fn(() => Promise.resolve()))
  11. const mockOpenRouterModelInfo: ModelInfo = {
  12. maxTokens: 1000,
  13. contextWindow: 2000,
  14. supportsPromptCache: false,
  15. inputPrice: 0.01,
  16. outputPrice: 0.02,
  17. }
describe("OpenRouterHandler", () => {
	// Options most tests construct the handler with; individual tests override as needed.
	const mockOptions: ApiHandlerOptions = {
		openRouterApiKey: "test-key",
		openRouterModelId: "test-model",
		openRouterModelInfo: mockOpenRouterModelInfo,
	}
	beforeEach(() => {
		// Reset mock call history between tests so per-test expectations don't leak.
		jest.clearAllMocks()
	})
	it("initializes with correct options", () => {
		const handler = new OpenRouterHandler(mockOptions)
		expect(handler).toBeInstanceOf(OpenRouterHandler)
		// The underlying OpenAI client must target OpenRouter's endpoint and carry
		// the app-attribution headers OpenRouter uses for ranking/identification.
		expect(OpenAI).toHaveBeenCalledWith({
			baseURL: "https://openrouter.ai/api/v1",
			apiKey: mockOptions.openRouterApiKey,
			defaultHeaders: {
				"HTTP-Referer": "https://github.com/RooVetGit/Roo-Cline",
				"X-Title": "Roo Code",
			},
		})
	})
	describe("getModel", () => {
		it("returns correct model info when options are provided", () => {
			const handler = new OpenRouterHandler(mockOptions)
			const result = handler.getModel()
			expect(result).toEqual({
				id: mockOptions.openRouterModelId,
				info: mockOptions.openRouterModelInfo,
				maxTokens: 1000,
				thinking: undefined,
				temperature: 0,
				reasoningEffort: undefined,
				topP: undefined,
				promptCache: {
					supported: false,
					optional: false,
				},
			})
		})
		it("returns default model info when options are not provided", () => {
			// With no model configured, the handler falls back to its built-in default.
			const handler = new OpenRouterHandler({})
			const result = handler.getModel()
			expect(result.id).toBe("anthropic/claude-3.7-sonnet")
			expect(result.info.supportsPromptCache).toBe(true)
		})
		it("honors custom maxTokens for thinking models", () => {
			const handler = new OpenRouterHandler({
				openRouterApiKey: "test-key",
				openRouterModelId: "test-model",
				openRouterModelInfo: {
					...mockOpenRouterModelInfo,
					maxTokens: 128_000,
					thinking: true,
				},
				modelMaxTokens: 32_768,
				modelMaxThinkingTokens: 16_384,
			})
			const result = handler.getModel()
			// Thinking models accept the user-supplied token budgets and pin temperature to 1.0.
			expect(result.maxTokens).toBe(32_768)
			expect(result.thinking).toEqual({ type: "enabled", budget_tokens: 16_384 })
			expect(result.temperature).toBe(1.0)
		})
		it("does not honor custom maxTokens for non-thinking models", () => {
			const handler = new OpenRouterHandler({
				...mockOptions,
				modelMaxTokens: 32_768,
				modelMaxThinkingTokens: 16_384,
			})
			const result = handler.getModel()
			// Non-thinking models ignore the overrides: model's own maxTokens, default temperature.
			expect(result.maxTokens).toBe(1000)
			expect(result.thinking).toBeUndefined()
			expect(result.temperature).toBe(0)
		})
	})
	describe("createMessage", () => {
		it("generates correct stream chunks", async () => {
			const handler = new OpenRouterHandler(mockOptions)
			// Fake SSE stream: one content delta followed by a usage-bearing chunk.
			const mockStream = {
				async *[Symbol.asyncIterator]() {
					yield {
						id: "test-id",
						choices: [{ delta: { content: "test response" } }],
					}
					yield {
						id: "test-id",
						choices: [{ delta: {} }],
						usage: { prompt_tokens: 10, completion_tokens: 20, cost: 0.001 },
					}
				},
			}
			// Mock OpenAI chat.completions.create
			const mockCreate = jest.fn().mockResolvedValue(mockStream)
			;(OpenAI as jest.MockedClass<typeof OpenAI>).prototype.chat = {
				completions: { create: mockCreate },
			} as any
			const systemPrompt = "test system prompt"
			const messages: Anthropic.Messages.MessageParam[] = [{ role: "user" as const, content: "test message" }]
			const generator = handler.createMessage(systemPrompt, messages)
			const chunks = []
			for await (const chunk of generator) {
				chunks.push(chunk)
			}
			// Verify stream chunks
			expect(chunks).toHaveLength(2) // One text chunk and one usage chunk
			expect(chunks[0]).toEqual({ type: "text", text: "test response" })
			expect(chunks[1]).toEqual({ type: "usage", inputTokens: 10, outputTokens: 20, totalCost: 0.001 })
			// Verify OpenAI client was called with correct parameters
			expect(mockCreate).toHaveBeenCalledWith(
				expect.objectContaining({
					model: mockOptions.openRouterModelId,
					temperature: 0,
					messages: expect.arrayContaining([
						{ role: "system", content: systemPrompt },
						{ role: "user", content: "test message" },
					]),
					stream: true,
				}),
			)
		})
		it("supports the middle-out transform", async () => {
			const handler = new OpenRouterHandler({
				...mockOptions,
				openRouterUseMiddleOutTransform: true,
			})
			const mockStream = {
				async *[Symbol.asyncIterator]() {
					yield {
						id: "test-id",
						choices: [{ delta: { content: "test response" } }],
					}
				},
			}
			const mockCreate = jest.fn().mockResolvedValue(mockStream)
			;(OpenAI as jest.MockedClass<typeof OpenAI>).prototype.chat = {
				completions: { create: mockCreate },
			} as any
			// NOTE(review): axios.get appears to need mocking on this path — presumably the
			// handler fetches model/endpoint metadata over HTTP; confirm against openrouter.ts.
			;(axios.get as jest.Mock).mockResolvedValue({ data: { data: {} } })
			await handler.createMessage("test", []).next()
			expect(mockCreate).toHaveBeenCalledWith(expect.objectContaining({ transforms: ["middle-out"] }))
		})
		it("adds cache control for supported models", async () => {
			const handler = new OpenRouterHandler({
				...mockOptions,
				openRouterModelInfo: {
					...mockOpenRouterModelInfo,
					supportsPromptCache: true,
				},
				openRouterModelId: "anthropic/claude-3.5-sonnet",
			})
			const mockStream = {
				async *[Symbol.asyncIterator]() {
					yield {
						id: "test-id",
						choices: [{ delta: { content: "test response" } }],
					}
				},
			}
			const mockCreate = jest.fn().mockResolvedValue(mockStream)
			;(OpenAI as jest.MockedClass<typeof OpenAI>).prototype.chat = {
				completions: { create: mockCreate },
			} as any
			;(axios.get as jest.Mock).mockResolvedValue({ data: { data: {} } })
			const messages: Anthropic.Messages.MessageParam[] = [
				{ role: "user", content: "message 1" },
				{ role: "assistant", content: "response 1" },
				{ role: "user", content: "message 2" },
			]
			await handler.createMessage("test system", messages).next()
			// Prompt-cacheable models get an ephemeral cache_control marker on the system content.
			expect(mockCreate).toHaveBeenCalledWith(
				expect.objectContaining({
					messages: expect.arrayContaining([
						expect.objectContaining({
							role: "system",
							content: expect.arrayContaining([
								expect.objectContaining({ cache_control: { type: "ephemeral" } }),
							]),
						}),
					]),
				}),
			)
		})
		it("handles API errors", async () => {
			const handler = new OpenRouterHandler(mockOptions)
			// OpenRouter reports failures as stream chunks carrying an `error` payload;
			// the handler is expected to surface them as a thrown Error.
			const mockStream = {
				async *[Symbol.asyncIterator]() {
					yield { error: { message: "API Error", code: 500 } }
				},
			}
			const mockCreate = jest.fn().mockResolvedValue(mockStream)
			;(OpenAI as jest.MockedClass<typeof OpenAI>).prototype.chat = {
				completions: { create: mockCreate },
			} as any
			const generator = handler.createMessage("test", [])
			await expect(generator.next()).rejects.toThrow("OpenRouter API Error 500: API Error")
		})
	})
	describe("completePrompt", () => {
		it("returns correct response", async () => {
			const handler = new OpenRouterHandler(mockOptions)
			const mockResponse = { choices: [{ message: { content: "test completion" } }] }
			const mockCreate = jest.fn().mockResolvedValue(mockResponse)
			;(OpenAI as jest.MockedClass<typeof OpenAI>).prototype.chat = {
				completions: { create: mockCreate },
			} as any
			const result = await handler.completePrompt("test prompt")
			expect(result).toBe("test completion")
			// Single-shot, non-streaming completion using the handler's configured model settings.
			expect(mockCreate).toHaveBeenCalledWith({
				model: mockOptions.openRouterModelId,
				max_tokens: 1000,
				thinking: undefined,
				temperature: 0,
				messages: [{ role: "user", content: "test prompt" }],
				stream: false,
			})
		})
		it("handles API errors", async () => {
			const handler = new OpenRouterHandler(mockOptions)
			// A successfully-resolved response whose body is an `error` object must still throw.
			const mockError = {
				error: {
					message: "API Error",
					code: 500,
				},
			}
			const mockCreate = jest.fn().mockResolvedValue(mockError)
			;(OpenAI as jest.MockedClass<typeof OpenAI>).prototype.chat = {
				completions: { create: mockCreate },
			} as any
			await expect(handler.completePrompt("test prompt")).rejects.toThrow("OpenRouter API Error 500: API Error")
		})
		it("handles unexpected errors", async () => {
			const handler = new OpenRouterHandler(mockOptions)
			const mockCreate = jest.fn().mockRejectedValue(new Error("Unexpected error"))
			;(OpenAI as jest.MockedClass<typeof OpenAI>).prototype.chat = {
				completions: { create: mockCreate },
			} as any
			// Errors thrown by the SDK itself propagate unchanged (no rewrapping).
			await expect(handler.completePrompt("test prompt")).rejects.toThrow("Unexpected error")
		})
	})
})
  256. })