// openai-embeddings-forwarder.test.ts
  1. import { beforeEach, describe, expect, it, vi } from "vitest";
  2. import { resolveEndpointPolicy } from "@/app/v1/_lib/proxy/endpoint-policy";
  3. import { ProxyForwarder } from "@/app/v1/_lib/proxy/forwarder";
  4. import { ProxySession } from "@/app/v1/_lib/proxy/session";
  5. import type { Provider } from "@/types/provider";
  6. vi.mock("@/lib/logger", () => ({
  7. logger: {
  8. debug: vi.fn(),
  9. error: vi.fn(),
  10. info: vi.fn(),
  11. trace: vi.fn(),
  12. warn: vi.fn(),
  13. fatal: vi.fn(),
  14. },
  15. }));
  16. vi.mock("@/lib/request-filter-engine", () => ({
  17. requestFilterEngine: {
  18. applyFinal: vi.fn(async () => {}),
  19. },
  20. }));
  21. function createProvider(): Provider {
  22. return {
  23. id: 1,
  24. name: "openai-upstream",
  25. providerType: "openai-compatible",
  26. url: "https://openai.example.com/openai",
  27. key: "upstream-key",
  28. preserveClientIp: false,
  29. priority: 0,
  30. costMultiplier: 1,
  31. maxRetryAttempts: 1,
  32. mcpPassthroughType: "minimax",
  33. mcpPassthroughUrl: "https://mcp.example.com",
  34. } as unknown as Provider;
  35. }
  36. function createSession(): ProxySession {
  37. const headers = new Headers({
  38. "content-type": "application/json",
  39. authorization: "Bearer proxy-user-key",
  40. });
  41. const session = Object.create(ProxySession.prototype);
  42. Object.assign(session, {
  43. startTime: Date.now(),
  44. method: "POST",
  45. requestUrl: new URL("https://proxy.example.com/v1/embeddings"),
  46. headers,
  47. originalHeaders: new Headers(headers),
  48. headerLog: JSON.stringify(Object.fromEntries(headers.entries())),
  49. request: {
  50. model: "text-embedding-3-large",
  51. log: JSON.stringify({
  52. model: "text-embedding-3-large",
  53. input: "embedding me",
  54. }),
  55. message: {
  56. model: "text-embedding-3-large",
  57. input: "embedding me",
  58. },
  59. },
  60. userAgent: "OpenAITest/1.0",
  61. context: null,
  62. clientAbortSignal: null,
  63. userName: "test-user",
  64. authState: { success: true, user: null, key: null, apiKey: null },
  65. provider: null,
  66. messageContext: null,
  67. sessionId: null,
  68. requestSequence: 1,
  69. originalFormat: "openai",
  70. providerType: null,
  71. originalModelName: null,
  72. originalUrlPathname: null,
  73. providerChain: [],
  74. cacheTtlResolved: null,
  75. context1mApplied: false,
  76. cachedPriceData: undefined,
  77. cachedBillingModelSource: undefined,
  78. forwardedRequestBody: null,
  79. endpointPolicy: resolveEndpointPolicy("/v1/embeddings"),
  80. setCacheTtlResolved: vi.fn(),
  81. getCacheTtlResolved: vi.fn(() => null),
  82. getCurrentModel: vi.fn(() => "text-embedding-3-large"),
  83. clientRequestsContext1m: vi.fn(() => false),
  84. setContext1mApplied: vi.fn(),
  85. getContext1mApplied: vi.fn(() => false),
  86. getEndpointPolicy: vi.fn(() => resolveEndpointPolicy("/v1/embeddings")),
  87. isHeaderModified: vi.fn(() => false),
  88. });
  89. return session as ProxySession;
  90. }
  91. describe("ProxyForwarder - OpenAI embeddings standard endpoint handling", () => {
  92. beforeEach(() => {
  93. vi.clearAllMocks();
  94. });
  95. it("does not route /v1/embeddings through MCP passthrough URL", async () => {
  96. const provider = createProvider();
  97. const session = createSession();
  98. let capturedUrl: string | null = null;
  99. const fetchWithoutAutoDecode = vi.spyOn(ProxyForwarder as never, "fetchWithoutAutoDecode");
  100. fetchWithoutAutoDecode.mockImplementationOnce(async (url: string) => {
  101. capturedUrl = url;
  102. return new Response(
  103. JSON.stringify({
  104. object: "list",
  105. data: [{ object: "embedding", embedding: [0.1, 0.2], index: 0 }],
  106. model: "text-embedding-3-large",
  107. usage: { prompt_tokens: 3, total_tokens: 3 },
  108. }),
  109. {
  110. status: 200,
  111. headers: { "content-type": "application/json" },
  112. }
  113. );
  114. });
  115. const { doForward } = ProxyForwarder as unknown as {
  116. doForward: (session: ProxySession, provider: Provider, baseUrl: string) => Promise<Response>;
  117. };
  118. await doForward(session, provider, provider.url);
  119. expect(capturedUrl).toBe("https://openai.example.com/openai/v1/embeddings");
  120. expect(capturedUrl?.startsWith("https://mcp.example.com")).toBe(false);
  121. });
  122. });