proxy-handler-session-id-error.test.ts

import { describe, expect, test, vi } from "vitest";
import { ProxyResponses } from "@/app/v1/_lib/proxy/responses";
import { ProxyError } from "@/app/v1/_lib/proxy/errors";
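
// Shared mutable fixture state, hoisted so the vi.mock factories below can
// reference it; each test resets the fields it needs before invoking the handler.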
const h = vi.hoisted(() => ({
  session: {
    originalFormat: "openai",
    sessionId: "s_123",
    requestUrl: new URL("http://localhost/v1/messages"),
    request: {
      model: "gpt",
      message: {},
    },
    isCountTokensRequest: () => false,
    setOriginalFormat: () => {},
    recordForwardStart: () => {},
    messageContext: null,
    provider: null,
  } as any,
  fromContextError: null as unknown,
  pipelineError: null as unknown,
  earlyResponse: null as Response | null,
  forwardResponse: new Response("ok", { status: 200 }),
  dispatchedResponse: null as Response | null,
  endpointFormat: null as string | null,
  trackerCalls: [] as string[],
}));
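
// Stub out the proxy pipeline modules. Each mock is driven by the fields in `h`,
// so a test can force an early response, a forwarded/dispatched response, or a
// thrown error at any stage.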
vi.mock("@/app/v1/_lib/proxy/session", () => ({
  ProxySession: {
    fromContext: async () => {
      if (h.fromContextError) throw h.fromContextError;
      return h.session;
    },
  },
}));

vi.mock("@/app/v1/_lib/proxy/guard-pipeline", () => ({
  RequestType: { CHAT: "CHAT", COUNT_TOKENS: "COUNT_TOKENS" },
  GuardPipelineBuilder: {
    fromRequestType: () => ({
      run: async () => {
        if (h.pipelineError) throw h.pipelineError;
        return h.earlyResponse;
      },
    }),
  },
}));

vi.mock("@/app/v1/_lib/proxy/format-mapper", () => ({
  detectClientFormat: () => "openai",
  detectFormatByEndpoint: () => h.endpointFormat,
}));

vi.mock("@/app/v1/_lib/proxy/forwarder", () => ({
  ProxyForwarder: {
    send: async () => h.forwardResponse,
  },
}));

vi.mock("@/app/v1/_lib/proxy/response-handler", () => ({
  ProxyResponseHandler: {
    dispatch: async () => h.dispatchedResponse ?? h.forwardResponse,
  },
}));

vi.mock("@/app/v1/_lib/proxy/error-handler", () => ({
  ProxyErrorHandler: {
    handle: async () => new Response("handled", { status: 502 }),
  },
}));
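
// The tracking mocks record the order of calls in h.trackerCalls so tests can
// assert when concurrency counters are incremented and decremented.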
vi.mock("@/lib/session-tracker", () => ({
  SessionTracker: {
    incrementConcurrentCount: async () => {
      h.trackerCalls.push("inc");
    },
    decrementConcurrentCount: async () => {
      h.trackerCalls.push("dec");
    },
  },
}));

vi.mock("@/lib/proxy-status-tracker", () => ({
  ProxyStatusTracker: {
    getInstance: () => ({
      startRequest: () => {
        h.trackerCalls.push("startRequest");
      },
      endRequest: () => {},
    }),
  },
}));
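
// The handler under test is imported after the mocks above are registered.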
describe("handleProxyRequest - session id on errors", async () => {
  const { handleProxyRequest } = await import("@/app/v1/_lib/proxy-handler");

  test("decorates early error response with x-cch-session-id and message suffix", async () => {
    h.fromContextError = null;
    h.session.originalFormat = "openai";
    h.endpointFormat = null;
    h.trackerCalls.length = 0;
    h.pipelineError = null;
    h.earlyResponse = ProxyResponses.buildError(400, "bad request");
    const res = await handleProxyRequest({} as any);
    expect(res.status).toBe(400);
    expect(res.headers.get("x-cch-session-id")).toBe("s_123");
    const body = await res.json();
    expect(body.error.message).toBe("bad request (cch_session_id: s_123)");
  });

  test("decorates dispatch error response with x-cch-session-id and message suffix", async () => {
    h.fromContextError = null;
    h.session.originalFormat = "openai";
    h.endpointFormat = null;
    h.trackerCalls.length = 0;
    h.pipelineError = null;
    h.earlyResponse = null;
    h.forwardResponse = new Response("upstream", { status: 502 });
    h.dispatchedResponse = ProxyResponses.buildError(502, "bad gateway");
    const res = await handleProxyRequest({} as any);
    expect(res.status).toBe(502);
    expect(res.headers.get("x-cch-session-id")).toBe("s_123");
    const body = await res.json();
    expect(body.error.message).toBe("bad gateway (cch_session_id: s_123)");
  });

  test("covers claude format detection branch without breaking behavior", async () => {
    h.fromContextError = null;
    h.session.originalFormat = "claude";
    h.endpointFormat = null;
    h.trackerCalls.length = 0;
    h.pipelineError = null;
    h.earlyResponse = ProxyResponses.buildError(400, "bad request");
    h.session.requestUrl = new URL("http://localhost/v1/unknown");
    h.session.request = { model: "gpt", message: { contents: [] } };
    const res = await handleProxyRequest({} as any);
    expect(res.status).toBe(400);
    expect(res.headers.get("x-cch-session-id")).toBe("s_123");
  });
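
  // Success path with endpoint format detection: messageContext and provider
  // are populated, and the tracker call order (inc → startRequest → dec) is asserted.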
  test("covers endpoint format detection + tracking + finally decrement", async () => {
    h.fromContextError = null;
    h.session.originalFormat = "claude";
    h.endpointFormat = "openai";
    h.trackerCalls.length = 0;
    h.pipelineError = null;
    h.earlyResponse = null;
    h.forwardResponse = new Response("ok", { status: 200 });
    h.dispatchedResponse = null;
    h.session.sessionId = "s_123";
    h.session.messageContext = { id: 1, user: { id: 1, name: "u" }, key: { name: "k" } };
    h.session.provider = { id: 1, name: "p" };
    h.session.isCountTokensRequest = () => false;
    const res = await handleProxyRequest({} as any);
    expect(res.status).toBe(200);
    expect(h.trackerCalls).toEqual(["inc", "startRequest", "dec"]);
  });
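
  // Error paths: the remaining tests exercise failures before and after a
  // session has been created.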
  test("session not created and ProxyError thrown: returns buildError without session header", async () => {
    h.fromContextError = new ProxyError("upstream", 401);
    h.endpointFormat = null;
    h.trackerCalls.length = 0;
    h.pipelineError = null;
    h.earlyResponse = null;
    const res = await handleProxyRequest({} as any);
    expect(res.status).toBe(401);
    expect(res.headers.get("x-cch-session-id")).toBeNull();
    const body = await res.json();
    expect(body.error.message).toBe("upstream");
  });

  test("session created but pipeline throws: routes to ProxyErrorHandler.handle", async () => {
    h.fromContextError = null;
    h.endpointFormat = null;
    h.trackerCalls.length = 0;
    h.pipelineError = new Error("pipeline boom");
    h.earlyResponse = null;
    const res = await handleProxyRequest({} as any);
    expect(res.status).toBe(502);
    expect(await res.text()).toBe("handled");
  });

  test("session not created and non-ProxyError thrown: returns 500 buildError", async () => {
    h.fromContextError = new Error("boom");
    h.endpointFormat = null;
    h.trackerCalls.length = 0;
    h.pipelineError = null;
    h.earlyResponse = null;
    const res = await handleProxyRequest({} as any);
    expect(res.status).toBe(500);
    const body = await res.json();
    // "代理请求发生未知错误" = "An unknown error occurred with the proxy request".
    expect(body.error.message).toBe("代理请求发生未知错误");
  });
});