proxy-handler-session-id-error.test.ts
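// Regression tests for handleProxyRequest error decoration: once a ProxySession
// exists, error responses should carry the session id via the x-cch-session-id
// header and a "(cch_session_id: ...)" suffix on the error message; when session
// creation itself fails, no session header is attached.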

import { describe, expect, test, vi } from "vitest";
import { ProxyResponses } from "@/app/v1/_lib/proxy/responses";
import { ProxyError } from "@/app/v1/_lib/proxy/errors";
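// Shared mutable fixture, hoisted so the vi.mock factories below can read it.
// Each test mutates these fields to drive the mocked collaborators into the
// scenario under test.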
const h = vi.hoisted(() => ({
  session: {
    originalFormat: "openai",
    sessionId: "s_123",
    requestUrl: new URL("http://localhost/v1/messages"),
    request: {
      model: "gpt",
      message: {},
    },
    isCountTokensRequest: () => false,
    setOriginalFormat: () => {},
    messageContext: null,
    provider: null,
  } as any,
  fromContextError: null as unknown,
  pipelineError: null as unknown,
  earlyResponse: null as Response | null,
  forwardResponse: new Response("ok", { status: 200 }),
  dispatchedResponse: null as Response | null,
  endpointFormat: null as string | null,
  trackerCalls: [] as string[],
}));
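// Mock every collaborator of the proxy handler so individual tests can inject
// session-creation errors, pipeline errors, early responses, and upstream
// results through the `h` fixture.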
vi.mock("@/app/v1/_lib/proxy/session", () => ({
  ProxySession: {
    fromContext: async () => {
      if (h.fromContextError) throw h.fromContextError;
      return h.session;
    },
  },
}));

vi.mock("@/app/v1/_lib/proxy/guard-pipeline", () => ({
  RequestType: { CHAT: "CHAT", COUNT_TOKENS: "COUNT_TOKENS" },
  GuardPipelineBuilder: {
    fromRequestType: () => ({
      run: async () => {
        if (h.pipelineError) throw h.pipelineError;
        return h.earlyResponse;
      },
    }),
  },
}));

vi.mock("@/app/v1/_lib/proxy/format-mapper", () => ({
  detectClientFormat: () => "openai",
  detectFormatByEndpoint: () => h.endpointFormat,
}));

vi.mock("@/app/v1/_lib/proxy/forwarder", () => ({
  ProxyForwarder: {
    send: async () => h.forwardResponse,
  },
}));

vi.mock("@/app/v1/_lib/proxy/response-handler", () => ({
  ProxyResponseHandler: {
    dispatch: async () => h.dispatchedResponse ?? h.forwardResponse,
  },
}));

vi.mock("@/app/v1/_lib/proxy/error-handler", () => ({
  ProxyErrorHandler: {
    handle: async () => new Response("handled", { status: 502 }),
  },
}));

vi.mock("@/lib/session-tracker", () => ({
  SessionTracker: {
    incrementConcurrentCount: async () => {
      h.trackerCalls.push("inc");
    },
    decrementConcurrentCount: async () => {
      h.trackerCalls.push("dec");
    },
  },
}));

vi.mock("@/lib/proxy-status-tracker", () => ({
  ProxyStatusTracker: {
    getInstance: () => ({
      startRequest: () => {
        h.trackerCalls.push("startRequest");
      },
      endRequest: () => {},
    }),
  },
}));
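// handleProxyRequest is imported dynamically inside the suite so that it
// resolves the mocked modules declared above.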
describe("handleProxyRequest - session id on errors", async () => {
  const { handleProxyRequest } = await import("@/app/v1/_lib/proxy-handler");

  test("decorates early error response with x-cch-session-id and message suffix", async () => {
    h.fromContextError = null;
    h.session.originalFormat = "openai";
    h.endpointFormat = null;
    h.trackerCalls.length = 0;
    h.pipelineError = null;
    h.earlyResponse = ProxyResponses.buildError(400, "bad request");
    const res = await handleProxyRequest({} as any);
    expect(res.status).toBe(400);
    expect(res.headers.get("x-cch-session-id")).toBe("s_123");
    const body = await res.json();
    expect(body.error.message).toBe("bad request (cch_session_id: s_123)");
  });

  test("decorates dispatch error response with x-cch-session-id and message suffix", async () => {
    h.fromContextError = null;
    h.session.originalFormat = "openai";
    h.endpointFormat = null;
    h.trackerCalls.length = 0;
    h.pipelineError = null;
    h.earlyResponse = null;
    h.forwardResponse = new Response("upstream", { status: 502 });
    h.dispatchedResponse = ProxyResponses.buildError(502, "bad gateway");
    const res = await handleProxyRequest({} as any);
    expect(res.status).toBe(502);
    expect(res.headers.get("x-cch-session-id")).toBe("s_123");
    const body = await res.json();
    expect(body.error.message).toBe("bad gateway (cch_session_id: s_123)");
  });

  test("covers claude format detection branch without breaking behavior", async () => {
    h.fromContextError = null;
    h.session.originalFormat = "claude";
    h.endpointFormat = null;
    h.trackerCalls.length = 0;
    h.pipelineError = null;
    h.earlyResponse = ProxyResponses.buildError(400, "bad request");
    h.session.requestUrl = new URL("http://localhost/v1/unknown");
    h.session.request = { model: "gpt", message: { contents: [] } };
    const res = await handleProxyRequest({} as any);
    expect(res.status).toBe(400);
    expect(res.headers.get("x-cch-session-id")).toBe("s_123");
  });

  test("covers endpoint format detection + tracking + finally decrement", async () => {
    h.fromContextError = null;
    h.session.originalFormat = "claude";
    h.endpointFormat = "openai";
    h.trackerCalls.length = 0;
    h.pipelineError = null;
    h.earlyResponse = null;
    h.forwardResponse = new Response("ok", { status: 200 });
    h.dispatchedResponse = null;
    h.session.sessionId = "s_123";
    h.session.messageContext = { id: 1, user: { id: 1, name: "u" }, key: { name: "k" } };
    h.session.provider = { id: 1, name: "p" };
    h.session.isCountTokensRequest = () => false;
    const res = await handleProxyRequest({} as any);
    expect(res.status).toBe(200);
    expect(h.trackerCalls).toEqual(["inc", "startRequest", "dec"]);
  });

  test("session not created and ProxyError thrown: returns buildError without session header", async () => {
    h.fromContextError = new ProxyError("upstream", 401);
    h.endpointFormat = null;
    h.trackerCalls.length = 0;
    h.pipelineError = null;
    h.earlyResponse = null;
    const res = await handleProxyRequest({} as any);
    expect(res.status).toBe(401);
    expect(res.headers.get("x-cch-session-id")).toBeNull();
    const body = await res.json();
    expect(body.error.message).toBe("upstream");
  });

  test("session created but pipeline throws: routes to ProxyErrorHandler.handle", async () => {
    h.fromContextError = null;
    h.endpointFormat = null;
    h.trackerCalls.length = 0;
    h.pipelineError = new Error("pipeline boom");
    h.earlyResponse = null;
    const res = await handleProxyRequest({} as any);
    expect(res.status).toBe(502);
    expect(await res.text()).toBe("handled");
  });
  test("session not created and non-ProxyError thrown: returns 500 buildError", async () => {
    h.fromContextError = new Error("boom");
    h.endpointFormat = null;
    h.trackerCalls.length = 0;
    h.pipelineError = null;
    h.earlyResponse = null;
    const res = await handleProxyRequest({} as any);
    expect(res.status).toBe(500);
    const body = await res.json();
    // Expected message is the handler's Chinese fallback: "an unknown error occurred in the proxy request".
    expect(body.error.message).toBe("代理请求发生未知错误");
  });
});