proxy-handler-session-id-error.test.ts

import { describe, expect, test, vi } from "vitest";
import { resolveEndpointPolicy } from "@/app/v1/_lib/proxy/endpoint-policy";
import { V1_ENDPOINT_PATHS } from "@/app/v1/_lib/proxy/endpoint-paths";
import { ProxyResponses } from "@/app/v1/_lib/proxy/responses";
import { ProxyError } from "@/app/v1/_lib/proxy/errors";
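
// Shared mutable fixture state. vi.hoisted() keeps it available to the
// vi.mock factories below, which vitest hoists above regular declarations.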
const h = vi.hoisted(() => ({
  session: {
    originalFormat: "openai",
    sessionId: "s_123",
    requestUrl: new URL("http://localhost/v1/messages"),
    request: {
      model: "gpt",
      message: {},
    },
    getEndpointPolicy: () => resolveEndpointPolicy(h.session.requestUrl.pathname),
    isCountTokensRequest: () => false,
    setOriginalFormat: () => {},
    recordForwardStart: () => {},
    messageContext: null,
    provider: null,
  } as any,
  fromContextError: null as unknown,
  pipelineError: null as unknown,
  earlyResponse: null as Response | null,
  forwardResponse: new Response("ok", { status: 200 }),
  dispatchedResponse: null as Response | null,
  endpointFormat: null as string | null,
  trackerCalls: [] as string[],
}));
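
// Stub out every collaborator of handleProxyRequest so each test can steer
// behavior purely by mutating `h` before invoking the handler.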
vi.mock("@/app/v1/_lib/proxy/session", () => ({
  ProxySession: {
    fromContext: async () => {
      if (h.fromContextError) throw h.fromContextError;
      return h.session;
    },
  },
}));

vi.mock("@/app/v1/_lib/proxy/guard-pipeline", () => ({
  RequestType: { CHAT: "CHAT", COUNT_TOKENS: "COUNT_TOKENS" },
  GuardPipelineBuilder: {
    fromSession: () => ({
      run: async () => {
        if (h.pipelineError) throw h.pipelineError;
        return h.earlyResponse;
      },
    }),
    fromRequestType: () => ({
      run: async () => {
        if (h.pipelineError) throw h.pipelineError;
        return h.earlyResponse;
      },
    }),
  },
}));

vi.mock("@/app/v1/_lib/proxy/format-mapper", () => ({
  detectClientFormat: () => "openai",
  detectFormatByEndpoint: () => h.endpointFormat,
}));

vi.mock("@/app/v1/_lib/proxy/forwarder", () => ({
  ProxyForwarder: {
    send: async () => h.forwardResponse,
  },
}));

vi.mock("@/app/v1/_lib/proxy/response-handler", () => ({
  ProxyResponseHandler: {
    dispatch: async () => h.dispatchedResponse ?? h.forwardResponse,
  },
}));

vi.mock("@/app/v1/_lib/proxy/error-handler", () => ({
  ProxyErrorHandler: {
    handle: async () => new Response("handled", { status: 502 }),
  },
}));

vi.mock("@/lib/session-tracker", () => ({
  SessionTracker: {
    incrementConcurrentCount: async () => {
      h.trackerCalls.push("inc");
    },
    decrementConcurrentCount: async () => {
      h.trackerCalls.push("dec");
    },
  },
}));

vi.mock("@/lib/proxy-status-tracker", () => ({
  ProxyStatusTracker: {
    getInstance: () => ({
      startRequest: () => {
        h.trackerCalls.push("startRequest");
      },
      endRequest: () => {},
    }),
  },
}));
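
// The handler module is imported dynamically inside the suite, after all of
// the mocks above have been registered.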
describe("handleProxyRequest - session id on errors", async () => {
  const { handleProxyRequest } = await import("@/app/v1/_lib/proxy-handler");

  test("decorates early error response with x-cch-session-id and message suffix", async () => {
    h.fromContextError = null;
    h.session.originalFormat = "openai";
    h.endpointFormat = null;
    h.trackerCalls.length = 0;
    h.pipelineError = null;
    h.earlyResponse = ProxyResponses.buildError(400, "bad request");

    const res = await handleProxyRequest({} as any);

    expect(res.status).toBe(400);
    expect(res.headers.get("x-cch-session-id")).toBe("s_123");
    const body = await res.json();
    expect(body.error.message).toBe("bad request (cch_session_id: s_123)");
  });

  test("decorates dispatch error response with x-cch-session-id and message suffix", async () => {
    h.fromContextError = null;
    h.session.originalFormat = "openai";
    h.endpointFormat = null;
    h.trackerCalls.length = 0;
    h.pipelineError = null;
    h.earlyResponse = null;
    h.forwardResponse = new Response("upstream", { status: 502 });
    h.dispatchedResponse = ProxyResponses.buildError(502, "bad gateway");

    const res = await handleProxyRequest({} as any);

    expect(res.status).toBe(502);
    expect(res.headers.get("x-cch-session-id")).toBe("s_123");
    const body = await res.json();
    expect(body.error.message).toBe("bad gateway (cch_session_id: s_123)");
  });

  test("covers claude format detection branch without breaking behavior", async () => {
    h.fromContextError = null;
    h.session.originalFormat = "claude";
    h.endpointFormat = null;
    h.trackerCalls.length = 0;
    h.pipelineError = null;
    h.earlyResponse = ProxyResponses.buildError(400, "bad request");
    h.session.requestUrl = new URL("http://localhost/v1/unknown");
    h.session.request = { model: "gpt", message: { contents: [] } };

    const res = await handleProxyRequest({} as any);

    expect(res.status).toBe(400);
    expect(res.headers.get("x-cch-session-id")).toBe("s_123");
  });

  test("covers endpoint format detection + tracking + finally decrement", async () => {
    h.fromContextError = null;
    h.session.originalFormat = "claude";
    h.endpointFormat = "openai";
    h.trackerCalls.length = 0;
    h.pipelineError = null;
    h.earlyResponse = null;
    h.forwardResponse = new Response("ok", { status: 200 });
    h.dispatchedResponse = null;
    h.session.sessionId = "s_123";
    h.session.messageContext = { id: 1, user: { id: 1, name: "u" }, key: { name: "k" } };
    h.session.provider = { id: 1, name: "p" };
    h.session.isCountTokensRequest = () => false;

    const res = await handleProxyRequest({} as any);

    expect(res.status).toBe(200);
    expect(h.trackerCalls).toEqual(["inc", "startRequest", "dec"]);
  });
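
  // RED case (expected to fail until Wave2 lands): raw endpoints should skip
  // concurrent counting uniformly, so only startRequest should be recorded.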
  test.each([
    {
      pathname: V1_ENDPOINT_PATHS.MESSAGES_COUNT_TOKENS,
      isCountTokensRequest: true,
    },
    {
      pathname: V1_ENDPOINT_PATHS.RESPONSES_COMPACT,
      isCountTokensRequest: false,
    },
  ])(
    "RED: raw endpoint $pathname should uniformly skip concurrent counting (fails until Wave2 is implemented)",
    async ({ pathname, isCountTokensRequest }) => {
      h.fromContextError = null;
      h.session.originalFormat = "claude";
      h.endpointFormat = "openai";
      h.trackerCalls.length = 0;
      h.pipelineError = null;
      h.earlyResponse = null;
      h.forwardResponse = new Response("ok", { status: 200 });
      h.dispatchedResponse = null;
      h.session.requestUrl = new URL(`http://localhost${pathname}`);
      h.session.getEndpointPolicy = () => resolveEndpointPolicy(h.session.requestUrl.pathname);
      h.session.sessionId = "s_123";
      h.session.messageContext = { id: 1, user: { id: 1, name: "u" }, key: { name: "k" } };
      h.session.provider = { id: 1, name: "p" };
      h.session.isCountTokensRequest = () => isCountTokensRequest;

      const res = await handleProxyRequest({} as any);

      expect(res.status).toBe(200);
      expect(h.trackerCalls).toEqual(["startRequest"]);
    }
  );
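
  // Error paths: if the session was never created, the handler falls back to a
  // bare buildError response (no x-cch-session-id header); once a session
  // exists, pipeline failures are routed to ProxyErrorHandler.handle.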
  test("session not created and ProxyError thrown: returns buildError without session header", async () => {
    h.fromContextError = new ProxyError("upstream", 401);
    h.endpointFormat = null;
    h.trackerCalls.length = 0;
    h.pipelineError = null;
    h.earlyResponse = null;

    const res = await handleProxyRequest({} as any);

    expect(res.status).toBe(401);
    expect(res.headers.get("x-cch-session-id")).toBeNull();
    const body = await res.json();
    expect(body.error.message).toBe("upstream");
  });

  test("session created but pipeline throws: routes to ProxyErrorHandler.handle", async () => {
    h.fromContextError = null;
    h.endpointFormat = null;
    h.trackerCalls.length = 0;
    h.pipelineError = new Error("pipeline boom");
    h.earlyResponse = null;

    const res = await handleProxyRequest({} as any);

    expect(res.status).toBe(502);
    expect(await res.text()).toBe("handled");
  });

  test("session not created and non-ProxyError thrown: returns 500 buildError", async () => {
    h.fromContextError = new Error("boom");
    h.endpointFormat = null;
    h.trackerCalls.length = 0;
    h.pipelineError = null;
    h.earlyResponse = null;

    const res = await handleProxyRequest({} as any);

    expect(res.status).toBe(500);
    const body = await res.json();
    // Expected message is the handler's generic fallback:
    // "代理请求发生未知错误" ("an unknown error occurred while proxying the request").
    expect(body.error.message).toBe("代理请求发生未知错误");
  });
});