// proxy-handler-session-id-error.test.ts
  1. import { describe, expect, test, vi } from "vitest";
  2. import { resolveEndpointPolicy } from "@/app/v1/_lib/proxy/endpoint-policy";
  3. import { V1_ENDPOINT_PATHS } from "@/app/v1/_lib/proxy/endpoint-paths";
  4. import { ProxyResponses } from "@/app/v1/_lib/proxy/responses";
  5. import { ProxyError } from "@/app/v1/_lib/proxy/errors";
  6. const h = vi.hoisted(() => ({
  7. session: {
  8. originalFormat: "openai",
  9. sessionId: "s_123",
  10. requestUrl: new URL("http://localhost/v1/messages"),
  11. request: {
  12. model: "gpt",
  13. message: {},
  14. },
  15. getEndpointPolicy: () => resolveEndpointPolicy(h.session.requestUrl.pathname),
  16. isCountTokensRequest: () => false,
  17. getProviderChain: () => [],
  18. setOriginalFormat: () => {},
  19. setHighConcurrencyModeEnabled: () => {},
  20. recordForwardStart: () => {},
  21. messageContext: null,
  22. provider: null,
  23. } as any,
  24. fromContextError: null as unknown,
  25. pipelineError: null as unknown,
  26. earlyResponse: null as Response | null,
  27. forwardResponse: new Response("ok", { status: 200 }),
  28. dispatchedResponse: null as Response | null,
  29. endpointFormat: null as string | null,
  30. trackerCalls: [] as string[],
  31. }));
  32. vi.mock("@/app/v1/_lib/proxy/session", () => ({
  33. ProxySession: {
  34. fromContext: async () => {
  35. if (h.fromContextError) throw h.fromContextError;
  36. return h.session;
  37. },
  38. },
  39. }));
  40. vi.mock("@/app/v1/_lib/proxy/guard-pipeline", () => ({
  41. RequestType: { CHAT: "CHAT", COUNT_TOKENS: "COUNT_TOKENS" },
  42. GuardPipelineBuilder: {
  43. fromSession: () => ({
  44. run: async () => {
  45. if (h.pipelineError) throw h.pipelineError;
  46. return h.earlyResponse;
  47. },
  48. }),
  49. fromRequestType: () => ({
  50. run: async () => {
  51. if (h.pipelineError) throw h.pipelineError;
  52. return h.earlyResponse;
  53. },
  54. }),
  55. },
  56. }));
  57. vi.mock("@/app/v1/_lib/proxy/format-mapper", () => ({
  58. detectClientFormat: () => "openai",
  59. detectFormatByEndpoint: () => h.endpointFormat,
  60. }));
  61. vi.mock("@/app/v1/_lib/proxy/forwarder", () => ({
  62. ProxyForwarder: {
  63. send: async () => h.forwardResponse,
  64. },
  65. }));
  66. vi.mock("@/app/v1/_lib/proxy/response-handler", () => ({
  67. ProxyResponseHandler: {
  68. dispatch: async () => h.dispatchedResponse ?? h.forwardResponse,
  69. },
  70. }));
  71. vi.mock("@/app/v1/_lib/proxy/error-handler", () => ({
  72. ProxyErrorHandler: {
  73. handle: async () => new Response("handled", { status: 502 }),
  74. },
  75. }));
  76. vi.mock("@/lib/session-tracker", () => ({
  77. SessionTracker: {
  78. incrementConcurrentCount: async () => {
  79. h.trackerCalls.push("inc");
  80. },
  81. decrementConcurrentCount: async () => {
  82. h.trackerCalls.push("dec");
  83. },
  84. },
  85. }));
  86. vi.mock("@/lib/proxy-status-tracker", () => ({
  87. ProxyStatusTracker: {
  88. getInstance: () => ({
  89. startRequest: () => {
  90. h.trackerCalls.push("startRequest");
  91. },
  92. endRequest: () => {},
  93. }),
  94. },
  95. }));
// Suite: verifies that handleProxyRequest stamps error responses with the
// session id (header `x-cch-session-id` plus a message suffix) and that
// concurrency/status tracking fires in the expected order.
// NOTE: tests share the mutable fixture `h` and reset the relevant fields at
// the top of each test — keep that reset discipline when adding tests.
describe("handleProxyRequest - session id on errors", async () => {
  // Dynamic import so the vi.mock factories above (hoisted by vitest) are in
  // effect before the handler module binds its dependencies.
  const { handleProxyRequest } = await import("@/app/v1/_lib/proxy-handler");
  test("decorates early error response with x-cch-session-id and message suffix", async () => {
    // Arrange: session resolves, guard pipeline short-circuits with a 400.
    h.fromContextError = null;
    h.session.originalFormat = "openai";
    h.endpointFormat = null;
    h.trackerCalls.length = 0;
    h.pipelineError = null;
    h.earlyResponse = ProxyResponses.buildError(400, "bad request");
    const res = await handleProxyRequest({} as any);
    expect(res.status).toBe(400);
    // Session id must be attached both as a header and in the error message.
    expect(res.headers.get("x-cch-session-id")).toBe("s_123");
    const body = await res.json();
    expect(body.error.message).toBe("bad request (cch_session_id: s_123)");
  });
  test("decorates dispatch error response with x-cch-session-id and message suffix", async () => {
    // Arrange: pipeline passes, upstream fails, dispatcher maps it to a 502.
    h.fromContextError = null;
    h.session.originalFormat = "openai";
    h.endpointFormat = null;
    h.trackerCalls.length = 0;
    h.pipelineError = null;
    h.earlyResponse = null;
    h.forwardResponse = new Response("upstream", { status: 502 });
    h.dispatchedResponse = ProxyResponses.buildError(502, "bad gateway");
    const res = await handleProxyRequest({} as any);
    expect(res.status).toBe(502);
    expect(res.headers.get("x-cch-session-id")).toBe("s_123");
    const body = await res.json();
    expect(body.error.message).toBe("bad gateway (cch_session_id: s_123)");
  });
  test("covers claude format detection branch without breaking behavior", async () => {
    // Arrange: claude-format session on an unknown endpoint; session id
    // decoration must still apply to the early 400.
    h.fromContextError = null;
    h.session.originalFormat = "claude";
    h.endpointFormat = null;
    h.trackerCalls.length = 0;
    h.pipelineError = null;
    h.earlyResponse = ProxyResponses.buildError(400, "bad request");
    h.session.requestUrl = new URL("http://localhost/v1/unknown");
    h.session.request = { model: "gpt", message: { contents: [] } };
    const res = await handleProxyRequest({} as any);
    expect(res.status).toBe(400);
    expect(res.headers.get("x-cch-session-id")).toBe("s_123");
  });
  test("covers endpoint format detection + tracking + finally decrement", async () => {
    // Arrange: successful end-to-end request with full message context so the
    // tracker paths execute: inc → startRequest → dec (dec from finally).
    h.fromContextError = null;
    h.session.originalFormat = "claude";
    h.endpointFormat = "openai";
    h.trackerCalls.length = 0;
    h.pipelineError = null;
    h.earlyResponse = null;
    h.forwardResponse = new Response("ok", { status: 200 });
    h.dispatchedResponse = null;
    h.session.sessionId = "s_123";
    h.session.messageContext = { id: 1, user: { id: 1, name: "u" }, key: { name: "k" } };
    h.session.provider = { id: 1, name: "p" };
    h.session.isCountTokensRequest = () => false;
    const res = await handleProxyRequest({} as any);
    expect(res.status).toBe(200);
    expect(h.trackerCalls).toEqual(["inc", "startRequest", "dec"]);
  });
  // RED (TDD) cases: raw endpoints should skip concurrent-count tracking
  // entirely — only startRequest is expected, no inc/dec. The test name says
  // these are expected to fail until the "Wave2" change lands — confirm
  // whether that change has shipped before treating a failure here as a bug.
  test.each([
    {
      pathname: V1_ENDPOINT_PATHS.MESSAGES_COUNT_TOKENS,
      isCountTokensRequest: true,
    },
    {
      pathname: V1_ENDPOINT_PATHS.RESPONSES_COMPACT,
      isCountTokensRequest: false,
    },
  ])("RED: raw endpoint $pathname 应统一跳过并发计数(Wave2 未实现前会失败)", async ({
    pathname,
    isCountTokensRequest,
  }) => {
    h.fromContextError = null;
    h.session.originalFormat = "claude";
    h.endpointFormat = "openai";
    h.trackerCalls.length = 0;
    h.pipelineError = null;
    h.earlyResponse = null;
    h.forwardResponse = new Response("ok", { status: 200 });
    h.dispatchedResponse = null;
    // Point the session at the raw endpoint under test; the policy is
    // re-resolved from the (new) pathname.
    h.session.requestUrl = new URL(`http://localhost${pathname}`);
    h.session.getEndpointPolicy = () => resolveEndpointPolicy(h.session.requestUrl.pathname);
    h.session.sessionId = "s_123";
    h.session.messageContext = { id: 1, user: { id: 1, name: "u" }, key: { name: "k" } };
    h.session.provider = { id: 1, name: "p" };
    h.session.isCountTokensRequest = () => isCountTokensRequest;
    const res = await handleProxyRequest({} as any);
    expect(res.status).toBe(200);
    // No "inc"/"dec": concurrency counting must be skipped for raw endpoints.
    expect(h.trackerCalls).toEqual(["startRequest"]);
  });
  test("session not created and ProxyError thrown: returns buildError without session header", async () => {
    // When fromContext itself throws a ProxyError there is no session, so no
    // session-id decoration can be applied.
    h.fromContextError = new ProxyError("upstream", 401);
    h.endpointFormat = null;
    h.trackerCalls.length = 0;
    h.pipelineError = null;
    h.earlyResponse = null;
    const res = await handleProxyRequest({} as any);
    expect(res.status).toBe(401);
    expect(res.headers.get("x-cch-session-id")).toBeNull();
    const body = await res.json();
    expect(body.error.message).toBe("upstream");
  });
  test("session created but pipeline throws: routes to ProxyErrorHandler.handle", async () => {
    // A post-session failure is delegated to the error handler (mocked above
    // to return a fixed 502 "handled" response).
    h.fromContextError = null;
    h.endpointFormat = null;
    h.trackerCalls.length = 0;
    h.pipelineError = new Error("pipeline boom");
    h.earlyResponse = null;
    const res = await handleProxyRequest({} as any);
    expect(res.status).toBe(502);
    expect(await res.text()).toBe("handled");
  });
  test("session not created and non-ProxyError thrown: returns 500 buildError", async () => {
    // Unknown pre-session errors fall back to a generic 500 with a
    // Chinese-language message ("unknown error occurred in proxy request").
    h.fromContextError = new Error("boom");
    h.endpointFormat = null;
    h.trackerCalls.length = 0;
    h.pipelineError = null;
    h.earlyResponse = null;
    const res = await handleProxyRequest({} as any);
    expect(res.status).toBe(500);
    const body = await res.json();
    expect(body.error.message).toBe("代理请求发生未知错误");
  });
});