import fs from "fs/promises"
import path from "path"
import { fileURLToPath } from "url"
import { describe, expect, test } from "bun:test"
import { NamedError } from "@opencode-ai/util/error"
import { Instance } from "../../src/project/instance"
import { ModelID, ProviderID } from "../../src/provider/schema"
import { Session } from "../../src/session"
import { MessageV2 } from "../../src/session/message-v2"
import { SessionPrompt } from "../../src/session/prompt"
import { Log } from "../../src/util/log"
import { tmpdir } from "../fixture/fixture"
  13. Log.init({ print: false })
  14. function defer<T>() {
  15. let resolve!: (value: T | PromiseLike<T>) => void
  16. const promise = new Promise<T>((done) => {
  17. resolve = done
  18. })
  19. return { promise, resolve }
  20. }
  21. function chat(text: string) {
  22. const payload =
  23. [
  24. `data: ${JSON.stringify({
  25. id: "chatcmpl-1",
  26. object: "chat.completion.chunk",
  27. choices: [{ delta: { role: "assistant" } }],
  28. })}`,
  29. `data: ${JSON.stringify({
  30. id: "chatcmpl-1",
  31. object: "chat.completion.chunk",
  32. choices: [{ delta: { content: text } }],
  33. })}`,
  34. `data: ${JSON.stringify({
  35. id: "chatcmpl-1",
  36. object: "chat.completion.chunk",
  37. choices: [{ delta: {}, finish_reason: "stop" }],
  38. })}`,
  39. "data: [DONE]",
  40. ].join("\n\n") + "\n\n"
  41. const encoder = new TextEncoder()
  42. return new ReadableStream<Uint8Array>({
  43. start(ctrl) {
  44. ctrl.enqueue(encoder.encode(payload))
  45. ctrl.close()
  46. },
  47. })
  48. }
  49. function hanging(ready: () => void) {
  50. const encoder = new TextEncoder()
  51. let timer: ReturnType<typeof setTimeout> | undefined
  52. const first =
  53. `data: ${JSON.stringify({
  54. id: "chatcmpl-1",
  55. object: "chat.completion.chunk",
  56. choices: [{ delta: { role: "assistant" } }],
  57. })}` + "\n\n"
  58. const rest =
  59. [
  60. `data: ${JSON.stringify({
  61. id: "chatcmpl-1",
  62. object: "chat.completion.chunk",
  63. choices: [{ delta: { content: "late" } }],
  64. })}`,
  65. `data: ${JSON.stringify({
  66. id: "chatcmpl-1",
  67. object: "chat.completion.chunk",
  68. choices: [{ delta: {}, finish_reason: "stop" }],
  69. })}`,
  70. "data: [DONE]",
  71. ].join("\n\n") + "\n\n"
  72. return new ReadableStream<Uint8Array>({
  73. start(ctrl) {
  74. ctrl.enqueue(encoder.encode(first))
  75. ready()
  76. timer = setTimeout(() => {
  77. ctrl.enqueue(encoder.encode(rest))
  78. ctrl.close()
  79. }, 10000)
  80. },
  81. cancel() {
  82. if (timer) clearTimeout(timer)
  83. },
  84. })
  85. }
  86. describe("session.prompt missing file", () => {
  87. test("does not fail the prompt when a file part is missing", async () => {
  88. await using tmp = await tmpdir({
  89. git: true,
  90. config: {
  91. agent: {
  92. build: {
  93. model: "openai/gpt-5.2",
  94. },
  95. },
  96. },
  97. })
  98. await Instance.provide({
  99. directory: tmp.path,
  100. fn: async () => {
  101. const session = await Session.create({})
  102. const missing = path.join(tmp.path, "does-not-exist.ts")
  103. const msg = await SessionPrompt.prompt({
  104. sessionID: session.id,
  105. agent: "build",
  106. noReply: true,
  107. parts: [
  108. { type: "text", text: "please review @does-not-exist.ts" },
  109. {
  110. type: "file",
  111. mime: "text/plain",
  112. url: `file://${missing}`,
  113. filename: "does-not-exist.ts",
  114. },
  115. ],
  116. })
  117. if (msg.info.role !== "user") throw new Error("expected user message")
  118. const hasFailure = msg.parts.some(
  119. (part) => part.type === "text" && part.synthetic && part.text.includes("Read tool failed to read"),
  120. )
  121. expect(hasFailure).toBe(true)
  122. await Session.remove(session.id)
  123. },
  124. })
  125. })
  126. test("keeps stored part order stable when file resolution is async", async () => {
  127. await using tmp = await tmpdir({
  128. git: true,
  129. config: {
  130. agent: {
  131. build: {
  132. model: "openai/gpt-5.2",
  133. },
  134. },
  135. },
  136. })
  137. await Instance.provide({
  138. directory: tmp.path,
  139. fn: async () => {
  140. const session = await Session.create({})
  141. const missing = path.join(tmp.path, "still-missing.ts")
  142. const msg = await SessionPrompt.prompt({
  143. sessionID: session.id,
  144. agent: "build",
  145. noReply: true,
  146. parts: [
  147. {
  148. type: "file",
  149. mime: "text/plain",
  150. url: `file://${missing}`,
  151. filename: "still-missing.ts",
  152. },
  153. { type: "text", text: "after-file" },
  154. ],
  155. })
  156. if (msg.info.role !== "user") throw new Error("expected user message")
  157. const stored = await MessageV2.get({
  158. sessionID: session.id,
  159. messageID: msg.info.id,
  160. })
  161. const text = stored.parts.filter((part) => part.type === "text").map((part) => part.text)
  162. expect(text[0]?.startsWith("Called the Read tool with the following input:")).toBe(true)
  163. expect(text[1]?.includes("Read tool failed to read")).toBe(true)
  164. expect(text[2]).toBe("after-file")
  165. await Session.remove(session.id)
  166. },
  167. })
  168. })
  169. })
  170. describe("session.prompt special characters", () => {
  171. test("handles filenames with # character", async () => {
  172. await using tmp = await tmpdir({
  173. git: true,
  174. init: async (dir) => {
  175. await Bun.write(path.join(dir, "file#name.txt"), "special content\n")
  176. },
  177. })
  178. await Instance.provide({
  179. directory: tmp.path,
  180. fn: async () => {
  181. const session = await Session.create({})
  182. const template = "Read @file#name.txt"
  183. const parts = await SessionPrompt.resolvePromptParts(template)
  184. const fileParts = parts.filter((part) => part.type === "file")
  185. expect(fileParts.length).toBe(1)
  186. expect(fileParts[0].filename).toBe("file#name.txt")
  187. expect(fileParts[0].url).toContain("%23")
  188. const decodedPath = fileURLToPath(fileParts[0].url)
  189. expect(decodedPath).toBe(path.join(tmp.path, "file#name.txt"))
  190. const message = await SessionPrompt.prompt({
  191. sessionID: session.id,
  192. parts,
  193. noReply: true,
  194. })
  195. const stored = await MessageV2.get({ sessionID: session.id, messageID: message.info.id })
  196. const textParts = stored.parts.filter((part) => part.type === "text")
  197. const hasContent = textParts.some((part) => part.text.includes("special content"))
  198. expect(hasContent).toBe(true)
  199. await Session.remove(session.id)
  200. },
  201. })
  202. })
  203. })
  204. describe("session.prompt regression", () => {
  205. test("does not loop empty assistant turns for a simple reply", async () => {
  206. let calls = 0
  207. const server = Bun.serve({
  208. port: 0,
  209. fetch(req) {
  210. const url = new URL(req.url)
  211. if (!url.pathname.endsWith("/chat/completions")) {
  212. return new Response("not found", { status: 404 })
  213. }
  214. calls++
  215. return new Response(chat("packages/opencode/src/session/processor.ts"), {
  216. status: 200,
  217. headers: { "Content-Type": "text/event-stream" },
  218. })
  219. },
  220. })
  221. try {
  222. await using tmp = await tmpdir({
  223. git: true,
  224. init: async (dir) => {
  225. await Bun.write(
  226. path.join(dir, "opencode.json"),
  227. JSON.stringify({
  228. $schema: "https://opencode.ai/config.json",
  229. enabled_providers: ["alibaba"],
  230. provider: {
  231. alibaba: {
  232. options: {
  233. apiKey: "test-key",
  234. baseURL: `${server.url.origin}/v1`,
  235. },
  236. },
  237. },
  238. agent: {
  239. build: {
  240. model: "alibaba/qwen-plus",
  241. },
  242. },
  243. }),
  244. )
  245. },
  246. })
  247. await Instance.provide({
  248. directory: tmp.path,
  249. fn: async () => {
  250. const session = await Session.create({ title: "Prompt regression" })
  251. const result = await SessionPrompt.prompt({
  252. sessionID: session.id,
  253. agent: "build",
  254. parts: [{ type: "text", text: "Where is SessionProcessor?" }],
  255. })
  256. expect(result.info.role).toBe("assistant")
  257. expect(result.parts.some((part) => part.type === "text" && part.text.includes("processor.ts"))).toBe(true)
  258. const msgs = await Session.messages({ sessionID: session.id })
  259. expect(msgs.filter((msg) => msg.info.role === "assistant")).toHaveLength(1)
  260. expect(calls).toBe(1)
  261. },
  262. })
  263. } finally {
  264. server.stop(true)
  265. }
  266. })
  267. test("records aborted errors when prompt is cancelled mid-stream", async () => {
  268. const ready = defer<void>()
  269. const server = Bun.serve({
  270. port: 0,
  271. fetch(req) {
  272. const url = new URL(req.url)
  273. if (!url.pathname.endsWith("/chat/completions")) {
  274. return new Response("not found", { status: 404 })
  275. }
  276. return new Response(
  277. hanging(() => ready.resolve()),
  278. {
  279. status: 200,
  280. headers: { "Content-Type": "text/event-stream" },
  281. },
  282. )
  283. },
  284. })
  285. try {
  286. await using tmp = await tmpdir({
  287. git: true,
  288. init: async (dir) => {
  289. await Bun.write(
  290. path.join(dir, "opencode.json"),
  291. JSON.stringify({
  292. $schema: "https://opencode.ai/config.json",
  293. enabled_providers: ["alibaba"],
  294. provider: {
  295. alibaba: {
  296. options: {
  297. apiKey: "test-key",
  298. baseURL: `${server.url.origin}/v1`,
  299. },
  300. },
  301. },
  302. agent: {
  303. build: {
  304. model: "alibaba/qwen-plus",
  305. },
  306. },
  307. }),
  308. )
  309. },
  310. })
  311. await Instance.provide({
  312. directory: tmp.path,
  313. fn: async () => {
  314. const session = await Session.create({ title: "Prompt cancel regression" })
  315. const run = SessionPrompt.prompt({
  316. sessionID: session.id,
  317. agent: "build",
  318. parts: [{ type: "text", text: "Cancel me" }],
  319. })
  320. await ready.promise
  321. await SessionPrompt.cancel(session.id)
  322. const result = await Promise.race([
  323. run,
  324. new Promise<never>((_, reject) =>
  325. setTimeout(() => reject(new Error("timed out waiting for cancel")), 1000),
  326. ),
  327. ])
  328. expect(result.info.role).toBe("assistant")
  329. if (result.info.role === "assistant") {
  330. expect(result.info.error?.name).toBe("MessageAbortedError")
  331. }
  332. const msgs = await Session.messages({ sessionID: session.id })
  333. const last = msgs.findLast((msg) => msg.info.role === "assistant")
  334. expect(last?.info.role).toBe("assistant")
  335. if (last?.info.role === "assistant") {
  336. expect(last.info.error?.name).toBe("MessageAbortedError")
  337. }
  338. },
  339. })
  340. } finally {
  341. server.stop(true)
  342. }
  343. })
  344. })
  345. describe("session.prompt agent variant", () => {
  346. test("applies agent variant only when using agent model", async () => {
  347. const prev = process.env.OPENAI_API_KEY
  348. process.env.OPENAI_API_KEY = "test-openai-key"
  349. try {
  350. await using tmp = await tmpdir({
  351. git: true,
  352. config: {
  353. agent: {
  354. build: {
  355. model: "openai/gpt-5.2",
  356. variant: "xhigh",
  357. },
  358. },
  359. },
  360. })
  361. await Instance.provide({
  362. directory: tmp.path,
  363. fn: async () => {
  364. const session = await Session.create({})
  365. const other = await SessionPrompt.prompt({
  366. sessionID: session.id,
  367. agent: "build",
  368. model: { providerID: ProviderID.make("opencode"), modelID: ModelID.make("kimi-k2.5-free") },
  369. noReply: true,
  370. parts: [{ type: "text", text: "hello" }],
  371. })
  372. if (other.info.role !== "user") throw new Error("expected user message")
  373. expect(other.info.model.variant).toBeUndefined()
  374. const match = await SessionPrompt.prompt({
  375. sessionID: session.id,
  376. agent: "build",
  377. noReply: true,
  378. parts: [{ type: "text", text: "hello again" }],
  379. })
  380. if (match.info.role !== "user") throw new Error("expected user message")
  381. expect(match.info.model).toEqual({
  382. providerID: ProviderID.make("openai"),
  383. modelID: ModelID.make("gpt-5.2"),
  384. variant: "xhigh",
  385. })
  386. expect(match.info.model.variant).toBe("xhigh")
  387. const override = await SessionPrompt.prompt({
  388. sessionID: session.id,
  389. agent: "build",
  390. noReply: true,
  391. variant: "high",
  392. parts: [{ type: "text", text: "hello third" }],
  393. })
  394. if (override.info.role !== "user") throw new Error("expected user message")
  395. expect(override.info.model.variant).toBe("high")
  396. await Session.remove(session.id)
  397. },
  398. })
  399. } finally {
  400. if (prev === undefined) delete process.env.OPENAI_API_KEY
  401. else process.env.OPENAI_API_KEY = prev
  402. }
  403. })
  404. })
// kilocode_change start
  406. function deferred<T>() {
  407. const result = {} as { promise: Promise<T>; resolve: (value: T) => void }
  408. result.promise = new Promise((resolve) => {
  409. result.resolve = resolve
  410. })
  411. return result
  412. }
  413. describe("session.prompt abort", () => {
  414. test("returns the interrupted assistant turn when the current prompt is cancelled", async () => {
  415. const dir = path.dirname(fileURLToPath(import.meta.url))
  416. const fixtures = (await Bun.file(path.join(dir, "../tool/fixtures/models-api.json")).json()) as Record<
  417. string,
  418. { models: Record<string, { id: string }> } & Record<string, unknown>
  419. >
  420. const model = fixtures.openai.models["gpt-5.2"]
  421. const started = deferred<void>()
  422. const payload = new TextEncoder().encode(
  423. [
  424. `data: ${JSON.stringify({
  425. type: "response.created",
  426. response: {
  427. id: "resp-1",
  428. created_at: Math.floor(Date.now() / 1000),
  429. model: model.id,
  430. service_tier: null,
  431. },
  432. })}`,
  433. "",
  434. ].join("\n\n"),
  435. )
  436. const server = Bun.serve({
  437. port: 0,
  438. fetch(req: Request) {
  439. const url = new URL(req.url)
  440. if (!url.pathname.endsWith("/responses")) {
  441. return new Response("unexpected request", { status: 404 })
  442. }
  443. started.resolve()
  444. return new Response(
  445. new ReadableStream<Uint8Array>({
  446. start(controller) {
  447. controller.enqueue(payload)
  448. },
  449. }),
  450. {
  451. status: 200,
  452. headers: { "Content-Type": "text/event-stream" },
  453. },
  454. )
  455. },
  456. })
  457. try {
  458. await using tmp = await tmpdir({
  459. git: true,
  460. init: async (root) => {
  461. // kilocode_change start — project config must be at root, not in .opencode/ subdirectory
  462. await Bun.write(
  463. path.join(root, "opencode.json"),
  464. JSON.stringify({
  465. $schema: "https://app.kilo.ai/config.json",
  466. enabled_providers: ["openai"],
  467. provider: {
  468. openai: {
  469. name: "OpenAI",
  470. env: ["OPENAI_API_KEY"],
  471. npm: "@ai-sdk/openai",
  472. api: "https://api.openai.com/v1",
  473. models: {
  474. [model.id]: model,
  475. },
  476. options: {
  477. apiKey: "test-openai-key",
  478. baseURL: `${server.url.origin}/v1`,
  479. },
  480. },
  481. },
  482. }),
  483. )
  484. },
  485. })
  486. await Instance.provide({
  487. directory: tmp.path,
  488. fn: async () => {
  489. const session = await Session.create({})
  490. const run = SessionPrompt.prompt({
  491. sessionID: session.id,
  492. model: {
  493. providerID: ProviderID.make("openai"),
  494. modelID: ModelID.make(model.id),
  495. },
  496. parts: [{ type: "text", text: "say hello" }],
  497. })
  498. await started.promise
  499. SessionPrompt.cancel(session.id)
  500. const result = await run
  501. expect(result.info.role).toBe("assistant")
  502. if (result.info.role !== "assistant") throw new Error("expected assistant message")
  503. // kilocode_change start — re-read from DB; the abort error is set asynchronously by the processor
  504. const messages = await Session.messages({ sessionID: session.id })
  505. const assistant = messages.find((item) => item.info.role === "assistant")
  506. expect(assistant).toBeDefined()
  507. expect(assistant?.info.id).toBe(result.info.id)
  508. if (assistant?.info.role === "assistant" && assistant.info.error) {
  509. expect(assistant.info.error.name).toBe("MessageAbortedError")
  510. }
  511. // kilocode_change end
  512. await Session.remove(session.id)
  513. },
  514. })
  515. } finally {
  516. server.stop(true)
  517. }
  518. }, 15000)
  519. })
// kilocode_change end
  521. describe("session.agent-resolution", () => {
  522. test("unknown agent throws typed error", async () => {
  523. await using tmp = await tmpdir({ git: true })
  524. await Instance.provide({
  525. directory: tmp.path,
  526. fn: async () => {
  527. const session = await Session.create({})
  528. const err = await SessionPrompt.prompt({
  529. sessionID: session.id,
  530. agent: "nonexistent-agent-xyz",
  531. noReply: true,
  532. parts: [{ type: "text", text: "hello" }],
  533. }).then(
  534. () => undefined,
  535. (e) => e,
  536. )
  537. expect(err).toBeDefined()
  538. expect(err).not.toBeInstanceOf(TypeError)
  539. expect(NamedError.Unknown.isInstance(err)).toBe(true)
  540. if (NamedError.Unknown.isInstance(err)) {
  541. expect(err.data.message).toContain('Agent not found: "nonexistent-agent-xyz"')
  542. }
  543. },
  544. })
  545. }, 30000)
  546. test("unknown agent error includes available agent names", async () => {
  547. await using tmp = await tmpdir({ git: true })
  548. await Instance.provide({
  549. directory: tmp.path,
  550. fn: async () => {
  551. const session = await Session.create({})
  552. const err = await SessionPrompt.prompt({
  553. sessionID: session.id,
  554. agent: "nonexistent-agent-xyz",
  555. noReply: true,
  556. parts: [{ type: "text", text: "hello" }],
  557. }).then(
  558. () => undefined,
  559. (e) => e,
  560. )
  561. expect(NamedError.Unknown.isInstance(err)).toBe(true)
  562. if (NamedError.Unknown.isInstance(err)) {
  563. expect(err.data.message).toContain("code") // kilocode_change - "build" renamed to "code"
  564. }
  565. },
  566. })
  567. }, 30000)
  568. test("unknown command throws typed error with available names", async () => {
  569. await using tmp = await tmpdir({ git: true })
  570. await Instance.provide({
  571. directory: tmp.path,
  572. fn: async () => {
  573. const session = await Session.create({})
  574. const err = await SessionPrompt.command({
  575. sessionID: session.id,
  576. command: "nonexistent-command-xyz",
  577. arguments: "",
  578. }).then(
  579. () => undefined,
  580. (e) => e,
  581. )
  582. expect(err).toBeDefined()
  583. expect(err).not.toBeInstanceOf(TypeError)
  584. expect(NamedError.Unknown.isInstance(err)).toBe(true)
  585. if (NamedError.Unknown.isInstance(err)) {
  586. expect(err.data.message).toContain('Command not found: "nonexistent-command-xyz"')
  587. expect(err.data.message).toContain("init")
  588. }
  589. },
  590. })
  591. }, 30000)
  592. })