Просмотр исходного кода

refactor(test): migrate llm-server to Effect HTTP platform

- Replace Bun.serve with Effect HTTP server using NodeHttpServer
- Add TestLLMServer service for mock LLM testing with SSE responses
- Update prompt-provider.test.ts to use testEffect pattern with provideTmpdirInstance
- Remove redundant test/fixture/effect.ts (using existing test/lib/effect.ts instead)
Kit Langton 3 недели назад
Родитель
Commit
459fbc99a8

+ 181 - 0
packages/opencode/test/lib/llm-server.ts

@@ -0,0 +1,181 @@
+import { NodeHttpServer } from "@effect/platform-node"
+import * as Http from "node:http"
+import { Effect, Layer, ServiceMap, Stream } from "effect"
+import * as HttpServer from "effect/unstable/http/HttpServer"
+import { HttpRouter, HttpServerRequest, HttpServerResponse } from "effect/unstable/http"
+
+// One scripted LLM turn: either a plain assistant text reply or a single
+// tool call with its JSON-serializable input.
+type Step =
+  | {
+      type: "text"
+      text: string
+    }
+  | {
+      type: "tool"
+      tool: string
+      input: unknown
+    }
+
+// A recorded request to the mock server: the URL that was hit and the
+// parsed JSON request body (empty object when the body was not valid JSON).
+type Hit = {
+  url: URL
+  body: Record<string, unknown>
+}
+
+function sse(lines: unknown[]) {
+  return HttpServerResponse.stream(
+    Stream.fromIterable([
+      [...lines.map((line) => `data: ${JSON.stringify(line)}`), "data: [DONE]"].join("\n\n") + "\n\n",
+    ]).pipe(Stream.encodeText),
+    { contentType: "text/event-stream" },
+  )
+}
+
+// Emits the OpenAI chat.completion.chunk sequence for a plain-text reply:
+// role preamble, one content delta, then the "stop" terminator.
+function text(step: Extract<Step, { type: "text" }>) {
+  const chunk = (delta: Record<string, unknown>, finish?: string) =>
+    finish === undefined
+      ? { id: "chatcmpl-test", object: "chat.completion.chunk", choices: [{ delta }] }
+      : { id: "chatcmpl-test", object: "chat.completion.chunk", choices: [{ delta, finish_reason: finish }] }
+  return sse([chunk({ role: "assistant" }), chunk({ content: step.text }), chunk({}, "stop")])
+}
+
+function tool(step: Extract<Step, { type: "tool" }>, seq: number) {
+  const id = `call_${seq}`
+  const args = JSON.stringify(step.input)
+  return sse([
+    {
+      id: "chatcmpl-test",
+      object: "chat.completion.chunk",
+      choices: [{ delta: { role: "assistant" } }],
+    },
+    {
+      id: "chatcmpl-test",
+      object: "chat.completion.chunk",
+      choices: [
+        {
+          delta: {
+            tool_calls: [
+              {
+                index: 0,
+                id,
+                type: "function",
+                function: {
+                  name: step.tool,
+                  arguments: "",
+                },
+              },
+            ],
+          },
+        },
+      ],
+    },
+    {
+      id: "chatcmpl-test",
+      object: "chat.completion.chunk",
+      choices: [
+        {
+          delta: {
+            tool_calls: [
+              {
+                index: 0,
+                function: {
+                  arguments: args,
+                },
+              },
+            ],
+          },
+        },
+      ],
+    },
+    {
+      id: "chatcmpl-test",
+      object: "chat.completion.chunk",
+      choices: [{ delta: {}, finish_reason: "tool_calls" }],
+    },
+  ])
+}
+
+// In-process OpenAI-compatible mock LLM server for tests.
+//
+// Tests enqueue scripted steps (plain text replies or tool calls) via
+// `text`/`tool`; each POST /v1/chat/completions consumes exactly one queued
+// step and streams it back as SSE chunks. `hits` exposes the recorded
+// requests and `pending` the number of steps not yet consumed, so a test
+// can assert that the code under test made exactly the expected calls.
+export class TestLLMServer extends ServiceMap.Service<
+  TestLLMServer,
+  {
+    readonly url: string
+    readonly text: (value: string) => Effect.Effect<void>
+    readonly tool: (tool: string, input: unknown) => Effect.Effect<void>
+    readonly hits: Effect.Effect<Hit[]>
+    readonly pending: Effect.Effect<number>
+  }
+>()("@test/LLMServer") {
+  static readonly layer = Layer.effect(
+    TestLLMServer,
+    Effect.gen(function* () {
+      const server = yield* HttpServer.HttpServer
+      const router = yield* HttpRouter.HttpRouter
+
+      // Mutable state is confined to this layer scope: recorded requests,
+      // the FIFO queue of scripted steps, and a counter for tool-call ids.
+      let hits: Hit[] = []
+      let list: Step[] = []
+      let seq = 0
+
+      const push = (step: Step) => {
+        list = [...list, step]
+      }
+
+      // Dequeue the next step; bumps `seq` only when a step is consumed so
+      // each tool call gets a unique `call_<n>` id.
+      const pull = () => {
+        const step = list[0]
+        if (!step) return { step: undefined, seq }
+        seq += 1
+        list = list.slice(1)
+        return { step, seq }
+      }
+
+      // Single OpenAI-compatible endpoint. Responds 500 when no step is
+      // queued, so an unexpected extra request fails the test loudly.
+      yield* router.add(
+        "POST",
+        "/v1/chat/completions",
+        Effect.gen(function* () {
+          const req = yield* HttpServerRequest.HttpServerRequest
+          const next = pull()
+          if (!next.step) return HttpServerResponse.text("unexpected request", { status: 500 })
+          // Best-effort body parse; non-JSON bodies are recorded as {}.
+          const json = yield* req.json.pipe(Effect.orElseSucceed(() => ({})))
+          // NOTE(review): requests rejected above (no queued step) are not
+          // recorded in `hits` — confirm that is intended.
+          hits = [
+            ...hits,
+            {
+              url: new URL(req.originalUrl, "http://localhost"),
+              body: json && typeof json === "object" ? (json as Record<string, unknown>) : {},
+            },
+          ]
+          if (next.step.type === "text") return text(next.step)
+          return tool(next.step, next.seq)
+        }),
+      )
+
+      // NOTE(review): assumes the NodeHttpServer layer's scope handles
+      // shutdown when the test layer is released — confirm.
+      yield* server.serve(router.asHttpEffect())
+
+      return TestLLMServer.of({
+        // Port 0 below means an OS-assigned port; expose the resolved
+        // address, including the /v1 prefix expected by OpenAI SDK clients.
+        url:
+          server.address._tag === "TcpAddress"
+            ? `http://127.0.0.1:${server.address.port}/v1`
+            : `unix://${server.address.path}/v1`,
+        text: Effect.fn("TestLLMServer.text")(function* (value: string) {
+          push({ type: "text", text: value })
+        }),
+        tool: Effect.fn("TestLLMServer.tool")(function* (tool: string, input: unknown) {
+          push({ type: "tool", tool, input })
+        }),
+        // Defensive copies/snapshots so callers cannot mutate internal state.
+        hits: Effect.sync(() => [...hits]),
+        pending: Effect.sync(() => list.length),
+      })
+    }),
+  ).pipe(
+    Layer.provide(HttpRouter.layer), //
+    Layer.provide(NodeHttpServer.layer(() => Http.createServer(), { port: 0 })),
+  )
+}

+ 140 - 0
packages/opencode/test/session/prompt-provider.test.ts

@@ -0,0 +1,140 @@
+import { describe, expect } from "bun:test"
+import { Effect } from "effect"
+import { NodeFileSystem } from "@effect/platform-node"
+import * as CrossSpawnSpawner from "../../src/effect/cross-spawn-spawner"
+import { Session } from "../../src/session"
+import { SessionPrompt } from "../../src/session/prompt"
+import { Log } from "../../src/util/log"
+import { testEffect } from "../lib/effect"
+import { provideTmpdirInstance } from "../fixture/fixture"
+import { TestLLMServer } from "../lib/llm-server"
+import { Layer } from "effect"
+
+// Silence log output while tests run.
+Log.init({ print: false })
+
+// Shared services for every test: real filesystem and spawner plus the
+// in-process mock LLM server.
+const baseLayer = Layer.mergeAll(NodeFileSystem.layer, CrossSpawnSpawner.defaultLayer, TestLLMServer.layer)
+
+// `it.effect` runs each test effect with baseLayer provided.
+const it = testEffect(baseLayer)
+
+function makeConfig(url: string) {
+  return {
+    provider: {
+      test: {
+        name: "Test",
+        env: [],
+        npm: "@ai-sdk/openai-compatible",
+        models: {
+          "gpt-5-nano": {
+            id: "gpt-5-nano",
+            name: "Test Model",
+            attachment: false,
+            reasoning: false,
+            temperature: false,
+            tool_call: true,
+            release_date: "2025-01-01",
+            limit: { context: 100000, output: 10000 },
+            cost: { input: 0, output: 0 },
+            options: {},
+          },
+        },
+        options: {
+          apiKey: "test-key",
+          baseURL: url,
+        },
+      },
+    },
+    agent: {
+      build: {
+        model: "test/gpt-5-nano",
+      },
+    },
+  }
+}
+
+// End-to-end: SessionPrompt.loop drives a full provider round-trip against
+// the in-process TestLLMServer. Each test enqueues the mock reply BEFORE
+// calling loop, then asserts the assistant message and that the server saw
+// exactly the expected number of requests with no steps left over.
+describe("session.prompt provider integration", () => {
+  it.effect("loop returns assistant text through local provider", () =>
+    Effect.gen(function* () {
+      const llm = yield* TestLLMServer
+      // Run inside a throwaway project dir configured to use the mock server.
+      return yield* provideTmpdirInstance(
+        () =>
+          Effect.gen(function* () {
+            const session = yield* Effect.promise(() =>
+              Session.create({
+                title: "Prompt provider",
+                permission: [{ permission: "*", pattern: "*", action: "allow" }],
+              }),
+            )
+
+            // noReply queues the user message without triggering a reply;
+            // the explicit loop() below performs the provider call.
+            yield* Effect.promise(() =>
+              SessionPrompt.prompt({
+                sessionID: session.id,
+                agent: "build",
+                noReply: true,
+                parts: [{ type: "text", text: "hello" }],
+              }),
+            )
+
+            yield* llm.text("world")
+
+            const result = yield* Effect.promise(() => SessionPrompt.loop({ sessionID: session.id }))
+            expect(result.info.role).toBe("assistant")
+            expect(result.parts.some((part) => part.type === "text" && part.text === "world")).toBe(true)
+            expect(yield* llm.hits).toHaveLength(1)
+            expect(yield* llm.pending).toBe(0)
+          }),
+        { git: true, config: makeConfig(llm.url) },
+      )
+    }),
+  )
+
+  it.effect("loop consumes queued replies across turns", () =>
+    Effect.gen(function* () {
+      const llm = yield* TestLLMServer
+      return yield* provideTmpdirInstance(
+        () =>
+          Effect.gen(function* () {
+            const session = yield* Effect.promise(() =>
+              Session.create({
+                title: "Prompt provider turns",
+                permission: [{ permission: "*", pattern: "*", action: "allow" }],
+              }),
+            )
+
+            // Turn one: prompt, queue mock reply, run the loop.
+            yield* Effect.promise(() =>
+              SessionPrompt.prompt({
+                sessionID: session.id,
+                agent: "build",
+                noReply: true,
+                parts: [{ type: "text", text: "hello one" }],
+              }),
+            )
+
+            yield* llm.text("world one")
+
+            const first = yield* Effect.promise(() => SessionPrompt.loop({ sessionID: session.id }))
+            expect(first.info.role).toBe("assistant")
+            expect(first.parts.some((part) => part.type === "text" && part.text === "world one")).toBe(true)
+
+            // Turn two on the same session: must consume only the newly
+            // queued reply.
+            yield* Effect.promise(() =>
+              SessionPrompt.prompt({
+                sessionID: session.id,
+                agent: "build",
+                noReply: true,
+                parts: [{ type: "text", text: "hello two" }],
+              }),
+            )
+
+            yield* llm.text("world two")
+
+            const second = yield* Effect.promise(() => SessionPrompt.loop({ sessionID: session.id }))
+            expect(second.info.role).toBe("assistant")
+            expect(second.parts.some((part) => part.type === "text" && part.text === "world two")).toBe(true)
+
+            // One provider request per turn; queue fully drained.
+            expect(yield* llm.hits).toHaveLength(2)
+            expect(yield* llm.pending).toBe(0)
+          }),
+        { git: true, config: makeConfig(llm.url) },
+      )
+    }),
+  )
+})