Parcourir la source

refactor(effect): move read tool onto defineEffect (#21016)

Kit Langton il y a 1 semaine
Parent
commit
c796b9a19e

+ 17 - 1
packages/opencode/specs/effect-migration.md

@@ -235,11 +235,27 @@ Once individual tools are effectified, change `Tool.Info` (`tool/tool.ts`) so `i
 2. Update `Tool.define()` factory to work with Effects
 3. Update `SessionPrompt` to `yield*` tool results instead of `await`ing
 
+### Tool migration details
+
+Until the tool interface itself returns `Effect`, use this transitional pattern for migrated tools:
+
+- `Tool.defineEffect(...)` should `yield*` the services the tool depends on and close over them in the returned tool definition.
+- Keep the bridge at the Promise boundary only. Prefer a single `Effect.runPromise(...)` in the temporary `async execute(...)` implementation, and move the inner logic into `Effect.fn(...)` helpers instead of scattering `runPromise` islands through the tool body.
+- If a tool starts requiring new services, wire them into `ToolRegistry.defaultLayer` so production callers resolve the same dependencies as tests.
+
+Tool tests should use the existing Effect helpers in `packages/opencode/test/lib/effect.ts`:
+
+- Use `testEffect(...)` / `it.live(...)` instead of creating fake local wrappers around effectful tools.
+- Yield the real tool export, then initialize it: `const info = yield* ReadTool`, `const tool = yield* Effect.promise(() => info.init())`.
+- Run tests inside a real instance with `provideTmpdirInstance(...)` or `provideInstance(tmpdirScoped(...))` so instance-scoped services resolve exactly as they do in production.
+
+This keeps migrated tool tests aligned with the production service graph today, and makes the eventual `Tool.Info` → `Effect` cleanup mostly mechanical later.
+
 Individual tools, ordered by value:
 
 - [ ] `apply_patch.ts` — HIGH: multi-step orchestration, error accumulation, Bus events
 - [ ] `bash.ts` — HIGH: shell orchestration, quoting, timeout handling, output capture
-- [ ] `read.ts` — HIGH: streaming I/O, readline, binary detection → FileSystem + Stream
+- [x] `read.ts` — HIGH: streaming I/O, readline, binary detection → FileSystem + Stream
 - [ ] `edit.ts` — HIGH: multi-step diff/format/publish pipeline, FileWatcher lock
 - [ ] `grep.ts` — MEDIUM: spawns ripgrep → ChildProcessSpawner, timeout handling
 - [ ] `write.ts` — MEDIUM: permission checks, diagnostics polling, Bus events

+ 12 - 2
packages/opencode/src/filesystem/index.ts

@@ -188,13 +188,23 @@ export namespace AppFileSystem {
 
   export function normalizePath(p: string): string {
     if (process.platform !== "win32") return p
+    const resolved = pathResolve(windowsPath(p))
     try {
-      return realpathSync.native(p)
+      return realpathSync.native(resolved)
     } catch {
-      return p
+      return resolved
     }
   }
 
+  export function normalizePathPattern(p: string): string {
+    if (process.platform !== "win32") return p
+    if (p === "*") return p
+    const match = p.match(/^(.*)[\\/]\*$/)
+    if (!match) return normalizePath(p)
+    const dir = /^[A-Za-z]:$/.test(match[1]) ? match[1] + "\\" : match[1]
+    return join(normalizePath(dir), "*")
+  }
+
   export function resolve(p: string): string {
     const resolved = pathResolve(windowsPath(p))
     try {

+ 12 - 3
packages/opencode/src/tool/external-directory.ts

@@ -1,7 +1,8 @@
 import path from "path"
+import { Effect } from "effect"
 import type { Tool } from "./tool"
 import { Instance } from "../project/instance"
-import { Filesystem } from "@/util/filesystem"
+import { AppFileSystem } from "../filesystem"
 
 type Kind = "file" | "directory"
 
@@ -15,14 +16,14 @@ export async function assertExternalDirectory(ctx: Tool.Context, target?: string
 
   if (options?.bypass) return
 
-  const full = process.platform === "win32" ? Filesystem.normalizePath(target) : target
+  const full = process.platform === "win32" ? AppFileSystem.normalizePath(target) : target
   if (Instance.containsPath(full)) return
 
   const kind = options?.kind ?? "file"
   const dir = kind === "directory" ? full : path.dirname(full)
   const glob =
     process.platform === "win32"
-      ? Filesystem.normalizePathPattern(path.join(dir, "*"))
+      ? AppFileSystem.normalizePathPattern(path.join(dir, "*"))
       : path.join(dir, "*").replaceAll("\\", "/")
 
   await ctx.ask({
@@ -35,3 +36,11 @@ export async function assertExternalDirectory(ctx: Tool.Context, target?: string
     },
   })
 }
+
+export const assertExternalDirectoryEffect = Effect.fn("Tool.assertExternalDirectory")(function* (
+  ctx: Tool.Context,
+  target?: string,
+  options?: Options,
+) {
+  yield* Effect.promise(() => assertExternalDirectory(ctx, target, options))
+})

+ 224 - 188
packages/opencode/src/tool/read.ts

@@ -1,16 +1,17 @@
 import z from "zod"
+import { Effect, Scope } from "effect"
 import { createReadStream } from "fs"
-import * as fs from "fs/promises"
+import { open } from "fs/promises"
 import * as path from "path"
 import { createInterface } from "readline"
 import { Tool } from "./tool"
+import { AppFileSystem } from "../filesystem"
 import { LSP } from "../lsp"
 import { FileTime } from "../file/time"
 import DESCRIPTION from "./read.txt"
 import { Instance } from "../project/instance"
-import { assertExternalDirectory } from "./external-directory"
+import { assertExternalDirectoryEffect } from "./external-directory"
 import { Instruction } from "../session/instruction"
-import { Filesystem } from "../util/filesystem"
 
 const DEFAULT_READ_LIMIT = 2000
 const MAX_LINE_LENGTH = 2000
@@ -18,222 +19,257 @@ const MAX_LINE_SUFFIX = `... (line truncated to ${MAX_LINE_LENGTH} chars)`
 const MAX_BYTES = 50 * 1024
 const MAX_BYTES_LABEL = `${MAX_BYTES / 1024} KB`
 
-export const ReadTool = Tool.define("read", {
-  description: DESCRIPTION,
-  parameters: z.object({
-    filePath: z.string().describe("The absolute path to the file or directory to read"),
-    offset: z.coerce.number().describe("The line number to start reading from (1-indexed)").optional(),
-    limit: z.coerce.number().describe("The maximum number of lines to read (defaults to 2000)").optional(),
-  }),
-  async execute(params, ctx) {
-    if (params.offset !== undefined && params.offset < 1) {
-      throw new Error("offset must be greater than or equal to 1")
-    }
-    let filepath = params.filePath
-    if (!path.isAbsolute(filepath)) {
-      filepath = path.resolve(Instance.directory, filepath)
-    }
-    if (process.platform === "win32") {
-      filepath = Filesystem.normalizePath(filepath)
-    }
-    const title = path.relative(Instance.worktree, filepath)
-
-    const stat = Filesystem.stat(filepath)
+const parameters = z.object({
+  filePath: z.string().describe("The absolute path to the file or directory to read"),
+  offset: z.coerce.number().describe("The line number to start reading from (1-indexed)").optional(),
+  limit: z.coerce.number().describe("The maximum number of lines to read (defaults to 2000)").optional(),
+})
 
-    await assertExternalDirectory(ctx, filepath, {
-      bypass: Boolean(ctx.extra?.["bypassCwdCheck"]),
-      kind: stat?.isDirectory() ? "directory" : "file",
-    })
+export const ReadTool = Tool.defineEffect(
+  "read",
+  Effect.gen(function* () {
+    const fs = yield* AppFileSystem.Service
+    const instruction = yield* Instruction.Service
+    const lsp = yield* LSP.Service
+    const time = yield* FileTime.Service
+    const scope = yield* Scope.Scope
 
-    await ctx.ask({
-      permission: "read",
-      patterns: [filepath],
-      always: ["*"],
-      metadata: {},
-    })
-
-    if (!stat) {
+    const miss = Effect.fn("ReadTool.miss")(function* (filepath: string) {
       const dir = path.dirname(filepath)
       const base = path.basename(filepath)
-
-      const suggestions = await fs
-        .readdir(dir)
-        .then((entries) =>
-          entries
+      const items = yield* fs.readDirectory(dir).pipe(
+        Effect.map((items) =>
+          items
             .filter(
-              (entry) =>
-                entry.toLowerCase().includes(base.toLowerCase()) || base.toLowerCase().includes(entry.toLowerCase()),
+              (item) =>
+                item.toLowerCase().includes(base.toLowerCase()) || base.toLowerCase().includes(item.toLowerCase()),
             )
-            .map((entry) => path.join(dir, entry))
+            .map((item) => path.join(dir, item))
             .slice(0, 3),
-        )
-        .catch(() => [])
+        ),
+        Effect.catch(() => Effect.succeed([] as string[])),
+      )
 
-      if (suggestions.length > 0) {
-        throw new Error(`File not found: ${filepath}\n\nDid you mean one of these?\n${suggestions.join("\n")}`)
+      if (items.length > 0) {
+        return yield* Effect.fail(
+          new Error(`File not found: ${filepath}\n\nDid you mean one of these?\n${items.join("\n")}`),
+        )
       }
 
-      throw new Error(`File not found: ${filepath}`)
-    }
+      return yield* Effect.fail(new Error(`File not found: ${filepath}`))
+    })
 
-    if (stat.isDirectory()) {
-      const dirents = await fs.readdir(filepath, { withFileTypes: true })
-      const entries = await Promise.all(
-        dirents.map(async (dirent) => {
-          if (dirent.isDirectory()) return dirent.name + "/"
-          if (dirent.isSymbolicLink()) {
-            const target = await fs.stat(path.join(filepath, dirent.name)).catch(() => undefined)
-            if (target?.isDirectory()) return dirent.name + "/"
-          }
-          return dirent.name
+    const list = Effect.fn("ReadTool.list")(function* (filepath: string) {
+      const items = yield* fs.readDirectoryEntries(filepath)
+      return yield* Effect.forEach(
+        items,
+        Effect.fnUntraced(function* (item) {
+          if (item.type === "directory") return item.name + "/"
+          if (item.type !== "symlink") return item.name
+
+          const target = yield* fs
+            .stat(path.join(filepath, item.name))
+            .pipe(Effect.catch(() => Effect.succeed(undefined)))
+          if (target?.type === "Directory") return item.name + "/"
+          return item.name
         }),
-      )
-      entries.sort((a, b) => a.localeCompare(b))
-
-      const limit = params.limit ?? DEFAULT_READ_LIMIT
-      const offset = params.offset ?? 1
-      const start = offset - 1
-      const sliced = entries.slice(start, start + limit)
-      const truncated = start + sliced.length < entries.length
-
-      const output = [
-        `<path>${filepath}</path>`,
-        `<type>directory</type>`,
-        `<entries>`,
-        sliced.join("\n"),
-        truncated
-          ? `\n(Showing ${sliced.length} of ${entries.length} entries. Use 'offset' parameter to read beyond entry ${offset + sliced.length})`
-          : `\n(${entries.length} entries)`,
-        `</entries>`,
-      ].join("\n")
+        { concurrency: "unbounded" },
+      ).pipe(Effect.map((items: string[]) => items.sort((a, b) => a.localeCompare(b))))
+    })
 
-      return {
-        title,
-        output,
-        metadata: {
-          preview: sliced.slice(0, 20).join("\n"),
-          truncated,
-          loaded: [] as string[],
-        },
-      }
-    }
+    const warm = Effect.fn("ReadTool.warm")(function* (filepath: string, sessionID: Tool.Context["sessionID"]) {
+      yield* lsp.touchFile(filepath, false).pipe(Effect.ignore, Effect.forkIn(scope))
+      yield* time.read(sessionID, filepath)
+    })
 
-    const instructions = await Instruction.resolve(ctx.messages, filepath, ctx.messageID)
+    const run = Effect.fn("ReadTool.execute")(function* (params: z.infer<typeof parameters>, ctx: Tool.Context) {
+      if (params.offset !== undefined && params.offset < 1) {
+        return yield* Effect.fail(new Error("offset must be greater than or equal to 1"))
+      }
 
-    // Exclude SVG (XML-based) and vnd.fastbidsheet (.fbs extension, commonly FlatBuffers schema files)
-    const mime = Filesystem.mimeType(filepath)
-    const isImage = mime.startsWith("image/") && mime !== "image/svg+xml" && mime !== "image/vnd.fastbidsheet"
-    const isPdf = mime === "application/pdf"
-    if (isImage || isPdf) {
-      const msg = `${isImage ? "Image" : "PDF"} read successfully`
-      return {
-        title,
-        output: msg,
-        metadata: {
-          preview: msg,
-          truncated: false,
-          loaded: instructions.map((i) => i.filepath),
-        },
-        attachments: [
-          {
-            type: "file",
-            mime,
-            url: `data:${mime};base64,${Buffer.from(await Filesystem.readBytes(filepath)).toString("base64")}`,
-          },
-        ],
+      let filepath = params.filePath
+      if (!path.isAbsolute(filepath)) {
+        filepath = path.resolve(Instance.directory, filepath)
       }
-    }
+      if (process.platform === "win32") {
+        filepath = AppFileSystem.normalizePath(filepath)
+      }
+      const title = path.relative(Instance.worktree, filepath)
 
-    const isBinary = await isBinaryFile(filepath, Number(stat.size))
-    if (isBinary) throw new Error(`Cannot read binary file: ${filepath}`)
+      const stat = yield* fs.stat(filepath).pipe(
+        Effect.catchIf(
+          (err) => "reason" in err && err.reason._tag === "NotFound",
+          () => Effect.succeed(undefined),
+        ),
+      )
 
-    const stream = createReadStream(filepath, { encoding: "utf8" })
-    const rl = createInterface({
-      input: stream,
-      // Note: we use the crlfDelay option to recognize all instances of CR LF
-      // ('\r\n') in file as a single line break.
-      crlfDelay: Infinity,
-    })
+      yield* assertExternalDirectoryEffect(ctx, filepath, {
+        bypass: Boolean(ctx.extra?.["bypassCwdCheck"]),
+        kind: stat?.type === "Directory" ? "directory" : "file",
+      })
+
+      yield* Effect.promise(() =>
+        ctx.ask({
+          permission: "read",
+          patterns: [filepath],
+          always: ["*"],
+          metadata: {},
+        }),
+      )
 
-    const limit = params.limit ?? DEFAULT_READ_LIMIT
-    const offset = params.offset ?? 1
-    const start = offset - 1
-    const raw: string[] = []
-    let bytes = 0
-    let lines = 0
-    let truncatedByBytes = false
-    let hasMoreLines = false
-    try {
-      for await (const text of rl) {
-        lines += 1
-        if (lines <= start) continue
-
-        if (raw.length >= limit) {
-          hasMoreLines = true
-          continue
+      if (!stat) return yield* miss(filepath)
+
+      if (stat.type === "Directory") {
+        const items = yield* list(filepath)
+        const limit = params.limit ?? DEFAULT_READ_LIMIT
+        const offset = params.offset ?? 1
+        const start = offset - 1
+        const sliced = items.slice(start, start + limit)
+        const truncated = start + sliced.length < items.length
+
+        return {
+          title,
+          output: [
+            `<path>${filepath}</path>`,
+            `<type>directory</type>`,
+            `<entries>`,
+            sliced.join("\n"),
+            truncated
+              ? `\n(Showing ${sliced.length} of ${items.length} entries. Use 'offset' parameter to read beyond entry ${offset + sliced.length})`
+              : `\n(${items.length} entries)`,
+            `</entries>`,
+          ].join("\n"),
+          metadata: {
+            preview: sliced.slice(0, 20).join("\n"),
+            truncated,
+            loaded: [] as string[],
+          },
         }
+      }
 
-        const line = text.length > MAX_LINE_LENGTH ? text.substring(0, MAX_LINE_LENGTH) + MAX_LINE_SUFFIX : text
-        const size = Buffer.byteLength(line, "utf-8") + (raw.length > 0 ? 1 : 0)
-        if (bytes + size > MAX_BYTES) {
-          truncatedByBytes = true
-          hasMoreLines = true
-          break
+      const loaded = yield* instruction.resolve(ctx.messages, filepath, ctx.messageID)
+
+      const mime = AppFileSystem.mimeType(filepath)
+      const isImage = mime.startsWith("image/") && mime !== "image/svg+xml" && mime !== "image/vnd.fastbidsheet"
+      const isPdf = mime === "application/pdf"
+      if (isImage || isPdf) {
+        const msg = `${isImage ? "Image" : "PDF"} read successfully`
+        return {
+          title,
+          output: msg,
+          metadata: {
+            preview: msg,
+            truncated: false,
+            loaded: loaded.map((item) => item.filepath),
+          },
+          attachments: [
+            {
+              type: "file" as const,
+              mime,
+              url: `data:${mime};base64,${Buffer.from(yield* fs.readFile(filepath)).toString("base64")}`,
+            },
+          ],
         }
+      }
 
-        raw.push(line)
-        bytes += size
+      if (yield* Effect.promise(() => isBinaryFile(filepath, Number(stat.size)))) {
+        return yield* Effect.fail(new Error(`Cannot read binary file: ${filepath}`))
       }
-    } finally {
-      rl.close()
-      stream.destroy()
-    }
 
-    if (lines < offset && !(lines === 0 && offset === 1)) {
-      throw new Error(`Offset ${offset} is out of range for this file (${lines} lines)`)
-    }
+      const file = yield* Effect.promise(() =>
+        lines(filepath, { limit: params.limit ?? DEFAULT_READ_LIMIT, offset: params.offset ?? 1 }),
+      )
+      if (file.count < file.offset && !(file.count === 0 && file.offset === 1)) {
+        return yield* Effect.fail(
+          new Error(`Offset ${file.offset} is out of range for this file (${file.count} lines)`),
+        )
+      }
 
-    const content = raw.map((line, index) => {
-      return `${index + offset}: ${line}`
-    })
-    const preview = raw.slice(0, 20).join("\n")
-
-    let output = [`<path>${filepath}</path>`, `<type>file</type>`, "<content>"].join("\n")
-    output += content.join("\n")
-
-    const totalLines = lines
-    const lastReadLine = offset + raw.length - 1
-    const nextOffset = lastReadLine + 1
-    const truncated = hasMoreLines || truncatedByBytes
-
-    if (truncatedByBytes) {
-      output += `\n\n(Output capped at ${MAX_BYTES_LABEL}. Showing lines ${offset}-${lastReadLine}. Use offset=${nextOffset} to continue.)`
-    } else if (hasMoreLines) {
-      output += `\n\n(Showing lines ${offset}-${lastReadLine} of ${totalLines}. Use offset=${nextOffset} to continue.)`
-    } else {
-      output += `\n\n(End of file - total ${totalLines} lines)`
-    }
-    output += "\n</content>"
+      let output = [`<path>${filepath}</path>`, `<type>file</type>`, "<content>"].join("\n")
+      output += file.raw.map((line, i) => `${i + file.offset}: ${line}`).join("\n")
+
+      const last = file.offset + file.raw.length - 1
+      const next = last + 1
+      const truncated = file.more || file.cut
+      if (file.cut) {
+        output += `\n\n(Output capped at ${MAX_BYTES_LABEL}. Showing lines ${file.offset}-${last}. Use offset=${next} to continue.)`
+      } else if (file.more) {
+        output += `\n\n(Showing lines ${file.offset}-${last} of ${file.count}. Use offset=${next} to continue.)`
+      } else {
+        output += `\n\n(End of file - total ${file.count} lines)`
+      }
+      output += "\n</content>"
 
-    // just warms the lsp client
-    LSP.touchFile(filepath, false)
-    await FileTime.read(ctx.sessionID, filepath)
+      yield* warm(filepath, ctx.sessionID)
 
-    if (instructions.length > 0) {
-      output += `\n\n<system-reminder>\n${instructions.map((i) => i.content).join("\n\n")}\n</system-reminder>`
-    }
+      if (loaded.length > 0) {
+        output += `\n\n<system-reminder>\n${loaded.map((item) => item.content).join("\n\n")}\n</system-reminder>`
+      }
+
+      return {
+        title,
+        output,
+        metadata: {
+          preview: file.raw.slice(0, 20).join("\n"),
+          truncated,
+          loaded: loaded.map((item) => item.filepath),
+        },
+      }
+    })
 
     return {
-      title,
-      output,
-      metadata: {
-        preview,
-        truncated,
-        loaded: instructions.map((i) => i.filepath),
+      description: DESCRIPTION,
+      parameters,
+      async execute(params: z.infer<typeof parameters>, ctx) {
+        return Effect.runPromise(run(params, ctx).pipe(Effect.orDie))
       },
     }
-  },
-})
+  }),
+)
+
+async function lines(filepath: string, opts: { limit: number; offset: number }) {
+  const stream = createReadStream(filepath, { encoding: "utf8" })
+  const rl = createInterface({
+    input: stream,
+    // Note: we use the crlfDelay option to recognize all instances of CR LF
+    // ('\r\n') in file as a single line break.
+    crlfDelay: Infinity,
+  })
+
+  const start = opts.offset - 1
+  const raw: string[] = []
+  let bytes = 0
+  let count = 0
+  let cut = false
+  let more = false
+  try {
+    for await (const text of rl) {
+      count += 1
+      if (count <= start) continue
+
+      if (raw.length >= opts.limit) {
+        more = true
+        continue
+      }
+
+      const line = text.length > MAX_LINE_LENGTH ? text.substring(0, MAX_LINE_LENGTH) + MAX_LINE_SUFFIX : text
+      const size = Buffer.byteLength(line, "utf-8") + (raw.length > 0 ? 1 : 0)
+      if (bytes + size > MAX_BYTES) {
+        cut = true
+        more = true
+        break
+      }
+
+      raw.push(line)
+      bytes += size
+    }
+  } finally {
+    rl.close()
+    stream.destroy()
+  }
+
+  return { raw, count, cut, more, offset: opts.offset }
+}
 
 async function isBinaryFile(filepath: string, fileSize: number): Promise<boolean> {
   const ext = path.extname(filepath).toLowerCase()
@@ -274,7 +310,7 @@ async function isBinaryFile(filepath: string, fileSize: number): Promise<boolean
 
   if (fileSize === 0) return false
 
-  const fh = await fs.open(filepath, "r")
+  const fh = await open(filepath, "r")
   try {
     const sampleSize = Math.min(4096, fileSize)
     const bytes = Buffer.alloc(sampleSize)

+ 165 - 148
packages/opencode/src/tool/registry.ts

@@ -35,6 +35,10 @@ import { makeRuntime } from "@/effect/run-service"
 import { Env } from "../env"
 import { Question } from "../question"
 import { Todo } from "../session/todo"
+import { LSP } from "../lsp"
+import { FileTime } from "../file/time"
+import { Instruction } from "../session/instruction"
+import { AppFileSystem } from "../filesystem"
 
 export namespace ToolRegistry {
   const log = Log.create({ service: "tool.registry" })
@@ -57,167 +61,176 @@ export namespace ToolRegistry {
 
   export class Service extends ServiceMap.Service<Service, Interface>()("@opencode/ToolRegistry") {}
 
-  export const layer: Layer.Layer<Service, never, Config.Service | Plugin.Service | Question.Service | Todo.Service> =
-    Layer.effect(
-      Service,
-      Effect.gen(function* () {
-        const config = yield* Config.Service
-        const plugin = yield* Plugin.Service
-
-        const build = <T extends Tool.Info>(tool: T | Effect.Effect<T, never, any>) =>
-          Effect.isEffect(tool) ? tool : Effect.succeed(tool)
-
-        const state = yield* InstanceState.make<State>(
-          Effect.fn("ToolRegistry.state")(function* (ctx) {
-            const custom: Tool.Info[] = []
-
-            function fromPlugin(id: string, def: ToolDefinition): Tool.Info {
-              return {
-                id,
-                init: async (initCtx) => ({
-                  parameters: z.object(def.args),
-                  description: def.description,
-                  execute: async (args, toolCtx) => {
-                    const pluginCtx = {
-                      ...toolCtx,
-                      directory: ctx.directory,
-                      worktree: ctx.worktree,
-                    } as unknown as PluginToolContext
-                    const result = await def.execute(args as any, pluginCtx)
-                    const out = await Truncate.output(result, {}, initCtx?.agent)
-                    return {
-                      title: "",
-                      output: out.truncated ? out.content : result,
-                      metadata: { truncated: out.truncated, outputPath: out.truncated ? out.outputPath : undefined },
-                    }
-                  },
-                }),
-              }
+  export const layer: Layer.Layer<
+    Service,
+    never,
+    | Config.Service
+    | Plugin.Service
+    | Question.Service
+    | Todo.Service
+    | LSP.Service
+    | FileTime.Service
+    | Instruction.Service
+    | AppFileSystem.Service
+  > = Layer.effect(
+    Service,
+    Effect.gen(function* () {
+      const config = yield* Config.Service
+      const plugin = yield* Plugin.Service
+
+      const build = <T extends Tool.Info>(tool: T | Effect.Effect<T, never, any>) =>
+        Effect.isEffect(tool) ? tool : Effect.succeed(tool)
+
+      const state = yield* InstanceState.make<State>(
+        Effect.fn("ToolRegistry.state")(function* (ctx) {
+          const custom: Tool.Info[] = []
+
+          function fromPlugin(id: string, def: ToolDefinition): Tool.Info {
+            return {
+              id,
+              init: async (initCtx) => ({
+                parameters: z.object(def.args),
+                description: def.description,
+                execute: async (args, toolCtx) => {
+                  const pluginCtx = {
+                    ...toolCtx,
+                    directory: ctx.directory,
+                    worktree: ctx.worktree,
+                  } as unknown as PluginToolContext
+                  const result = await def.execute(args as any, pluginCtx)
+                  const out = await Truncate.output(result, {}, initCtx?.agent)
+                  return {
+                    title: "",
+                    output: out.truncated ? out.content : result,
+                    metadata: { truncated: out.truncated, outputPath: out.truncated ? out.outputPath : undefined },
+                  }
+                },
+              }),
             }
+          }
 
-            const dirs = yield* config.directories()
-            const matches = dirs.flatMap((dir) =>
-              Glob.scanSync("{tool,tools}/*.{js,ts}", { cwd: dir, absolute: true, dot: true, symlink: true }),
+          const dirs = yield* config.directories()
+          const matches = dirs.flatMap((dir) =>
+            Glob.scanSync("{tool,tools}/*.{js,ts}", { cwd: dir, absolute: true, dot: true, symlink: true }),
+          )
+          if (matches.length) yield* config.waitForDependencies()
+          for (const match of matches) {
+            const namespace = path.basename(match, path.extname(match))
+            const mod = yield* Effect.promise(
+              () => import(process.platform === "win32" ? match : pathToFileURL(match).href),
             )
-            if (matches.length) yield* config.waitForDependencies()
-            for (const match of matches) {
-              const namespace = path.basename(match, path.extname(match))
-              const mod = yield* Effect.promise(
-                () => import(process.platform === "win32" ? match : pathToFileURL(match).href),
-              )
-              for (const [id, def] of Object.entries<ToolDefinition>(mod)) {
-                custom.push(fromPlugin(id === "default" ? namespace : `${namespace}_${id}`, def))
-              }
+            for (const [id, def] of Object.entries<ToolDefinition>(mod)) {
+              custom.push(fromPlugin(id === "default" ? namespace : `${namespace}_${id}`, def))
             }
+          }
 
-            const plugins = yield* plugin.list()
-            for (const p of plugins) {
-              for (const [id, def] of Object.entries(p.tool ?? {})) {
-                custom.push(fromPlugin(id, def))
-              }
+          const plugins = yield* plugin.list()
+          for (const p of plugins) {
+            for (const [id, def] of Object.entries(p.tool ?? {})) {
+              custom.push(fromPlugin(id, def))
             }
+          }
 
-            return { custom }
-          }),
-        )
+          return { custom }
+        }),
+      )
 
-        const invalid = yield* build(InvalidTool)
-        const ask = yield* build(QuestionTool)
-        const bash = yield* build(BashTool)
-        const read = yield* build(ReadTool)
-        const glob = yield* build(GlobTool)
-        const grep = yield* build(GrepTool)
-        const edit = yield* build(EditTool)
-        const write = yield* build(WriteTool)
-        const task = yield* build(TaskTool)
-        const fetch = yield* build(WebFetchTool)
-        const todo = yield* build(TodoWriteTool)
-        const search = yield* build(WebSearchTool)
-        const code = yield* build(CodeSearchTool)
-        const skill = yield* build(SkillTool)
-        const patch = yield* build(ApplyPatchTool)
-        const lsp = yield* build(LspTool)
-        const batch = yield* build(BatchTool)
-        const plan = yield* build(PlanExitTool)
-
-        const all = Effect.fn("ToolRegistry.all")(function* (custom: Tool.Info[]) {
-          const cfg = yield* config.get()
-          const question =
-            ["app", "cli", "desktop"].includes(Flag.OPENCODE_CLIENT) || Flag.OPENCODE_ENABLE_QUESTION_TOOL
-
-          return [
-            invalid,
-            ...(question ? [ask] : []),
-            bash,
-            read,
-            glob,
-            grep,
-            edit,
-            write,
-            task,
-            fetch,
-            todo,
-            search,
-            code,
-            skill,
-            patch,
-            ...(Flag.OPENCODE_EXPERIMENTAL_LSP_TOOL ? [lsp] : []),
-            ...(cfg.experimental?.batch_tool === true ? [batch] : []),
-            ...(Flag.OPENCODE_EXPERIMENTAL_PLAN_MODE && Flag.OPENCODE_CLIENT === "cli" ? [plan] : []),
-            ...custom,
-          ]
-        })
+      const invalid = yield* build(InvalidTool)
+      const ask = yield* build(QuestionTool)
+      const bash = yield* build(BashTool)
+      const read = yield* build(ReadTool)
+      const glob = yield* build(GlobTool)
+      const grep = yield* build(GrepTool)
+      const edit = yield* build(EditTool)
+      const write = yield* build(WriteTool)
+      const task = yield* build(TaskTool)
+      const fetch = yield* build(WebFetchTool)
+      const todo = yield* build(TodoWriteTool)
+      const search = yield* build(WebSearchTool)
+      const code = yield* build(CodeSearchTool)
+      const skill = yield* build(SkillTool)
+      const patch = yield* build(ApplyPatchTool)
+      const lsp = yield* build(LspTool)
+      const batch = yield* build(BatchTool)
+      const plan = yield* build(PlanExitTool)
 
-        const ids = Effect.fn("ToolRegistry.ids")(function* () {
-          const s = yield* InstanceState.get(state)
-          const tools = yield* all(s.custom)
-          return tools.map((t) => t.id)
-        })
+      const all = Effect.fn("ToolRegistry.all")(function* (custom: Tool.Info[]) {
+        const cfg = yield* config.get()
+        const question = ["app", "cli", "desktop"].includes(Flag.OPENCODE_CLIENT) || Flag.OPENCODE_ENABLE_QUESTION_TOOL
 
-        const tools = Effect.fn("ToolRegistry.tools")(function* (
-          model: { providerID: ProviderID; modelID: ModelID },
-          agent?: Agent.Info,
-        ) {
-          const s = yield* InstanceState.get(state)
-          const allTools = yield* all(s.custom)
-          const filtered = allTools.filter((tool) => {
-            if (tool.id === "codesearch" || tool.id === "websearch") {
-              return model.providerID === ProviderID.opencode || Flag.OPENCODE_ENABLE_EXA
-            }
+        return [
+          invalid,
+          ...(question ? [ask] : []),
+          bash,
+          read,
+          glob,
+          grep,
+          edit,
+          write,
+          task,
+          fetch,
+          todo,
+          search,
+          code,
+          skill,
+          patch,
+          ...(Flag.OPENCODE_EXPERIMENTAL_LSP_TOOL ? [lsp] : []),
+          ...(cfg.experimental?.batch_tool === true ? [batch] : []),
+          ...(Flag.OPENCODE_EXPERIMENTAL_PLAN_MODE && Flag.OPENCODE_CLIENT === "cli" ? [plan] : []),
+          ...custom,
+        ]
+      })
 
-            const usePatch =
-              !!Env.get("OPENCODE_E2E_LLM_URL") ||
-              (model.modelID.includes("gpt-") && !model.modelID.includes("oss") && !model.modelID.includes("gpt-4"))
-            if (tool.id === "apply_patch") return usePatch
-            if (tool.id === "edit" || tool.id === "write") return !usePatch
-
-            return true
-          })
-          return yield* Effect.forEach(
-            filtered,
-            Effect.fnUntraced(function* (tool: Tool.Info) {
-              using _ = log.time(tool.id)
-              const next = yield* Effect.promise(() => tool.init({ agent }))
-              const output = {
-                description: next.description,
-                parameters: next.parameters,
-              }
-              yield* plugin.trigger("tool.definition", { toolID: tool.id }, output)
-              return {
-                id: tool.id,
-                description: output.description,
-                parameters: output.parameters,
-                execute: next.execute,
-                formatValidationError: next.formatValidationError,
-              }
-            }),
-            { concurrency: "unbounded" },
-          )
+      const ids = Effect.fn("ToolRegistry.ids")(function* () {
+        const s = yield* InstanceState.get(state)
+        const tools = yield* all(s.custom)
+        return tools.map((t) => t.id)
+      })
+
+      const tools = Effect.fn("ToolRegistry.tools")(function* (
+        model: { providerID: ProviderID; modelID: ModelID },
+        agent?: Agent.Info,
+      ) {
+        const s = yield* InstanceState.get(state)
+        const allTools = yield* all(s.custom)
+        const filtered = allTools.filter((tool) => {
+          if (tool.id === "codesearch" || tool.id === "websearch") {
+            return model.providerID === ProviderID.opencode || Flag.OPENCODE_ENABLE_EXA
+          }
+
+          const usePatch =
+            !!Env.get("OPENCODE_E2E_LLM_URL") ||
+            (model.modelID.includes("gpt-") && !model.modelID.includes("oss") && !model.modelID.includes("gpt-4"))
+          if (tool.id === "apply_patch") return usePatch
+          if (tool.id === "edit" || tool.id === "write") return !usePatch
+
+          return true
         })
+        return yield* Effect.forEach(
+          filtered,
+          Effect.fnUntraced(function* (tool: Tool.Info) {
+            using _ = log.time(tool.id)
+            const next = yield* Effect.promise(() => tool.init({ agent }))
+            const output = {
+              description: next.description,
+              parameters: next.parameters,
+            }
+            yield* plugin.trigger("tool.definition", { toolID: tool.id }, output)
+            return {
+              id: tool.id,
+              description: output.description,
+              parameters: output.parameters,
+              execute: next.execute,
+              formatValidationError: next.formatValidationError,
+            }
+          }),
+          { concurrency: "unbounded" },
+        )
+      })
 
-        return Service.of({ ids, named: { task, read }, tools })
-      }),
-    )
+      return Service.of({ ids, named: { task, read }, tools })
+    }),
+  )
 
   export const defaultLayer = Layer.unwrap(
     Effect.sync(() =>
@@ -226,6 +239,10 @@ export namespace ToolRegistry {
         Layer.provide(Plugin.defaultLayer),
         Layer.provide(Question.defaultLayer),
         Layer.provide(Todo.defaultLayer),
+        Layer.provide(LSP.defaultLayer),
+        Layer.provide(FileTime.defaultLayer),
+        Layer.provide(Instruction.defaultLayer),
+        Layer.provide(AppFileSystem.defaultLayer),
       ),
     ),
   )

+ 392 - 470
packages/opencode/test/tool/read.test.ts

@@ -1,12 +1,20 @@
-import { afterEach, describe, expect, test } from "bun:test"
+import { afterEach, describe, expect } from "bun:test"
+import { Cause, Effect, Exit, Layer } from "effect"
 import path from "path"
-import { ReadTool } from "../../src/tool/read"
-import { Instance } from "../../src/project/instance"
-import { Filesystem } from "../../src/util/filesystem"
-import { tmpdir } from "../fixture/fixture"
-import { Permission } from "../../src/permission"
 import { Agent } from "../../src/agent/agent"
+import * as CrossSpawnSpawner from "../../src/effect/cross-spawn-spawner"
+import { AppFileSystem } from "../../src/filesystem"
+import { FileTime } from "../../src/file/time"
+import { LSP } from "../../src/lsp"
+import { Permission } from "../../src/permission"
+import { Instance } from "../../src/project/instance"
 import { SessionID, MessageID } from "../../src/session/schema"
+import { Instruction } from "../../src/session/instruction"
+import { ReadTool } from "../../src/tool/read"
+import { Tool } from "../../src/tool/tool"
+import { Filesystem } from "../../src/util/filesystem"
+import { provideInstance, tmpdirScoped } from "../fixture/fixture"
+import { testEffect } from "../lib/effect"
 
 const FIXTURES_DIR = path.join(import.meta.dir, "fixtures")
 
@@ -25,173 +33,171 @@ const ctx = {
   ask: async () => {},
 }
 
+const it = testEffect(
+  Layer.mergeAll(
+    Agent.defaultLayer,
+    AppFileSystem.defaultLayer,
+    CrossSpawnSpawner.defaultLayer,
+    FileTime.defaultLayer,
+    Instruction.defaultLayer,
+    LSP.defaultLayer,
+  ),
+)
+
+const init = Effect.fn("ReadToolTest.init")(function* () {
+  const info = yield* ReadTool
+  return yield* Effect.promise(() => info.init())
+})
+
+const run = Effect.fn("ReadToolTest.run")(function* (
+  args: Tool.InferParameters<typeof ReadTool>,
+  next: Tool.Context = ctx,
+) {
+  const tool = yield* init()
+  return yield* Effect.promise(() => tool.execute(args, next))
+})
+
+const exec = Effect.fn("ReadToolTest.exec")(function* (
+  dir: string,
+  args: Tool.InferParameters<typeof ReadTool>,
+  next: Tool.Context = ctx,
+) {
+  return yield* provideInstance(dir)(run(args, next))
+})
+
+const fail = Effect.fn("ReadToolTest.fail")(function* (
+  dir: string,
+  args: Tool.InferParameters<typeof ReadTool>,
+  next: Tool.Context = ctx,
+) {
+  const exit = yield* exec(dir, args, next).pipe(Effect.exit)
+  if (Exit.isFailure(exit)) {
+    const err = Cause.squash(exit.cause)
+    return err instanceof Error ? err : new Error(String(err))
+  }
+  throw new Error("expected read to fail")
+})
+
 const full = (p: string) => (process.platform === "win32" ? Filesystem.normalizePath(p) : p)
 const glob = (p: string) =>
   process.platform === "win32" ? Filesystem.normalizePathPattern(p) : p.replaceAll("\\", "/")
+const put = Effect.fn("ReadToolTest.put")(function* (p: string, content: string | Buffer | Uint8Array) {
+  const fs = yield* AppFileSystem.Service
+  yield* fs.writeWithDirs(p, content)
+})
+const load = Effect.fn("ReadToolTest.load")(function* (p: string) {
+  const fs = yield* AppFileSystem.Service
+  return yield* fs.readFileString(p)
+})
+const asks = () => {
+  const items: Array<Omit<Permission.Request, "id" | "sessionID" | "tool">> = []
+  return {
+    items,
+    next: {
+      ...ctx,
+      ask: async (req: Omit<Permission.Request, "id" | "sessionID" | "tool">) => {
+        items.push(req)
+      },
+    },
+  }
+}
 
 describe("tool.read external_directory permission", () => {
-  test("allows reading absolute path inside project directory", async () => {
-    await using tmp = await tmpdir({
-      init: async (dir) => {
-        await Bun.write(path.join(dir, "test.txt"), "hello world")
-      },
-    })
-    await Instance.provide({
-      directory: tmp.path,
-      fn: async () => {
-        const read = await ReadTool.init()
-        const result = await read.execute({ filePath: path.join(tmp.path, "test.txt") }, ctx)
-        expect(result.output).toContain("hello world")
-      },
-    })
-  })
+  it.live("allows reading absolute path inside project directory", () =>
+    Effect.gen(function* () {
+      const dir = yield* tmpdirScoped()
+      yield* put(path.join(dir, "test.txt"), "hello world")
 
-  test("allows reading file in subdirectory inside project directory", async () => {
-    await using tmp = await tmpdir({
-      init: async (dir) => {
-        await Bun.write(path.join(dir, "subdir", "test.txt"), "nested content")
-      },
-    })
-    await Instance.provide({
-      directory: tmp.path,
-      fn: async () => {
-        const read = await ReadTool.init()
-        const result = await read.execute({ filePath: path.join(tmp.path, "subdir", "test.txt") }, ctx)
-        expect(result.output).toContain("nested content")
-      },
-    })
-  })
+      const result = yield* exec(dir, { filePath: path.join(dir, "test.txt") })
+      expect(result.output).toContain("hello world")
+    }),
+  )
 
-  test("asks for external_directory permission when reading absolute path outside project", async () => {
-    await using outerTmp = await tmpdir({
-      init: async (dir) => {
-        await Bun.write(path.join(dir, "secret.txt"), "secret data")
-      },
-    })
-    await using tmp = await tmpdir({ git: true })
-    await Instance.provide({
-      directory: tmp.path,
-      fn: async () => {
-        const read = await ReadTool.init()
-        const requests: Array<Omit<Permission.Request, "id" | "sessionID" | "tool">> = []
-        const testCtx = {
-          ...ctx,
-          ask: async (req: Omit<Permission.Request, "id" | "sessionID" | "tool">) => {
-            requests.push(req)
-          },
-        }
-        await read.execute({ filePath: path.join(outerTmp.path, "secret.txt") }, testCtx)
-        const extDirReq = requests.find((r) => r.permission === "external_directory")
-        expect(extDirReq).toBeDefined()
-        expect(extDirReq!.patterns).toContain(glob(path.join(outerTmp.path, "*")))
-      },
-    })
-  })
+  it.live("allows reading file in subdirectory inside project directory", () =>
+    Effect.gen(function* () {
+      const dir = yield* tmpdirScoped()
+      yield* put(path.join(dir, "subdir", "test.txt"), "nested content")
+
+      const result = yield* exec(dir, { filePath: path.join(dir, "subdir", "test.txt") })
+      expect(result.output).toContain("nested content")
+    }),
+  )
+
+  it.live("asks for external_directory permission when reading absolute path outside project", () =>
+    Effect.gen(function* () {
+      const outer = yield* tmpdirScoped()
+      const dir = yield* tmpdirScoped({ git: true })
+      yield* put(path.join(outer, "secret.txt"), "secret data")
+
+      const { items, next } = asks()
+
+      yield* exec(dir, { filePath: path.join(outer, "secret.txt") }, next)
+      const ext = items.find((item) => item.permission === "external_directory")
+      expect(ext).toBeDefined()
+      expect(ext!.patterns).toContain(glob(path.join(outer, "*")))
+    }),
+  )
 
   if (process.platform === "win32") {
-    test("normalizes read permission paths on Windows", async () => {
-      await using tmp = await tmpdir({
-        git: true,
-        init: async (dir) => {
-          await Bun.write(path.join(dir, "test.txt"), "hello world")
-        },
-      })
-      await Instance.provide({
-        directory: tmp.path,
-        fn: async () => {
-          const read = await ReadTool.init()
-          const requests: Array<Omit<Permission.Request, "id" | "sessionID" | "tool">> = []
-          const testCtx = {
-            ...ctx,
-            ask: async (req: Omit<Permission.Request, "id" | "sessionID" | "tool">) => {
-              requests.push(req)
-            },
-          }
-          const target = path.join(tmp.path, "test.txt")
-          const alt = target
-            .replace(/^[A-Za-z]:/, "")
-            .replaceAll("\\", "/")
-            .toLowerCase()
-          await read.execute({ filePath: alt }, testCtx)
-          const readReq = requests.find((r) => r.permission === "read")
-          expect(readReq).toBeDefined()
-          expect(readReq!.patterns).toEqual([full(target)])
-        },
-      })
-    })
+    it.live("normalizes read permission paths on Windows", () =>
+      Effect.gen(function* () {
+        const dir = yield* tmpdirScoped({ git: true })
+        yield* put(path.join(dir, "test.txt"), "hello world")
+
+        const { items, next } = asks()
+        const target = path.join(dir, "test.txt")
+        const alt = target
+          .replace(/^[A-Za-z]:/, "")
+          .replaceAll("\\", "/")
+          .toLowerCase()
+
+        yield* exec(dir, { filePath: alt }, next)
+        const read = items.find((item) => item.permission === "read")
+        expect(read).toBeDefined()
+        expect(read!.patterns).toEqual([full(target)])
+      }),
+    )
   }
 
-  test("asks for directory-scoped external_directory permission when reading external directory", async () => {
-    await using outerTmp = await tmpdir({
-      init: async (dir) => {
-        await Bun.write(path.join(dir, "external", "a.txt"), "a")
-      },
-    })
-    await using tmp = await tmpdir({ git: true })
-    await Instance.provide({
-      directory: tmp.path,
-      fn: async () => {
-        const read = await ReadTool.init()
-        const requests: Array<Omit<Permission.Request, "id" | "sessionID" | "tool">> = []
-        const testCtx = {
-          ...ctx,
-          ask: async (req: Omit<Permission.Request, "id" | "sessionID" | "tool">) => {
-            requests.push(req)
-          },
-        }
-        await read.execute({ filePath: path.join(outerTmp.path, "external") }, testCtx)
-        const extDirReq = requests.find((r) => r.permission === "external_directory")
-        expect(extDirReq).toBeDefined()
-        expect(extDirReq!.patterns).toContain(glob(path.join(outerTmp.path, "external", "*")))
-      },
-    })
-  })
-
-  test("asks for external_directory permission when reading relative path outside project", async () => {
-    await using tmp = await tmpdir({ git: true })
-    await Instance.provide({
-      directory: tmp.path,
-      fn: async () => {
-        const read = await ReadTool.init()
-        const requests: Array<Omit<Permission.Request, "id" | "sessionID" | "tool">> = []
-        const testCtx = {
-          ...ctx,
-          ask: async (req: Omit<Permission.Request, "id" | "sessionID" | "tool">) => {
-            requests.push(req)
-          },
-        }
-        // This will fail because file doesn't exist, but we can check if permission was asked
-        await read.execute({ filePath: "../outside.txt" }, testCtx).catch(() => {})
-        const extDirReq = requests.find((r) => r.permission === "external_directory")
-        expect(extDirReq).toBeDefined()
-      },
-    })
-  })
+  it.live("asks for directory-scoped external_directory permission when reading external directory", () =>
+    Effect.gen(function* () {
+      const outer = yield* tmpdirScoped()
+      const dir = yield* tmpdirScoped({ git: true })
+      yield* put(path.join(outer, "external", "a.txt"), "a")
 
-  test("does not ask for external_directory permission when reading inside project", async () => {
-    await using tmp = await tmpdir({
-      git: true,
-      init: async (dir) => {
-        await Bun.write(path.join(dir, "internal.txt"), "internal content")
-      },
-    })
-    await Instance.provide({
-      directory: tmp.path,
-      fn: async () => {
-        const read = await ReadTool.init()
-        const requests: Array<Omit<Permission.Request, "id" | "sessionID" | "tool">> = []
-        const testCtx = {
-          ...ctx,
-          ask: async (req: Omit<Permission.Request, "id" | "sessionID" | "tool">) => {
-            requests.push(req)
-          },
-        }
-        await read.execute({ filePath: path.join(tmp.path, "internal.txt") }, testCtx)
-        const extDirReq = requests.find((r) => r.permission === "external_directory")
-        expect(extDirReq).toBeUndefined()
-      },
-    })
-  })
+      const { items, next } = asks()
+
+      yield* exec(dir, { filePath: path.join(outer, "external") }, next)
+      const ext = items.find((item) => item.permission === "external_directory")
+      expect(ext).toBeDefined()
+      expect(ext!.patterns).toContain(glob(path.join(outer, "external", "*")))
+    }),
+  )
+
+  it.live("asks for external_directory permission when reading relative path outside project", () =>
+    Effect.gen(function* () {
+      const dir = yield* tmpdirScoped({ git: true })
+
+      const { items, next } = asks()
+
+      yield* fail(dir, { filePath: "../outside.txt" }, next)
+      const ext = items.find((item) => item.permission === "external_directory")
+      expect(ext).toBeDefined()
+    }),
+  )
+
+  it.live("does not ask for external_directory permission when reading inside project", () =>
+    Effect.gen(function* () {
+      const dir = yield* tmpdirScoped({ git: true })
+      yield* put(path.join(dir, "internal.txt"), "internal content")
+
+      const { items, next } = asks()
+
+      yield* exec(dir, { filePath: path.join(dir, "internal.txt") }, next)
+      const ext = items.find((item) => item.permission === "external_directory")
+      expect(ext).toBeUndefined()
+    }),
+  )
 })
 
 describe("tool.read env file permissions", () => {
@@ -205,261 +211,204 @@ describe("tool.read env file permissions", () => {
     ["environment.ts", false],
   ]
 
-  describe.each(["build", "plan"])("agent=%s", (agentName) => {
-    test.each(cases)("%s asks=%s", async (filename, shouldAsk) => {
-      await using tmp = await tmpdir({
-        init: (dir) => Bun.write(path.join(dir, filename), "content"),
-      })
-      await Instance.provide({
-        directory: tmp.path,
-        fn: async () => {
-          const agent = await Agent.get(agentName)
-          let askedForEnv = false
-          const ctxWithPermissions = {
-            ...ctx,
-            ask: async (req: Omit<Permission.Request, "id" | "sessionID" | "tool">) => {
-              for (const pattern of req.patterns) {
-                const rule = Permission.evaluate(req.permission, pattern, agent.permission)
-                if (rule.action === "ask" && req.permission === "read") {
-                  askedForEnv = true
-                }
-                if (rule.action === "deny") {
-                  throw new Permission.DeniedError({ ruleset: agent.permission })
+  for (const agentName of ["build", "plan"] as const) {
+    describe(`agent=${agentName}`, () => {
+      for (const [filename, shouldAsk] of cases) {
+        it.live(`${filename} asks=${shouldAsk}`, () =>
+          Effect.gen(function* () {
+            const dir = yield* tmpdirScoped()
+            yield* put(path.join(dir, filename), "content")
+
+            const asked = yield* provideInstance(dir)(
+              Effect.gen(function* () {
+                const agent = yield* Agent.Service
+                const info = yield* agent.get(agentName)
+                let asked = false
+                const next = {
+                  ...ctx,
+                  ask: async (req: Omit<Permission.Request, "id" | "sessionID" | "tool">) => {
+                    for (const pattern of req.patterns) {
+                      const rule = Permission.evaluate(req.permission, pattern, info.permission)
+                      if (rule.action === "ask" && req.permission === "read") {
+                        asked = true
+                      }
+                      if (rule.action === "deny") {
+                        throw new Permission.DeniedError({ ruleset: info.permission })
+                      }
+                    }
+                  },
                 }
-              }
-            },
-          }
-          const read = await ReadTool.init()
-          await read.execute({ filePath: path.join(tmp.path, filename) }, ctxWithPermissions)
-          expect(askedForEnv).toBe(shouldAsk)
-        },
-      })
+
+                yield* run({ filePath: path.join(dir, filename) }, next)
+                return asked
+              }),
+            )
+
+            expect(asked).toBe(shouldAsk)
+          }),
+        )
+      }
     })
-  })
+  }
 })
 
 describe("tool.read truncation", () => {
-  test("truncates large file by bytes and sets truncated metadata", async () => {
-    await using tmp = await tmpdir({
-      init: async (dir) => {
-        const base = await Filesystem.readText(path.join(FIXTURES_DIR, "models-api.json"))
-        const target = 60 * 1024
-        const content = base.length >= target ? base : base.repeat(Math.ceil(target / base.length))
-        await Filesystem.write(path.join(dir, "large.json"), content)
-      },
-    })
-    await Instance.provide({
-      directory: tmp.path,
-      fn: async () => {
-        const read = await ReadTool.init()
-        const result = await read.execute({ filePath: path.join(tmp.path, "large.json") }, ctx)
-        expect(result.metadata.truncated).toBe(true)
-        expect(result.output).toContain("Output capped at")
-        expect(result.output).toContain("Use offset=")
-      },
-    })
-  })
+  it.live("truncates large file by bytes and sets truncated metadata", () =>
+    Effect.gen(function* () {
+      const dir = yield* tmpdirScoped()
+      const base = yield* load(path.join(FIXTURES_DIR, "models-api.json"))
+      const target = 60 * 1024
+      const content = base.length >= target ? base : base.repeat(Math.ceil(target / base.length))
+      yield* put(path.join(dir, "large.json"), content)
 
-  test("truncates by line count when limit is specified", async () => {
-    await using tmp = await tmpdir({
-      init: async (dir) => {
-        const lines = Array.from({ length: 100 }, (_, i) => `line${i}`).join("\n")
-        await Bun.write(path.join(dir, "many-lines.txt"), lines)
-      },
-    })
-    await Instance.provide({
-      directory: tmp.path,
-      fn: async () => {
-        const read = await ReadTool.init()
-        const result = await read.execute({ filePath: path.join(tmp.path, "many-lines.txt"), limit: 10 }, ctx)
-        expect(result.metadata.truncated).toBe(true)
-        expect(result.output).toContain("Showing lines 1-10 of 100")
-        expect(result.output).toContain("Use offset=11")
-        expect(result.output).toContain("line0")
-        expect(result.output).toContain("line9")
-        expect(result.output).not.toContain("line10")
-      },
-    })
-  })
+      const result = yield* exec(dir, { filePath: path.join(dir, "large.json") })
+      expect(result.metadata.truncated).toBe(true)
+      expect(result.output).toContain("Output capped at")
+      expect(result.output).toContain("Use offset=")
+    }),
+  )
 
-  test("does not truncate small file", async () => {
-    await using tmp = await tmpdir({
-      init: async (dir) => {
-        await Bun.write(path.join(dir, "small.txt"), "hello world")
-      },
-    })
-    await Instance.provide({
-      directory: tmp.path,
-      fn: async () => {
-        const read = await ReadTool.init()
-        const result = await read.execute({ filePath: path.join(tmp.path, "small.txt") }, ctx)
-        expect(result.metadata.truncated).toBe(false)
-        expect(result.output).toContain("End of file")
-      },
-    })
-  })
+  it.live("truncates by line count when limit is specified", () =>
+    Effect.gen(function* () {
+      const dir = yield* tmpdirScoped()
+      const lines = Array.from({ length: 100 }, (_, i) => `line${i}`).join("\n")
+      yield* put(path.join(dir, "many-lines.txt"), lines)
 
-  test("respects offset parameter", async () => {
-    await using tmp = await tmpdir({
-      init: async (dir) => {
-        const lines = Array.from({ length: 20 }, (_, i) => `line${i + 1}`).join("\n")
-        await Bun.write(path.join(dir, "offset.txt"), lines)
-      },
-    })
-    await Instance.provide({
-      directory: tmp.path,
-      fn: async () => {
-        const read = await ReadTool.init()
-        const result = await read.execute({ filePath: path.join(tmp.path, "offset.txt"), offset: 10, limit: 5 }, ctx)
-        expect(result.output).toContain("10: line10")
-        expect(result.output).toContain("14: line14")
-        expect(result.output).not.toContain("9: line10")
-        expect(result.output).not.toContain("15: line15")
-        expect(result.output).toContain("line10")
-        expect(result.output).toContain("line14")
-        expect(result.output).not.toContain("line0")
-        expect(result.output).not.toContain("line15")
-      },
-    })
-  })
+      const result = yield* exec(dir, { filePath: path.join(dir, "many-lines.txt"), limit: 10 })
+      expect(result.metadata.truncated).toBe(true)
+      expect(result.output).toContain("Showing lines 1-10 of 100")
+      expect(result.output).toContain("Use offset=11")
+      expect(result.output).toContain("line0")
+      expect(result.output).toContain("line9")
+      expect(result.output).not.toContain("line10")
+    }),
+  )
 
-  test("throws when offset is beyond end of file", async () => {
-    await using tmp = await tmpdir({
-      init: async (dir) => {
-        const lines = Array.from({ length: 3 }, (_, i) => `line${i + 1}`).join("\n")
-        await Bun.write(path.join(dir, "short.txt"), lines)
-      },
-    })
-    await Instance.provide({
-      directory: tmp.path,
-      fn: async () => {
-        const read = await ReadTool.init()
-        await expect(
-          read.execute({ filePath: path.join(tmp.path, "short.txt"), offset: 4, limit: 5 }, ctx),
-        ).rejects.toThrow("Offset 4 is out of range for this file (3 lines)")
-      },
-    })
-  })
+  it.live("does not truncate small file", () =>
+    Effect.gen(function* () {
+      const dir = yield* tmpdirScoped()
+      yield* put(path.join(dir, "small.txt"), "hello world")
 
-  test("allows reading empty file at default offset", async () => {
-    await using tmp = await tmpdir({
-      init: async (dir) => {
-        await Bun.write(path.join(dir, "empty.txt"), "")
-      },
-    })
-    await Instance.provide({
-      directory: tmp.path,
-      fn: async () => {
-        const read = await ReadTool.init()
-        const result = await read.execute({ filePath: path.join(tmp.path, "empty.txt") }, ctx)
-        expect(result.metadata.truncated).toBe(false)
-        expect(result.output).toContain("End of file - total 0 lines")
-      },
-    })
-  })
+      const result = yield* exec(dir, { filePath: path.join(dir, "small.txt") })
+      expect(result.metadata.truncated).toBe(false)
+      expect(result.output).toContain("End of file")
+    }),
+  )
 
-  test("throws when offset > 1 for empty file", async () => {
-    await using tmp = await tmpdir({
-      init: async (dir) => {
-        await Bun.write(path.join(dir, "empty.txt"), "")
-      },
-    })
-    await Instance.provide({
-      directory: tmp.path,
-      fn: async () => {
-        const read = await ReadTool.init()
-        await expect(read.execute({ filePath: path.join(tmp.path, "empty.txt"), offset: 2 }, ctx)).rejects.toThrow(
-          "Offset 2 is out of range for this file (0 lines)",
-        )
-      },
-    })
-  })
+  it.live("respects offset parameter", () =>
+    Effect.gen(function* () {
+      const dir = yield* tmpdirScoped()
+      const lines = Array.from({ length: 20 }, (_, i) => `line${i + 1}`).join("\n")
+      yield* put(path.join(dir, "offset.txt"), lines)
 
-  test("does not mark final directory page as truncated", async () => {
-    await using tmp = await tmpdir({
-      init: async (dir) => {
-        await Promise.all(
-          Array.from({ length: 10 }, (_, i) => Bun.write(path.join(dir, "dir", `file-${i + 1}.txt`), `line${i}`)),
-        )
-      },
-    })
-    await Instance.provide({
-      directory: tmp.path,
-      fn: async () => {
-        const read = await ReadTool.init()
-        const result = await read.execute({ filePath: path.join(tmp.path, "dir"), offset: 6, limit: 5 }, ctx)
-        expect(result.metadata.truncated).toBe(false)
-        expect(result.output).not.toContain("Showing 5 of 10 entries")
-      },
-    })
-  })
+      const result = yield* exec(dir, { filePath: path.join(dir, "offset.txt"), offset: 10, limit: 5 })
+      expect(result.output).toContain("10: line10")
+      expect(result.output).toContain("14: line14")
+      expect(result.output).not.toContain("9: line10")
+      expect(result.output).not.toContain("15: line15")
+      expect(result.output).toContain("line10")
+      expect(result.output).toContain("line14")
+      expect(result.output).not.toContain("line0")
+      expect(result.output).not.toContain("line15")
+    }),
+  )
 
-  test("truncates long lines", async () => {
-    await using tmp = await tmpdir({
-      init: async (dir) => {
-        const longLine = "x".repeat(3000)
-        await Bun.write(path.join(dir, "long-line.txt"), longLine)
-      },
-    })
-    await Instance.provide({
-      directory: tmp.path,
-      fn: async () => {
-        const read = await ReadTool.init()
-        const result = await read.execute({ filePath: path.join(tmp.path, "long-line.txt") }, ctx)
-        expect(result.output).toContain("(line truncated to 2000 chars)")
-        expect(result.output.length).toBeLessThan(3000)
-      },
-    })
-  })
-
-  test("image files set truncated to false", async () => {
-    await using tmp = await tmpdir({
-      init: async (dir) => {
-        // 1x1 red PNG
-        const png = Buffer.from(
-          "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mP8z8DwHwAFBQIAX8jx0gAAAABJRU5ErkJggg==",
-          "base64",
-        )
-        await Bun.write(path.join(dir, "image.png"), png)
-      },
-    })
-    await Instance.provide({
-      directory: tmp.path,
-      fn: async () => {
-        const read = await ReadTool.init()
-        const result = await read.execute({ filePath: path.join(tmp.path, "image.png") }, ctx)
-        expect(result.metadata.truncated).toBe(false)
-        expect(result.attachments).toBeDefined()
-        expect(result.attachments?.length).toBe(1)
-        expect(result.attachments?.[0]).not.toHaveProperty("id")
-        expect(result.attachments?.[0]).not.toHaveProperty("sessionID")
-        expect(result.attachments?.[0]).not.toHaveProperty("messageID")
-      },
-    })
-  })
-
-  test("large image files are properly attached without error", async () => {
-    await Instance.provide({
-      directory: FIXTURES_DIR,
-      fn: async () => {
-        const read = await ReadTool.init()
-        const result = await read.execute({ filePath: path.join(FIXTURES_DIR, "large-image.png") }, ctx)
-        expect(result.metadata.truncated).toBe(false)
-        expect(result.attachments).toBeDefined()
-        expect(result.attachments?.length).toBe(1)
-        expect(result.attachments?.[0].type).toBe("file")
-        expect(result.attachments?.[0]).not.toHaveProperty("id")
-        expect(result.attachments?.[0]).not.toHaveProperty("sessionID")
-        expect(result.attachments?.[0]).not.toHaveProperty("messageID")
-      },
-    })
-  })
+  it.live("throws when offset is beyond end of file", () =>
+    Effect.gen(function* () {
+      const dir = yield* tmpdirScoped()
+      const lines = Array.from({ length: 3 }, (_, i) => `line${i + 1}`).join("\n")
+      yield* put(path.join(dir, "short.txt"), lines)
+
+      const err = yield* fail(dir, { filePath: path.join(dir, "short.txt"), offset: 4, limit: 5 })
+      expect(err.message).toContain("Offset 4 is out of range for this file (3 lines)")
+    }),
+  )
+
+  it.live("allows reading empty file at default offset", () =>
+    Effect.gen(function* () {
+      const dir = yield* tmpdirScoped()
+      yield* put(path.join(dir, "empty.txt"), "")
+
+      const result = yield* exec(dir, { filePath: path.join(dir, "empty.txt") })
+      expect(result.metadata.truncated).toBe(false)
+      expect(result.output).toContain("End of file - total 0 lines")
+    }),
+  )
+
+  it.live("throws when offset > 1 for empty file", () =>
+    Effect.gen(function* () {
+      const dir = yield* tmpdirScoped()
+      yield* put(path.join(dir, "empty.txt"), "")
+
+      const err = yield* fail(dir, { filePath: path.join(dir, "empty.txt"), offset: 2 })
+      expect(err.message).toContain("Offset 2 is out of range for this file (0 lines)")
+    }),
+  )
+
+  it.live("does not mark final directory page as truncated", () =>
+    Effect.gen(function* () {
+      const dir = yield* tmpdirScoped()
+      yield* Effect.forEach(
+        Array.from({ length: 10 }, (_, i) => i),
+        (i) => put(path.join(dir, "dir", `file-${i + 1}.txt`), `line${i}`),
+        {
+          concurrency: "unbounded",
+        },
+      )
+
+      const result = yield* exec(dir, { filePath: path.join(dir, "dir"), offset: 6, limit: 5 })
+      expect(result.metadata.truncated).toBe(false)
+      expect(result.output).not.toContain("Showing 5 of 10 entries")
+    }),
+  )
 
-  test(".fbs files (FlatBuffers schema) are read as text, not images", async () => {
-    await using tmp = await tmpdir({
-      init: async (dir) => {
-        // FlatBuffers schema content
-        const fbsContent = `namespace MyGame;
+  it.live("truncates long lines", () =>
+    Effect.gen(function* () {
+      const dir = yield* tmpdirScoped()
+      yield* put(path.join(dir, "long-line.txt"), "x".repeat(3000))
+
+      const result = yield* exec(dir, { filePath: path.join(dir, "long-line.txt") })
+      expect(result.output).toContain("(line truncated to 2000 chars)")
+      expect(result.output.length).toBeLessThan(3000)
+    }),
+  )
+
+  it.live("image files set truncated to false", () =>
+    Effect.gen(function* () {
+      const dir = yield* tmpdirScoped()
+      const png = Buffer.from(
+        "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mP8z8DwHwAFBQIAX8jx0gAAAABJRU5ErkJggg==",
+        "base64",
+      )
+      yield* put(path.join(dir, "image.png"), png)
+
+      const result = yield* exec(dir, { filePath: path.join(dir, "image.png") })
+      expect(result.metadata.truncated).toBe(false)
+      expect(result.attachments).toBeDefined()
+      expect(result.attachments?.length).toBe(1)
+      expect(result.attachments?.[0]).not.toHaveProperty("id")
+      expect(result.attachments?.[0]).not.toHaveProperty("sessionID")
+      expect(result.attachments?.[0]).not.toHaveProperty("messageID")
+    }),
+  )
+
+  it.live("large image files are properly attached without error", () =>
+    Effect.gen(function* () {
+      const result = yield* exec(FIXTURES_DIR, { filePath: path.join(FIXTURES_DIR, "large-image.png") })
+      expect(result.metadata.truncated).toBe(false)
+      expect(result.attachments).toBeDefined()
+      expect(result.attachments?.length).toBe(1)
+      expect(result.attachments?.[0].type).toBe("file")
+      expect(result.attachments?.[0]).not.toHaveProperty("id")
+      expect(result.attachments?.[0]).not.toHaveProperty("sessionID")
+      expect(result.attachments?.[0]).not.toHaveProperty("messageID")
+    }),
+  )
+
+  it.live(".fbs files (FlatBuffers schema) are read as text, not images", () =>
+    Effect.gen(function* () {
+      const dir = yield* tmpdirScoped()
+      const fbs = `namespace MyGame;
 
 table Monster {
   pos:Vec3;
@@ -468,79 +417,52 @@ table Monster {
 }
 
 root_type Monster;`
-        await Bun.write(path.join(dir, "schema.fbs"), fbsContent)
-      },
-    })
-    await Instance.provide({
-      directory: tmp.path,
-      fn: async () => {
-        const read = await ReadTool.init()
-        const result = await read.execute({ filePath: path.join(tmp.path, "schema.fbs") }, ctx)
-        // Should be read as text, not as image
-        expect(result.attachments).toBeUndefined()
-        expect(result.output).toContain("namespace MyGame")
-        expect(result.output).toContain("table Monster")
-      },
-    })
-  })
+      yield* put(path.join(dir, "schema.fbs"), fbs)
+
+      const result = yield* exec(dir, { filePath: path.join(dir, "schema.fbs") })
+      expect(result.attachments).toBeUndefined()
+      expect(result.output).toContain("namespace MyGame")
+      expect(result.output).toContain("table Monster")
+    }),
+  )
 })
 
 describe("tool.read loaded instructions", () => {
-  test("loads AGENTS.md from parent directory and includes in metadata", async () => {
-    await using tmp = await tmpdir({
-      init: async (dir) => {
-        await Bun.write(path.join(dir, "subdir", "AGENTS.md"), "# Test Instructions\nDo something special.")
-        await Bun.write(path.join(dir, "subdir", "nested", "test.txt"), "test content")
-      },
-    })
-    await Instance.provide({
-      directory: tmp.path,
-      fn: async () => {
-        const read = await ReadTool.init()
-        const result = await read.execute({ filePath: path.join(tmp.path, "subdir", "nested", "test.txt") }, ctx)
-        expect(result.output).toContain("test content")
-        expect(result.output).toContain("system-reminder")
-        expect(result.output).toContain("Test Instructions")
-        expect(result.metadata.loaded).toBeDefined()
-        expect(result.metadata.loaded).toContain(path.join(tmp.path, "subdir", "AGENTS.md"))
-      },
-    })
-  })
+  it.live("loads AGENTS.md from parent directory and includes in metadata", () =>
+    Effect.gen(function* () {
+      const dir = yield* tmpdirScoped()
+      yield* put(path.join(dir, "subdir", "AGENTS.md"), "# Test Instructions\nDo something special.")
+      yield* put(path.join(dir, "subdir", "nested", "test.txt"), "test content")
+
+      const result = yield* exec(dir, { filePath: path.join(dir, "subdir", "nested", "test.txt") })
+      expect(result.output).toContain("test content")
+      expect(result.output).toContain("system-reminder")
+      expect(result.output).toContain("Test Instructions")
+      expect(result.metadata.loaded).toBeDefined()
+      expect(result.metadata.loaded).toContain(path.join(dir, "subdir", "AGENTS.md"))
+    }),
+  )
 })
 
 describe("tool.read binary detection", () => {
-  test("rejects text extension files with null bytes", async () => {
-    await using tmp = await tmpdir({
-      init: async (dir) => {
-        const bytes = Buffer.from([0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x00, 0x77, 0x6f, 0x72, 0x6c, 0x64])
-        await Bun.write(path.join(dir, "null-byte.txt"), bytes)
-      },
-    })
-    await Instance.provide({
-      directory: tmp.path,
-      fn: async () => {
-        const read = await ReadTool.init()
-        await expect(read.execute({ filePath: path.join(tmp.path, "null-byte.txt") }, ctx)).rejects.toThrow(
-          "Cannot read binary file",
-        )
-      },
-    })
-  })
+  it.live("rejects text extension files with null bytes", () =>
+    Effect.gen(function* () {
+      const dir = yield* tmpdirScoped()
+      const bytes = Buffer.from([0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x00, 0x77, 0x6f, 0x72, 0x6c, 0x64])
+      yield* put(path.join(dir, "null-byte.txt"), bytes)
 
-  test("rejects known binary extensions", async () => {
-    await using tmp = await tmpdir({
-      init: async (dir) => {
-        await Bun.write(path.join(dir, "module.wasm"), "not really wasm")
-      },
-    })
-    await Instance.provide({
-      directory: tmp.path,
-      fn: async () => {
-        const read = await ReadTool.init()
-        await expect(read.execute({ filePath: path.join(tmp.path, "module.wasm") }, ctx)).rejects.toThrow(
-          "Cannot read binary file",
-        )
-      },
-    })
-  })
+      const err = yield* fail(dir, { filePath: path.join(dir, "null-byte.txt") })
+      expect(err.message).toContain("Cannot read binary file")
+    }),
+  )
+
+  it.live("rejects known binary extensions", () =>
+    Effect.gen(function* () {
+      const dir = yield* tmpdirScoped()
+      yield* put(path.join(dir, "module.wasm"), "not really wasm")
+
+      const err = yield* fail(dir, { filePath: path.join(dir, "module.wasm") })
+      expect(err.message).toContain("Cannot read binary file")
+    }),
+  )
 })