Просмотр исходного кода

fix(opencode): batch snapshot revert without reordering (#20564)

Kit Langton 2 недели назад
Родитель
Commit
48db7cf07a

+ 95 - 10
packages/opencode/src/snapshot/index.ts

@@ -301,28 +301,113 @@ export namespace Snapshot {
           const revert = Effect.fnUntraced(function* (patches: Snapshot.Patch[]) {
             return yield* locked(
               Effect.gen(function* () {
+                const ops: { hash: string; file: string; rel: string }[] = []
                 const seen = new Set<string>()
                 for (const item of patches) {
                   for (const file of item.files) {
                     if (seen.has(file)) continue
                     seen.add(file)
-                    log.info("reverting", { file, hash: item.hash })
-                    const result = yield* git([...core, ...args(["checkout", item.hash, "--", file])], {
+                    ops.push({
+                      hash: item.hash,
+                      file,
+                      rel: path.relative(state.worktree, file).replaceAll("\\", "/"),
+                    })
+                  }
+                }
+
+                const single = Effect.fnUntraced(function* (op: (typeof ops)[number]) {
+                  log.info("reverting", { file: op.file, hash: op.hash })
+                  const result = yield* git([...core, ...args(["checkout", op.hash, "--", op.file])], {
+                    cwd: state.worktree,
+                  })
+                  if (result.code === 0) return
+                  const tree = yield* git([...core, ...args(["ls-tree", op.hash, "--", op.rel])], {
+                    cwd: state.worktree,
+                  })
+                  if (tree.code === 0 && tree.text.trim()) {
+                    log.info("file existed in snapshot but checkout failed, keeping", { file: op.file, hash: op.hash })
+                    return
+                  }
+                  log.info("file did not exist in snapshot, deleting", { file: op.file, hash: op.hash })
+                  yield* remove(op.file)
+                })
+
+                const clash = (a: string, b: string) => a === b || a.startsWith(`${b}/`) || b.startsWith(`${a}/`)
+
+                for (let i = 0; i < ops.length; ) {
+                  const first = ops[i]!
+                  const run = [first]
+                  let j = i + 1
+                  // Only batch adjacent files when their paths cannot affect each other.
+                  while (j < ops.length && run.length < 100) {
+                    const next = ops[j]!
+                    if (next.hash !== first.hash) break
+                    if (run.some((item) => clash(item.rel, next.rel))) break
+                    run.push(next)
+                    j += 1
+                  }
+
+                  if (run.length === 1) {
+                    yield* single(first)
+                    i = j
+                    continue
+                  }
+
+                  const tree = yield* git(
+                    [...core, ...args(["ls-tree", "--name-only", first.hash, "--", ...run.map((item) => item.rel)])],
+                    {
                       cwd: state.worktree,
+                    },
+                  )
+
+                  if (tree.code !== 0) {
+                    log.info("batched ls-tree failed, falling back to single-file revert", {
+                      hash: first.hash,
+                      files: run.length,
                     })
-                    if (result.code !== 0) {
-                      const rel = path.relative(state.worktree, file)
-                      const tree = yield* git([...core, ...args(["ls-tree", item.hash, "--", rel])], {
+                    for (const op of run) {
+                      yield* single(op)
+                    }
+                    i = j
+                    continue
+                  }
+
+                  const have = new Set(
+                    tree.text
+                      .trim()
+                      .split("\n")
+                      .map((item) => item.trim())
+                      .filter(Boolean),
+                  )
+                  const list = run.filter((item) => have.has(item.rel))
+                  if (list.length) {
+                    log.info("reverting", { hash: first.hash, files: list.length })
+                    const result = yield* git(
+                      [...core, ...args(["checkout", first.hash, "--", ...list.map((item) => item.file)])],
+                      {
                         cwd: state.worktree,
+                      },
+                    )
+                    if (result.code !== 0) {
+                      log.info("batched checkout failed, falling back to single-file revert", {
+                        hash: first.hash,
+                        files: list.length,
                       })
-                      if (tree.code === 0 && tree.text.trim()) {
-                        log.info("file existed in snapshot but checkout failed, keeping", { file })
-                      } else {
-                        log.info("file did not exist in snapshot, deleting", { file })
-                        yield* remove(file)
+                      for (const op of run) {
+                        yield* single(op)
                       }
+                      i = j
+                      continue
                     }
                   }
+
+                  for (const op of run) {
+                    if (have.has(op.rel)) continue
+                    log.info("file did not exist in snapshot, deleting", { file: op.file, hash: op.hash })
+                    yield* remove(op.file)
+                  }
+
+                  i = j
                 }
               }),
             )

+ 77 - 0
packages/opencode/test/snapshot/snapshot.test.ts

@@ -1233,3 +1233,80 @@ test("revert with overlapping files across patches uses first patch hash", async
     },
   })
 })
+
+test("revert preserves patch order when the same hash appears again", async () => {
+  await using tmp = await bootstrap()
+  await Instance.provide({
+    directory: tmp.path,
+    fn: async () => {
+      await $`mkdir -p ${tmp.path}/foo`.quiet()
+      await Filesystem.write(`${tmp.path}/foo/bar`, "v1")
+      await Filesystem.write(`${tmp.path}/a.txt`, "v1")
+
+      const snap1 = await Snapshot.track()
+      expect(snap1).toBeTruthy()
+
+      await $`rm -rf ${tmp.path}/foo`.quiet()
+      await Filesystem.write(`${tmp.path}/foo`, "v2")
+      await Filesystem.write(`${tmp.path}/a.txt`, "v2")
+
+      const snap2 = await Snapshot.track()
+      expect(snap2).toBeTruthy()
+
+      await $`rm -rf ${tmp.path}/foo`.quiet()
+      await Filesystem.write(`${tmp.path}/a.txt`, "v3")
+
+      await Snapshot.revert([
+        { hash: snap1!, files: [fwd(tmp.path, "a.txt")] },
+        { hash: snap2!, files: [fwd(tmp.path, "foo")] },
+        { hash: snap1!, files: [fwd(tmp.path, "foo", "bar")] },
+      ])
+
+      expect(await fs.readFile(`${tmp.path}/a.txt`, "utf-8")).toBe("v1")
+      expect((await fs.stat(`${tmp.path}/foo`)).isDirectory()).toBe(true)
+      expect(await fs.readFile(`${tmp.path}/foo/bar`, "utf-8")).toBe("v1")
+    },
+  })
+})
+
+test("revert handles large mixed batches across chunk boundaries", async () => {
+  await using tmp = await bootstrap()
+  await Instance.provide({
+    directory: tmp.path,
+    fn: async () => {
+      const base = Array.from({ length: 140 }, (_, i) => fwd(tmp.path, "batch", `${i}.txt`))
+      const fresh = Array.from({ length: 140 }, (_, i) => fwd(tmp.path, "fresh", `${i}.txt`))
+
+      await $`mkdir -p ${tmp.path}/batch ${tmp.path}/fresh`.quiet()
+      await Promise.all(base.map((file, i) => Filesystem.write(file, `base-${i}`)))
+
+      const snap = await Snapshot.track()
+      expect(snap).toBeTruthy()
+
+      await Promise.all(base.map((file, i) => Filesystem.write(file, `next-${i}`)))
+      await Promise.all(fresh.map((file, i) => Filesystem.write(file, `fresh-${i}`)))
+
+      const patch = await Snapshot.patch(snap!)
+      expect(patch.files.length).toBe(base.length + fresh.length)
+
+      await Snapshot.revert([patch])
+
+      await Promise.all(
+        base.map(async (file, i) => {
+          expect(await fs.readFile(file, "utf-8")).toBe(`base-${i}`)
+        }),
+      )
+
+      await Promise.all(
+        fresh.map(async (file) => {
+          expect(
+            await fs
+              .access(file)
+              .then(() => true)
+              .catch(() => false),
+          ).toBe(false)
+        }),
+      )
+    },
+  })
+})