Explorar o código

Use agent-browser for parallel sync-client tests

Tienson Qin hai 6 días
pai
achega
92999a6a76

+ 7 - 0
deps/db-sync/README.md

@@ -56,6 +56,13 @@ yarn download-graph-db --graph-id 6f2d7f6f-xxxx-xxxx-xxxx-xxxxxxxxxxxx --admin-t
 You can also pass `--admin-token <token>` or set `DB_SYNC_ADMIN_TOKEN`. The output defaults to
 `tmp/graph-<graph-id>.snapshot.sqlite` and can be changed with `--output`.
 
+Show stored and recomputed checksum for a local sqlite graph db:
+
+```bash
+cd deps/db-sync
+yarn show-sqlite-checksum --db ~/Downloads/test.sqlite
+```
+
 Delete the graphs owned by a production user after an explicit confirmation:
 
 ```bash

+ 2 - 0
deps/db-sync/package.json

@@ -13,6 +13,7 @@
     "build:node-adapter": "clojure -M:cljs release db-sync-node",
     "dev:node-adapter": "clojure -M:cljs watch db-sync-node",
     "start:node-adapter": "node worker/dist/node-adapter.js",
+    "show-sqlite-checksum": "clojure -M:cljs compile db-sync-show-checksum && node worker/dist/show-sqlite-checksum.js",
     "migrate:local": "cd ./worker && wrangler d1 migrations apply DB --local",
     "migrate:staging": "cd ./worker && wrangler d1 migrations apply logseq-sync-graph-meta-staging --env staging --remote",
     "migrate:prod": "cd ./worker && wrangler d1 migrations apply logseq-sync-graphs-prod --env prod --remote",
@@ -28,6 +29,7 @@
     "@sentry/node": "^10.45.0",
     "better-sqlite3": "^12.8.0",
     "shadow-cljs": "^3.3.4",
+    "transit-js": "^0.8.874",
     "ws": "^8.19.0"
   },
   "devDependencies": {

+ 7 - 0
deps/db-sync/shadow-cljs.edn

@@ -22,6 +22,13 @@
                                                :redef false}}
                  :devtools {:enabled false}
                  :closure-defines {goog.debug.LOGGING_ENABLED true}}
+  :db-sync-show-checksum {:target :node-script
+                          :output-to "worker/dist/show-sqlite-checksum.js"
+                          :main logseq.db-sync.node.show-checksum/main
+                          :compiler-options {:source-map true
+                                             :warnings {:fn-deprecated false
+                                                        :redef false}}
+                          :devtools {:enabled false}}
   :db-sync-test {:target :node-test
                  :output-to "worker/dist/worker-test.js"
                  :devtools {:enabled false}

+ 3 - 11
deps/db-sync/src/logseq/db_sync/checksum.cljs

@@ -110,11 +110,7 @@
 (defn- checksum-eligible-entity?
   [db eid]
   (when-let [ent (d/entity db eid)]
-    (and (:block/uuid ent)
-         (not (ldb/built-in? ent))
-         (nil? (:logseq.property/deleted-at ent))
-         (or (ldb/page? ent)
-             (:block/page ent)))))
+    (uuid? (:block/uuid ent))))
 
 (defn- entity-digest
   [db eid e2ee?]
@@ -135,12 +131,8 @@
 (defn recompute-checksum
   [db]
   (let [e2ee? (ldb/get-graph-rtc-e2ee? db)
-        attrs (relevant-attrs e2ee?)
-        eids (->> (d/datoms db :eavt)
-                  (keep (fn [datom]
-                          (when (contains? attrs (:a datom))
-                            (:e datom))))
-                  distinct)]
+        eids (->> (d/datoms db :avet :block/uuid)
+                  (map :e))]
     (->> eids
          (reduce (fn [[sum-fnv sum-djb] eid]
                    (if-let [[fnv djb] (entity-digest db eid e2ee?)]

+ 105 - 0
deps/db-sync/src/logseq/db_sync/node/show_checksum.cljs

@@ -0,0 +1,105 @@
+(ns logseq.db-sync.node.show-checksum
+  (:require ["better-sqlite3" :as sqlite3]
+            ["fs" :as fs]
+            ["path" :as node-path]
+            [clojure.string :as string]
+            [datascript.core :as d]
+            [logseq.db-sync.checksum :as sync-checksum]
+            [logseq.db-sync.storage :as storage]))
+
+(def sqlite (if (find-ns 'nbb.core) (aget sqlite3 "default") sqlite3))
+
(defn- fail!
  "Print `msg` to stderr and terminate the process with exit code 1.
  Never returns."
  [msg]
  (binding [*print-fn* *print-err-fn*]
    (println (str "Error: " msg)))
  (js/process.exit 1))
+
(defn- parse-args
  "Parse the CLI `argv` seq into an option map.
  Returns {:help? true} when -h/--help is present, otherwise {:db <path>}.
  Exits the process via `fail!` on an unknown flag or a missing --db value."
  [argv]
  (let [args (vec argv)]
    ;; Help short-circuits all other parsing.
    (if (some #{"-h" "--help"} args)
      {:help? true}
      (loop [m {}
             xs args]
        (if-let [x (first xs)]
          (cond
            (= "--db" x)
            (if-let [db (second xs)]
              (recur (assoc m :db db) (nnext xs))
              (fail! "Missing value for --db"))

            :else
            (fail! (str "Unknown argument: " x)))
          m)))))
+
+(defn- print-help!
+  []
+  (println "Show checksums for a sqlite db.")
+  (println "")
+  (println "Usage:")
+  (println "  node worker/dist/show-sqlite-checksum.js --db <path/to/db.sqlite>")
+  (println "")
+  (println "Output:")
+  (println "  - stored checksum (sync_meta.checksum, if present)")
+  (println "  - recomputed checksum (from datascript graph)"))
+
+(defn- normalize-sql
+  [sql]
+  (-> sql string/trim string/lower-case))
+
+(defn- select-sql?
+  [sql]
+  (string/starts-with? (normalize-sql sql) "select"))
+
+(defn- exec-with-args [^js stmt args]
+  (.apply (.-run stmt) stmt (to-array args)))
+
+(defn- all-with-args [^js stmt args]
+  (.apply (.-all stmt) stmt (to-array args)))
+
(defn- wrap-db
  "Adapt a better-sqlite3 connection to the #js {:exec ... :close ...}
  shape expected by logseq.db-sync.storage. SELECT statements return row
  arrays; every other statement runs for effect and yields nil. The raw
  connection is kept under :_db."
  [^js db]
  (letfn [(exec* [sql args]
            (if (seq args)
              (let [stmt (.prepare db sql)]
                (if (select-sql? sql)
                  (all-with-args stmt args)
                  (do (exec-with-args stmt args)
                      nil)))
              (if (select-sql? sql)
                (.all (.prepare db sql))
                (.exec db sql))))]
    #js {:exec (fn [sql & args] (exec* sql args))
         :close (fn [] (.close db))
         :_db db}))
+
+(defn- datom-count
+  [db]
+  (count (d/datoms db :eavt)))
+
(defn main
  "CLI entry point: open the sqlite file given by --db, rebuild the
  datascript graph from it and print the db path, its datom count and the
  recomputed checksum. Exits non-zero on bad arguments or a missing file."
  [& argv]
  (let [{:keys [help? db]} (parse-args argv)]
    (when help?
      (print-help!)
      (js/process.exit 0))
    (when-not db
      (fail! "Missing required --db <path> argument"))
    (let [db-path (node-path/resolve db)]
      (when-not (.existsSync fs db-path)
        (fail! (str "SQLite file not found: " db-path)))
      (let [sqlite-db (new sqlite db-path nil)
            sql (wrap-db sqlite-db)]
        (try
          ;; Ensure the sync tables exist before opening the conn; init is
          ;; a no-op on a db that already has the schema.
          (storage/init-schema! sql)
          (let [conn (storage/open-conn sql)
                db' @conn
                recomputed-checksum (sync-checksum/recompute-checksum db')]
            (println (str "db: " db-path))
            (println (str "datoms: " (datom-count db')))
            (println (str "recomputed-checksum: " recomputed-checksum)))
          (finally
            ;; Always release the underlying sqlite handle.
            (.close sql)))))))

+ 28 - 10
deps/db-sync/src/logseq/db_sync/storage.cljs

@@ -3,6 +3,7 @@
             [clojure.string :as string]
             [datascript.core :as d]
             [datascript.storage :refer [IStorage]]
+            [logseq.db :as ldb]
             [logseq.db-sync.checksum :as sync-checksum]
             [logseq.db-sync.common :as common]
             [logseq.db.common.normalize :as db-normalize]
@@ -145,17 +146,34 @@
 
 (defn- append-tx-for-tx-report
   [sql {:keys [db-after db-before tx-data tx-meta] :as tx-report}]
-  (let [created-at (common/now-ms)
-        normalized-data (->> tx-data
-                             (db-normalize/normalize-tx-data db-after db-before))
-        ;; _ (prn :debug :tx-data tx-data)
-        ;; _ (prn :debug :normalized-data normalized-data)
-        tx-str (common/write-transit normalized-data)
-        new-t (next-t! sql)
-        prev-checksum (get-checksum sql)
-        checksum (sync-checksum/update-checksum prev-checksum tx-report)]
+  (let [prev-checksum (get-checksum sql)
+        ;; checksum (sync-checksum/update-checksum prev-checksum tx-report)
+        checksum (sync-checksum/recompute-checksum db-after)]
+    ;; (when (and prev-checksum (not= checksum (sync-checksum/recompute-checksum db-after)))
+    ;;     (prn :debug :before-checksum-error {:prev-checksum prev-checksum
+    ;;                                         :recomputed-after-checksum (sync-checksum/recompute-checksum db-after)
+    ;;                                         :tx-meta tx-meta
+    ;;                                         :tx-data tx-data
+    ;;                                         :db-before (ldb/write-transit-str db-before)
+    ;;                                         :db-after (ldb/write-transit-str db-after)})
+    ;;     (throw (ex-info "server checksum doesn't match"
+    ;;                     {:prev-checksum prev-checksum
+    ;;                                         :recomputed-after-checksum (sync-checksum/recompute-checksum db-after)
+    ;;                                         :tx-meta tx-meta
+    ;;                                         :tx-data tx-data
+    ;;                                         :db-before (ldb/write-transit-str db-before)
+    ;;                                         :db-after (ldb/write-transit-str db-after)})))
+
     (set-checksum! sql checksum)
-    (append-tx! sql new-t tx-str created-at (:outliner-op tx-meta))))
+    (when-not (empty? tx-data)
+      (let [created-at (common/now-ms)
+            normalized-data (->> tx-data
+                                 (db-normalize/normalize-tx-data db-after db-before))
+            ;; _ (prn :debug :tx-data tx-data)
+            ;; _ (prn :debug :normalized-data normalized-data)
+            tx-str (common/write-transit normalized-data)
+            new-t (next-t! sql)]
+        (append-tx! sql new-t tx-str created-at (:outliner-op tx-meta))))))
 
 (defn- listen-db-updates!
   [sql conn]

+ 13 - 35
deps/db-sync/src/logseq/db_sync/worker/handler/sync.cljs

@@ -294,49 +294,26 @@
       (reset-import! sql))
     (import-snapshot-rows! sql "kvs" rows)))
 
-(defn- sanitize-client-tx-data
-  [conn txs]
-  (let [lookup-id (fn [x]
-                    (when (and (vector? x)
-                               (= 2 (count x))
-                               (= :block/uuid (first x)))
-                      (second x)))
-        tx-data* (protocol/transit->tx txs)
-        created-block-uuids (->> tx-data*
-                                 (keep (fn [item]
-                                         (when (and (vector? item)
-                                                    (= :db/add (first item))
-                                                    (>= (count item) 4)
-                                                    (= :block/uuid (nth item 2)))
-                                           (nth item 3))))
-                                 set)
-        missing-lookup-ref? (fn [x]
-                              (when-let [block-uuid (lookup-id x)]
-                                (and (not (contains? created-block-uuids block-uuid))
-                                     (nil? (d/entity @conn x)))))]
-    (remove (fn [item]
-              (when (vector? item)
-                (let [op (first item)
-                      attr (nth item 2 nil)
-                      value (when (>= (count item) 4) (nth item 3))]
-                  (or (and (contains? #{:db/add :db/retract :db/retractEntity} op)
-                           (missing-lookup-ref? (second item)))
-                      (and (contains? #{:db/add :db/retract} op)
-                           (contains? db-schema/ref-type-attributes attr)
-                           (missing-lookup-ref? value))))))
-            tx-data*)))
(defn- sanitize-tx
  "For :fix outliner ops, drop tx items whose target entity no longer
  exists in `db`; all other ops pass through unchanged.
  NOTE(review): assumes every :fix tx item is a vector with the entity id
  (or lookup ref) in position 1, e.g. [:db/retractEntity <eid>] — a
  map-form tx item would break the [[_ id]] destructuring; confirm against
  the producers of :fix ops."
  [db outliner-op tx-data]
  (if (= outliner-op :fix)
    (remove (fn [[_ id]]
              (nil? (d/entity db id))) tx-data)
    tx-data))
 
 (defn- apply-tx-entry!
   [conn {:keys [tx outliner-op]}]
-  (let [tx-data (sanitize-client-tx-data conn tx)]
+  (let [tx-data (->> (protocol/transit->tx tx)
+                     (sanitize-tx @conn outliner-op))]
+    (prn :debug :entity (:block/parent (d/entity @conn [:block/uuid #uuid "69d38949-1f23-4e1c-b7d9-861c16cfdb2c"])))
     (when (seq tx-data)
       (ldb/transact! conn tx-data (cond-> {:op :apply-client-tx}
                                     outliner-op (assoc :outliner-op outliner-op))))))
 
 (defn- db-transact-failed-response
-  [sql tx-entry]
+  [sql tx-entry message]
   {:type "tx/reject"
-   :reason "db transact failed"
+   :reason (str "db transact failed: " message)
    :t (storage/get-t sql)
    :data (common/write-transit tx-entry)})
 
@@ -350,8 +327,9 @@
                          (apply-tx-entry! conn tx-entry)
                          ::ok
                          (catch :default e
+                           (throw e)
                            (log/error :db-sync/transact-failed e)
-                           (db-transact-failed-response sql tx-entry)))]
+                           (db-transact-failed-response sql tx-entry (.-message e))))]
             (if (= ::ok result)
               (recur (next remaining))
               result))

+ 42 - 0
deps/db-sync/test/logseq/db_sync/checksum_test.cljs

@@ -141,6 +141,48 @@
           incremental (checksum/update-checksum (checksum/recompute-checksum db-before) tx-report)]
       (is (= full incremental)))))
 
+(deftest incremental-checksum-matches-recompute-when-block-is-readded-test
+  (testing "incremental checksum remains equal to recompute when a block is deleted and re-added with the same UUID"
+    (let [db0 (sample-db)
+          checksum0 (checksum/recompute-checksum db0)
+          child-uuid (:block/uuid (d/entity db0 4))
+          parent-uuid (:block/uuid (d/entity db0 3))
+          page-uuid (:block/uuid (d/entity db0 1))
+          {:keys [db checksum]} (assert-incremental=full! db0 checksum0 [[:db/retractEntity [:block/uuid child-uuid]]])]
+      (assert-incremental=full! db checksum [{:db/id -1
+                                              :block/uuid child-uuid
+                                              :block/title "Child"
+                                              :block/parent [:block/uuid parent-uuid]
+                                              :block/page [:block/uuid page-uuid]}]))))
+
+(deftest incremental-checksum-matches-recompute-when-delete-tree-undo-and-delete-again-test
+  (testing "incremental checksum matches recompute across delete-tree, undo-all, then delete-tree-again"
+    (let [db0 (sample-db)
+          parent-uuid (:block/uuid (d/entity db0 3))
+          child-uuid (:block/uuid (d/entity db0 4))
+          page-uuid (:block/uuid (d/entity db0 1))
+          tx-seq [{:tx-data [[:db/retractEntity [:block/uuid child-uuid]]
+                             [:db/retractEntity [:block/uuid parent-uuid]]]}
+                  {:tx-data [{:db/id -1
+                              :block/uuid parent-uuid
+                              :block/title "Parent"
+                              :block/parent [:block/uuid page-uuid]
+                              :block/page [:block/uuid page-uuid]}
+                             {:db/id -2
+                              :block/uuid child-uuid
+                              :block/title "Child"
+                              :block/parent [:block/uuid parent-uuid]
+                              :block/page [:block/uuid page-uuid]}]}
+                  {:tx-data [[:db/retractEntity [:block/uuid child-uuid]]
+                             [:db/retractEntity [:block/uuid parent-uuid]]]}]
+          {:keys [db checksum]} (reduce
+                                 (fn [{:keys [db checksum]} {:keys [tx-data]}]
+                                   (assert-incremental=full! db checksum tx-data))
+                                 {:db db0
+                                  :checksum (checksum/recompute-checksum db0)}
+                                 tx-seq)]
+      (is (= checksum (checksum/recompute-checksum db))))))
+
 (deftest recompute-checksum-diagnostics-includes-relevant-attrs-test
   (testing "diagnostics includes checksum attrs and block values used for checksum export"
     (let [db (sample-db)

+ 54 - 1
deps/db-sync/test/logseq/db_sync/storage_test.cljs

@@ -1,8 +1,42 @@
 (ns logseq.db-sync.storage-test
-  (:require [cljs.test :refer [deftest is]]
+  (:require ["better-sqlite3" :as sqlite3]
+            [clojure.string :as string]
+            [cljs.test :refer [deftest is testing]]
+            [datascript.core :as d]
             [logseq.db-sync.storage :as storage]
             [logseq.db-sync.test-sql :as test-sql]))
 
+(def sqlite (if (find-ns 'nbb.core) (aget sqlite3 "default") sqlite3))
+
+(defn- select-sql?
+  [sql]
+  (string/starts-with? (-> sql string/trim string/lower-case) "select"))
+
+(defn- run-sql
+  [^js stmt args]
+  (.apply (.-run stmt) stmt (to-array args)))
+
+(defn- all-sql
+  [^js stmt args]
+  (.apply (.-all stmt) stmt (to-array args)))
+
+(defn- with-memory-sql
+  [f]
+  (let [db (new sqlite ":memory:" nil)
+        sql #js {:exec (fn [sql-str & args]
+                         (let [stmt (.prepare db sql-str)]
+                           (if (select-sql? sql-str)
+                             (all-sql stmt args)
+                             (do
+                               (run-sql stmt args)
+                               nil))))
+                 :close (fn []
+                          (.close db))}]
+    (try
+      (f sql)
+      (finally
+        (.close sql)))))
+
 (deftest t-counter-test
   (let [sql (test-sql/make-sql)]
     (storage/init-schema! sql)
@@ -21,3 +55,22 @@
       (is (= [{:t 2 :tx "tx-2" :outliner-op :move-blocks}
               {:t 3 :tx "tx-3" :outliner-op nil}]
              result)))))
+
+(deftest stale-checksum-no-op-transact-does-not-throw-test
+  (testing "a no-op tx should not throw and should keep incremental checksum state"
+    (with-memory-sql
+      (fn [sql]
+        (let [stale-checksum "f4b78e83776d45fb"]
+          (storage/init-schema! sql)
+          (storage/set-checksum! sql stale-checksum)
+          (let [conn (storage/open-conn sql)
+                result (try
+                         (d/transact! conn
+                                      []
+                                      {:outliner-op :rebase})
+                         :ok
+                         (catch :default e
+                           e))]
+            (is (= :ok result))
+            (is (= stale-checksum
+                   (storage/get-checksum sql)))))))))

+ 43 - 12
deps/db-sync/worker/scripts/download_graph_db.js

@@ -5,6 +5,7 @@ const fs = require("node:fs");
 const path = require("node:path");
 const zlib = require("node:zlib");
 const { parseArgs } = require("node:util");
+const transit = require("transit-js");
 const { fail } = require("./graph_user_lib");
 
 const defaultBaseUrl = "https://api.logseq.com";
@@ -134,16 +135,45 @@ function maybeDecompressBuffer(buffer, contentEncoding) {
   return buffer;
 }
 
-function snapshotBufferToLines(buffer) {
-  const text = buffer.toString("utf8");
-  return text
-    .split(/\r?\n/)
-    .filter((line) => line.length > 0);
// Decode a length-prefixed stream of transit-encoded row batches.
// Each frame is a 4-byte big-endian payload length followed by that many
// bytes of transit JSON holding an array of [addr, content, addresses?] rows.
// Throws on truncated frames or malformed batches/rows.
function parseFramedRows(buffer) {
  const reader = transit.reader("json");
  const rows = [];
  let cursor = 0;

  while (cursor < buffer.length) {
    if (buffer.length - cursor < 4) {
      throw new Error("Invalid snapshot payload: incomplete frame header");
    }
    const frameLength = buffer.readUInt32BE(cursor);
    cursor += 4;

    if (buffer.length - cursor < frameLength) {
      throw new Error("Invalid snapshot payload: incomplete frame payload");
    }
    const payload = buffer.subarray(cursor, cursor + frameLength);
    cursor += frameLength;

    const batch = reader.read(payload.toString("utf8"));
    if (!Array.isArray(batch)) {
      throw new Error("Invalid snapshot payload: decoded frame is not an array");
    }
    batch.forEach((row) => {
      if (!Array.isArray(row) || row.length < 2) {
        throw new Error("Invalid snapshot payload: row must be [addr, content, addresses?]");
      }
      rows.push(row);
    });
  }

  return rows;
}
 
 function writeSnapshotSqlite({
   outputPath,
-  lines,
+  rows,
 }) {
   fs.mkdirSync(path.dirname(outputPath), { recursive: true });
   if (fs.existsSync(outputPath)) {
@@ -165,8 +195,9 @@ function writeSnapshotSqlite({
     );
 
     const writeAll = db.transaction(() => {
-      for (let index = 0; index < lines.length; index += 1) {
-        upsertKvs.run(index + 1, lines[index], null);
+      for (const row of rows) {
+        const [addr, content, addresses] = row;
+        upsertKvs.run(addr, content, addresses ?? null);
       }
     });
 
@@ -186,16 +217,16 @@ async function main() {
   const snapshot = await fetchSnapshotBytes(descriptor.url, options.adminToken);
   const effectiveEncoding = descriptor["content-encoding"] || snapshot.contentEncoding || "";
   const decompressed = maybeDecompressBuffer(snapshot.buffer, effectiveEncoding);
-  const lines = snapshotBufferToLines(decompressed);
+  const rows = parseFramedRows(decompressed);
 
   writeSnapshotSqlite({
     outputPath: options.output,
-    lines,
+    rows,
   });
 
   console.log(`Saved graph snapshot sqlite to ${options.output}`);
   console.log(`Graph: ${options.graphId}`);
-  console.log(`Rows: ${lines.length}`);
+  console.log(`Rows: ${rows.length}`);
   if (descriptor.key) {
     console.log(`Snapshot key: ${descriptor.key}`);
   }
@@ -209,7 +240,7 @@ if (require.main === module) {
 
 module.exports = {
   parseCliArgs,
+  parseFramedRows,
   sanitizeGraphIdForFilename,
-  snapshotBufferToLines,
   writeSnapshotSqlite,
 };

+ 36 - 10
deps/db-sync/worker/scripts/download_graph_db.test.js

@@ -4,10 +4,12 @@ const fs = require("node:fs");
 const os = require("node:os");
 const path = require("node:path");
 const test = require("node:test");
+const transit = require("transit-js");
 
 const Database = require("better-sqlite3");
 
 const {
+  parseFramedRows,
   parseCliArgs,
   sanitizeGraphIdForFilename,
   writeSnapshotSqlite,
@@ -59,18 +61,42 @@ test("CLI rejects missing admin-token when env is absent", () => {
   assert.match(result.stderr, /Missing admin token/);
 });
 
+test("parseFramedRows decodes framed transit batches", () => {
+  const writer = transit.writer("json");
+  const rowsA = [
+    [1, "line-1", null],
+    [2, "line-2", null],
+  ];
+  const rowsB = [
+    [1000606, "line-1000606", "{\"1\":[2,3]}"],
+  ];
+
+  const encodeFrame = (rows) => {
+    const payload = Buffer.from(writer.write(rows), "utf8");
+    const frame = Buffer.allocUnsafe(4 + payload.length);
+    frame.writeUInt32BE(payload.length, 0);
+    payload.copy(frame, 4);
+    return frame;
+  };
+
+  const framed = Buffer.concat([encodeFrame(rowsA), encodeFrame(rowsB)]);
+  const parsed = parseFramedRows(framed);
+
+  assert.deepEqual(parsed, [...rowsA, ...rowsB]);
+});
+
 test("writeSnapshotSqlite writes kvs-only sqlite like local dbs", () => {
   const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "db-sync-download-test-"));
   const dbPath = path.join(tmpDir, "graph-debug.sqlite");
+  const rows = [
+    [1, "line-1", null],
+    [2, "line-2", null],
+    [1000606, "line-1000606", "{\"1\":[2,3]}"],
+  ];
 
   writeSnapshotSqlite({
     outputPath: dbPath,
-    graphId: "graph-1",
-    snapshotKey: "graph-1/snapshot-123.snapshot",
-    snapshotUrl: "https://api.logseq.com/assets/graph-1/snapshot-123.snapshot",
-    contentEncoding: "gzip",
-    rawBytes: 512,
-    lines: ["line-1", "line-2", "line-3"],
+    rows,
   });
 
   const db = new Database(dbPath, { readonly: true });
@@ -79,13 +105,13 @@ test("writeSnapshotSqlite writes kvs-only sqlite like local dbs", () => {
     .all()
     .map((row) => row.name);
   const rowCount = db.prepare("select count(1) as count from kvs").get().count;
-  const row3 = db.prepare("select addr, content, addresses from kvs where addr = 3").get();
+  const row1000606 = db.prepare("select addr, content, addresses from kvs where addr = 1000606").get();
 
   assert.deepEqual(tableNames, ["kvs"]);
   assert.equal(rowCount, 3);
-  assert.equal(row3.addr, 3);
-  assert.equal(row3.content, "line-3");
-  assert.equal(row3.addresses, null);
+  assert.equal(row1000606.addr, 1000606);
+  assert.equal(row1000606.content, "line-1000606");
+  assert.equal(row1000606.addresses, "{\"1\":[2,3]}");
 
   db.close();
 });

+ 5 - 0
deps/db-sync/yarn.lock

@@ -941,6 +941,11 @@ tar-stream@^2.1.4:
     inherits "^2.0.3"
     readable-stream "^3.1.1"
 
+transit-js@^0.8.874:
+  version "0.8.874"
+  resolved "https://registry.yarnpkg.com/transit-js/-/transit-js-0.8.874.tgz#37b27d6632bd796567c359220297c862e3988bbb"
+  integrity sha512-IDJJGKRzUbJHmN0P15HBBa05nbKor3r2MmG6aSt0UxXIlJZZKcddTk67/U7WyAeW9Hv/VYI02IqLzolsC4sbPA==
+
 tunnel-agent@^0.6.0:
   version "0.6.0"
   resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd"

+ 28 - 16
deps/db/src/logseq/db.cljs

@@ -96,14 +96,15 @@
     (throw (ex-info "Page can't have block as parent"
                     {:tx-data tx-data}))))
 
-(defn debounced-store-db
-  [conn]
-  (when-some [_storage (storage/storage @conn)]
-    (when-not (:batch-tx? @conn)
-      (let [f (if (exists? js/process)
-                d/store
-                (or @*debounce-fn d/store))]
-        (f @conn)))))
+(comment
+  (defn debounced-store-db
+   [conn]
+   (when-some [_storage (storage/storage @conn)]
+     (when-not (:batch-tx? @conn)
+       (let [f (if (exists? js/process)
+                 d/store
+                 (or @*debounce-fn d/store))]
+         (f @conn))))))
 
 (defn- transact-sync
   [conn tx-data tx-meta]
@@ -130,7 +131,7 @@
                        (seq (:tx-data tx-report)))
               ;; perf enhancement: avoid repeated call on `d/with`
               (reset! conn (:db-after tx-report))
-              (debounced-store-db conn)
+              (dc/store-after-transact! conn tx-report)
               (dc/run-callbacks conn tx-report))
 
             :else
@@ -138,6 +139,14 @@
               ;; notify ui
               (when-let [f @*transact-invalid-callback]
                 (f tx-report errors))
+              (prn :debug :invalid-data
+                   {:tx-meta tx-meta
+                    :tx-data tx-data
+                    :errors errors
+                    :pipeline-tx-data (map
+                                       (fn [[e a v t]]
+                                         [e a v t])
+                                       (:tx-data tx-report))})
               (throw (ex-info "DB write failed with invalid data" {:tx-meta tx-meta
                                                                    :tx-data tx-data
                                                                    :errors errors
@@ -213,7 +222,7 @@
       (vreset! *complete? true)
       (let [tx-data @*batch-tx-data]
         (when (and @*complete? (seq tx-data))
-            ;; transact tx-data to `conn` and validate db
+          ;; transact tx-data to `conn` and validate db
           (transact! conn tx-data tx-meta)))
       (finally
         (d/unlisten! temp-conn ::temp-conn-batch-tx)
@@ -228,21 +237,24 @@
     (try
       (when (:batch-tx @conn)
         (throw (ex-info "batch-transact! can't be nested called" {:tx-meta tx-meta})))
-      (when (fn? listen-db) (d/listen! conn ::batch-tx
-                                       (fn [tx-report]
-                                         (swap! *tx-data into (:tx-data tx-report))
-                                         (listen-db tx-report))))
+      (d/listen! conn ::batch-tx
+                 (fn [tx-report]
+                   (swap! *tx-data into (:tx-data tx-report))
+                   (when (fn? listen-db)
+                     (listen-db tx-report))))
       (swap! conn assoc :skip-store? true :batch-tx? true)
       (batch-tx-fn conn)
-      (when (fn? listen-db) (d/unlisten! conn ::batch-tx))
+
+      (d/unlisten! conn ::batch-tx)
 
       (swap! conn dissoc :skip-store? :batch-tx?)
 
       (when-some [_storage (storage/storage @conn)]
-       (d/store @conn))
+        (d/store @conn))
 
       (let [batch-tx-data @*tx-data
             _ (reset! *tx-data nil)
+            _ (prn :debug :batch-tx-data batch-tx-data)
             tx-report {:db-before db-before
                        :db-after @conn
                        :tx-meta tx-meta

+ 2 - 1
deps/db/src/logseq/db/frontend/validate.cljs

@@ -27,7 +27,8 @@
   [{:keys [db-after tx-data tx-meta]} {:keys [closed-schema?]}]
   (binding [db-malli-schema/*skip-strict-url-validate?* true]
     (let [changed-ids (->> tx-data (keep :e) distinct)
-          tx-datoms (mapcat #(d/datoms db-after :eavt %) changed-ids)
+          datoms (d/datoms db-after :eavt)
+          tx-datoms (mapcat (fn [id] (filter (fn [d] (= (:e d) id)) datoms)) changed-ids)
           ent-maps* (map (fn [[db-id m]]
                          ;; Add :db/id for debugging
                            (assoc m :db/id db-id))

+ 144 - 143
deps/outliner/src/logseq/outliner/core.cljs

@@ -451,17 +451,18 @@
   "Save the `block`."
   [db block opts]
   {:pre [(map? block)]}
-  (let [*txs-state (atom [])
-        block' (if (de/entity? block)
-                 block
-                 (do
-                   (assert (or (:db/id block) (:block/uuid block)) "save-block db/id not exists")
-                   (when-let [eid (or (:db/id block) (when-let [id (:block/uuid block)] [:block/uuid id]))]
-                     (let [ent (d/entity db eid)]
-                       (assert (some? ent) "save-block entity not exists")
-                       (merge ent block)))))]
-    (otree/-save block' *txs-state db opts)
-    {:tx-data @*txs-state}))
+  (let [eid (or (:db/id block) (when-let [id (:block/uuid block)] [:block/uuid id]))]
+    (when (nil? eid)
+      (throw (ex-info "Block eid doesn't exist"
+                      {:block block})))
+    (when-let [entity (d/entity db eid)]
+      (let [*txs-state (atom [])
+            block' (if (de/entity? block)
+                     block
+                     (merge entity block))]
+        (prn :debug :block block')
+        (otree/-save block' *txs-state db opts)
+        {:tx-data @*txs-state}))))
 
 (defn- get-right-siblings
   "Get `node`'s right siblings."
@@ -720,99 +721,99 @@
                                   insert-template? right-sibling-id]
                            :as opts
                            :or {update-timestamps? true}}]
-  {:pre [(seq blocks)
-         (m/validate block-map-or-entity target-block)]}
-  (let [blocks (cond->>
-                (keep (fn [b]
-                        (if-let [eid (or (:db/id b)
-                                         (when-let [id (:block/uuid b)]
-                                           [:block/uuid id]))]
-                          (let [b' (if-let [e (if (de/entity? b) b (d/entity db eid))]
-                                     (merge
-                                      (into {} e)
-                                      {:db/id (:db/id e)
-                                       :block/title (or (:block/raw-title e) (:block/title e))}
+  {:pre [(seq blocks)]}
+  ;; NOTE(review): target validation moved from a :pre assertion to a silent
+  ;; `when` guard — an invalid target-block now makes insert-blocks return nil
+  ;; instead of throwing. Confirm all callers tolerate a nil result.
+  (when (m/validate block-map-or-entity target-block)
+    (let [blocks (cond->>
+                 (keep (fn [b]
+                         (if-let [eid (or (:db/id b)
+                                          (when-let [id (:block/uuid b)]
+                                            [:block/uuid id]))]
+                           (let [b' (if-let [e (if (de/entity? b) b (d/entity db eid))]
+                                      (merge
+                                       (into {} e)
+                                       {:db/id (:db/id e)
+                                        :block/title (or (:block/raw-title e) (:block/title e))}
+                                       b)
                                       b)
-                                     b)
-                                dissoc-keys (concat [:block/tx-id]
-                                                    (when (and (contains? #{:insert-template-blocks :paste} outliner-op)
-                                                               (not (contains? #{:paste-text} outliner-real-op)))
-                                                      [:block/refs]))]
-                            (apply dissoc b' dissoc-keys))
-                          b))
-                      blocks)
-                 (or (= outliner-op :paste)
-                     insert-template?)
-                 (remove ldb/asset?))
-        [target-block sibling?] (get-target-block db blocks target-block opts)
-        _ (assert (some? target-block) (str "Invalid target: " target-block))
-        replace-empty-target? (if (and (some? replace-empty-target?)
-                                       (:block/title target-block)
-                                       (string/blank? (:block/title target-block)))
-                                replace-empty-target?
-                                (and sibling?
-                                     (:block/title target-block)
-                                     (string/blank? (:block/title target-block))
-                                     (> (count blocks) 1)))]
-    (when (seq blocks)
-      (let [blocks' (let [blocks' (blocks-with-level blocks)]
-                      (cond->> (blocks-with-ordered-list-props blocks' target-block sibling?)
-                        update-timestamps?
-                        (mapv #(dissoc % :block/created-at :block/updated-at))
-                        true
-                        (mapv block-with-timestamps)))
-            insert-opts {:sibling? sibling?
-                         :replace-empty-target? replace-empty-target?
-                         :keep-uuid? keep-uuid?
-                         :keep-block-order? keep-block-order?
-                         :outliner-op outliner-op
-                         :insert-template? insert-template?
-                         :right-sibling-id right-sibling-id}
-            {:keys [id->new-uuid blocks-tx]} (insert-blocks-aux db blocks' target-block insert-opts)]
-        (if (some (fn [b] (or (nil? (:block/parent b)) (nil? (:block/order b)))) blocks-tx)
-          (throw (ex-info "Invalid outliner data"
-                          {:opts insert-opts
-                           :tx (vec blocks-tx)
-                           :blocks (vec blocks)
-                           :target-block target-block}))
-          (let [tx (assign-temp-id blocks-tx target-block replace-empty-target?)
-                old-db-id-blocks (->> (filter :block.temp/use-old-db-id? tx)
-                                      (map :block/uuid)
-                                      (set))
-                uuids-tx (->> (map :block/uuid blocks-tx)
-                              (remove old-db-id-blocks)
-                              (remove nil?)
-                              (map (fn [uuid'] {:block/uuid uuid'})))
-                from-property (:logseq.property/created-from-property target-block)
-                many? (= :db.cardinality/many (:db/cardinality from-property))
-                property-values-tx (when (and sibling? from-property many?)
-                                     (let [top-level-blocks (filter #(= 1 (:block/level %)) blocks')]
-                                       (mapcat (fn [block]
-                                                 (when-let [new-id (or (id->new-uuid (:db/id block)) (:block/uuid block))]
-                                                   [{:block/uuid new-id
-                                                     :logseq.property/created-from-property (:db/id from-property)}
-                                                    [:db/add
-                                                     (:db/id (:block/parent target-block))
-                                                     (:db/ident (d/entity db (:db/id from-property)))
-                                                     [:block/uuid new-id]]])) top-level-blocks)))
-                full-tx (common-util/concat-without-nil (if (and keep-uuid? replace-empty-target?) (rest uuids-tx) uuids-tx)
-                                                        tx
-                                                        property-values-tx)
+                                 dissoc-keys (concat [:block/tx-id]
+                                                     (when (and (contains? #{:insert-template-blocks :paste} outliner-op)
+                                                                (not (contains? #{:paste-text} outliner-real-op)))
+                                                       [:block/refs]))]
+                             (apply dissoc b' dissoc-keys))
+                           b))
+                       blocks)
+                  (or (= outliner-op :paste)
+                      insert-template?)
+                  (remove ldb/asset?))
+         [target-block sibling?] (get-target-block db blocks target-block opts)
+         _ (assert (some? target-block) (str "Invalid target: " target-block))
+         replace-empty-target? (if (and (some? replace-empty-target?)
+                                        (:block/title target-block)
+                                        (string/blank? (:block/title target-block)))
+                                 replace-empty-target?
+                                 (and sibling?
+                                      (:block/title target-block)
+                                      (string/blank? (:block/title target-block))
+                                      (> (count blocks) 1)))]
+     (when (seq blocks)
+       (let [blocks' (let [blocks' (blocks-with-level blocks)]
+                       (cond->> (blocks-with-ordered-list-props blocks' target-block sibling?)
+                         update-timestamps?
+                         (mapv #(dissoc % :block/created-at :block/updated-at))
+                         true
+                         (mapv block-with-timestamps)))
+             insert-opts {:sibling? sibling?
+                          :replace-empty-target? replace-empty-target?
+                          :keep-uuid? keep-uuid?
+                          :keep-block-order? keep-block-order?
+                          :outliner-op outliner-op
+                          :insert-template? insert-template?
+                          :right-sibling-id right-sibling-id}
+             {:keys [id->new-uuid blocks-tx]} (insert-blocks-aux db blocks' target-block insert-opts)]
+         (if (some (fn [b] (or (nil? (:block/parent b)) (nil? (:block/order b)))) blocks-tx)
+           (throw (ex-info "Invalid outliner data"
+                           {:opts insert-opts
+                            :tx (vec blocks-tx)
+                            :blocks (vec blocks)
+                            :target-block target-block}))
+           (let [tx (assign-temp-id blocks-tx target-block replace-empty-target?)
+                 old-db-id-blocks (->> (filter :block.temp/use-old-db-id? tx)
+                                       (map :block/uuid)
+                                       (set))
+                 uuids-tx (->> (map :block/uuid blocks-tx)
+                               (remove old-db-id-blocks)
+                               (remove nil?)
+                               (map (fn [uuid'] {:block/uuid uuid'})))
+                 from-property (:logseq.property/created-from-property target-block)
+                 many? (= :db.cardinality/many (:db/cardinality from-property))
+                 property-values-tx (when (and sibling? from-property many?)
+                                      (let [top-level-blocks (filter #(= 1 (:block/level %)) blocks')]
+                                        (mapcat (fn [block]
+                                                  (when-let [new-id (or (id->new-uuid (:db/id block)) (:block/uuid block))]
+                                                    [{:block/uuid new-id
+                                                      :logseq.property/created-from-property (:db/id from-property)}
+                                                     [:db/add
+                                                      (:db/id (:block/parent target-block))
+                                                      (:db/ident (d/entity db (:db/id from-property)))
+                                                      [:block/uuid new-id]]])) top-level-blocks)))
+                 full-tx (common-util/concat-without-nil (if (and keep-uuid? replace-empty-target?) (rest uuids-tx) uuids-tx)
+                                                         tx
+                                                         property-values-tx)
                 ;; Replace entities with eid because Datascript doesn't support entity transaction
-                full-tx' (walk/prewalk
-                          (fn [f]
-                            (cond
-                              (de/entity? f)
-                              (if-let [id (id->new-uuid (:db/id f))]
-                                [:block/uuid id]
-                                (:db/id f))
-                              (map? f)
-                              (dissoc f :block/level)
-                              :else
-                              f))
-                          full-tx)]
-            {:tx-data full-tx'
-             :blocks  tx}))))))
+                 full-tx' (walk/prewalk
+                           (fn [f]
+                             (cond
+                               (de/entity? f)
+                               (if-let [id (id->new-uuid (:db/id f))]
+                                 [:block/uuid id]
+                                 (:db/id f))
+                               (map? f)
+                               (dissoc f :block/level)
+                               :else
+                               f))
+                           full-tx)]
+             {:tx-data full-tx'
+              :blocks  tx})))))))
 
 (defn- sort-non-consecutive-blocks
   [db blocks]
@@ -929,49 +930,49 @@
   "Move `blocks` to `target-block` as siblings or children."
   [conn blocks target-block {:keys [_sibling? _top? _bottom? _up? outliner-op _indent?]
                              :as opts}]
-  {:pre [(seq blocks)
-         (m/validate block-map-or-entity target-block)]}
-  (let [db @conn
-        top-level-blocks (filter-top-level-blocks db blocks)
-        [target-block sibling?] (get-target-block db top-level-blocks target-block opts)
-        non-consecutive? (and (> (count top-level-blocks) 1) (seq (ldb/get-non-consecutive-blocks db top-level-blocks)))
-        top-level-blocks (get-top-level-blocks top-level-blocks non-consecutive?)
-        blocks (->> (if non-consecutive?
-                      (sort-non-consecutive-blocks db top-level-blocks)
-                      top-level-blocks)
-                    (map (fn [block]
-                           (if (de/entity? block)
-                             block
-                             (d/entity db (:db/id block))))))
-        original-position? (move-to-original-position? blocks target-block sibling? non-consecutive?)]
-    (when (and (not (contains? (set (map :db/id blocks)) (:db/id target-block)))
-               (not original-position?))
-      (let [parents' (->> (ldb/get-block-parents db (:block/uuid target-block) {})
-                          (map :db/id)
-                          (set))
-            move-parents-to-child? (some parents' (map :db/id blocks))
-            op-entry [:move-blocks [(mapv :db/id top-level-blocks)
-                                    (:db/id target-block)
-                                    opts]]]
-        (when-not move-parents-to-child?
-          (ldb/batch-transact-with-temp-conn!
-           conn
-           {:outliner-op :move-blocks
-            :outliner-ops [op-entry]}
-           (fn [conn]
-             (doseq [[idx block] (map vector (range (count blocks)) blocks)]
-               (let [first-block? (zero? idx)
-                     sibling? (if first-block? sibling? true)
-                     target-block (if first-block? target-block
-                                      (d/entity @conn (:db/id (nth blocks (dec idx)))))
-                     block (d/entity @conn (:db/id block))]
-                 (when-not (move-to-original-position? [block] target-block sibling? false)
-                   (let [tx-data (move-block @conn block target-block sibling?)]
+  {:pre [(seq blocks)]}
+  ;; NOTE(review): as with insert-blocks, an invalid target-block now yields nil
+  ;; rather than failing a :pre assertion — verify callers handle nil.
+  (when (m/validate block-map-or-entity target-block)
+    (let [db @conn
+          top-level-blocks (filter-top-level-blocks db blocks)
+          [target-block sibling?] (get-target-block db top-level-blocks target-block opts)
+          non-consecutive? (and (> (count top-level-blocks) 1) (seq (ldb/get-non-consecutive-blocks db top-level-blocks)))
+          top-level-blocks (get-top-level-blocks top-level-blocks non-consecutive?)
+          blocks (->> (if non-consecutive?
+                        (sort-non-consecutive-blocks db top-level-blocks)
+                        top-level-blocks)
+                      (map (fn [block]
+                             (if (de/entity? block)
+                               block
+                               (d/entity db (:db/id block))))))
+          original-position? (move-to-original-position? blocks target-block sibling? non-consecutive?)]
+      (when (and (not (contains? (set (map :db/id blocks)) (:db/id target-block)))
+                 (not original-position?))
+        (let [parents' (->> (ldb/get-block-parents db (:block/uuid target-block) {})
+                            (map :db/id)
+                            (set))
+              move-parents-to-child? (some parents' (map :db/id blocks))
+              op-entry [:move-blocks [(mapv :db/id top-level-blocks)
+                                      (:db/id target-block)
+                                      opts]]]
+          (when-not move-parents-to-child?
+            (ldb/batch-transact-with-temp-conn!
+             conn
+             {:outliner-op :move-blocks
+              :outliner-ops [op-entry]}
+             (fn [conn]
+               (doseq [[idx block] (map vector (range (count blocks)) blocks)]
+                 (let [first-block? (zero? idx)
+                       sibling? (if first-block? sibling? true)
+                       target-block (if first-block? target-block
+                                        (d/entity @conn (:db/id (nth blocks (dec idx)))))
+                       block (d/entity @conn (:db/id block))]
+                   (when-not (move-to-original-position? [block] target-block sibling? false)
+                     (let [tx-data (move-block @conn block target-block sibling?)]
                      ;; FIXME: move-blocks should be pure fn
                      ;; (prn "==>> move blocks tx:" tx-data)
-                     (ldb/transact! conn tx-data {:sibling? sibling?
-                                                  :outliner-op (or outliner-op :move-blocks)})))))))
-          nil)))))
+                       (ldb/transact! conn tx-data {:sibling? sibling?
+                                                    :outliner-op (or outliner-op :move-blocks)})))))))
+            nil))))))
 
 (defn- move-blocks-up-down
   "Move blocks up/down."

+ 109 - 0
scripts/lib/logseq-electron-op-sim.cjs

@@ -0,0 +1,109 @@
+'use strict';
+
+// Fixed rotation of simulated outliner operations. buildOperationPlan cycles
+// through this list in order, so the mix of operations is deterministic per run.
+const OPERATION_ORDER = Object.freeze([
+  'add',
+  'copyPaste',
+  'copyPasteTreeToEmptyTarget',
+  'move',
+  'indent',
+  'outdent',
+  'delete',
+  'undo',
+  'redo',
+]);
+
+// Build a deterministic plan of `totalOps` operation names by cycling through
+// OPERATION_ORDER. Throws when totalOps is not a positive integer.
+function buildOperationPlan(totalOps) {
+  if (!Number.isInteger(totalOps) || totalOps <= 0) {
+    throw new Error('totalOps must be a positive integer');
+  }
+
+  const plan = [];
+  for (let i = 0; i < totalOps; i += 1) {
+    plan.push(OPERATION_ORDER[i % OPERATION_ORDER.length]);
+  }
+  return plan;
+}
+
+// Downgrade a planned operation to 'add' when the document does not yet hold
+// enough managed blocks for it to run; throws on unknown operation kinds.
+function chooseRunnableOperation(requestedOperation, managedCount) {
+  switch (requestedOperation) {
+    case 'copyPaste':
+    case 'copyPasteTreeToEmptyTarget':
+      // Needs at least one existing block to copy from.
+      return managedCount >= 1 ? requestedOperation : 'add';
+    case 'move':
+    case 'indent':
+    case 'delete':
+      // Needs a source block plus a distinct target/sibling.
+      return managedCount >= 2 ? requestedOperation : 'add';
+    case 'outdent':
+    case 'add':
+    case 'undo':
+    case 'redo':
+      // Always runnable regardless of managed block count.
+      return requestedOperation;
+    default:
+      throw new Error(`Unsupported operation kind: ${requestedOperation}`);
+  }
+}
+
+// Parse a strictly positive integer CLI value; flagName appears in the error.
+// Non-numeric input produces NaN, which fails the Number.isInteger check.
+function parsePositiveInteger(value, flagName) {
+  const parsed = Number.parseInt(value, 10);
+  if (!Number.isInteger(parsed) || parsed <= 0) {
+    throw new Error(`${flagName} must be a positive integer`);
+  }
+  return parsed;
+}
+
+// Parse a zero-or-greater integer CLI value; flagName appears in the error.
+function parseNonNegativeInteger(value, flagName) {
+  const parsed = Number.parseInt(value, 10);
+  if (!Number.isInteger(parsed) || parsed < 0) {
+    throw new Error(`${flagName} must be a non-negative integer`);
+  }
+  return parsed;
+}
+
+// Parse simulator CLI flags (--ops, --port, --undo-redo-delay-ms).
+// --help/-h short-circuits and returns { help: true } without validating ops.
+// Unknown flags throw. Value flags consume argv[i + 1].
+function parseArgs(argv) {
+  const result = {
+    ops: 200,
+    port: 9333,
+    undoRedoDelayMs: 350,
+  };
+
+  for (let i = 0; i < argv.length; i += 1) {
+    const arg = argv[i];
+    if (arg === '--help' || arg === '-h') {
+      return { ...result, help: true };
+    }
+
+    const next = argv[i + 1];
+    if (arg === '--ops') {
+      result.ops = parsePositiveInteger(next, '--ops');
+      i += 1;
+      continue;
+    }
+
+    if (arg === '--port') {
+      result.port = parsePositiveInteger(next, '--port');
+      i += 1;
+      continue;
+    }
+
+    if (arg === '--undo-redo-delay-ms') {
+      result.undoRedoDelayMs = parseNonNegativeInteger(next, '--undo-redo-delay-ms');
+      i += 1;
+      continue;
+    }
+
+    throw new Error(`Unknown argument: ${arg}`);
+  }
+
+  // This simulator enforces a minimum run length of 200 operations
+  // (the sibling Chrome driver enforces only >= 1).
+  if (result.ops < 200) {
+    throw new Error('--ops must be at least 200');
+  }
+
+  return result;
+}
+
+// Public API; buildOperationPlan is consumed by
+// scripts/sync-open-chrome-tab-simulate.cjs.
+module.exports = {
+  OPERATION_ORDER,
+  buildOperationPlan,
+  chooseRunnableOperation,
+  parseArgs,
+};

+ 1730 - 0
scripts/sync-open-chrome-tab-simulate.cjs

@@ -0,0 +1,1730 @@
+#!/usr/bin/env node
+'use strict';
+
+const { spawn } = require('node:child_process');
+const fs = require('node:fs');
+const fsPromises = require('node:fs/promises');
+const os = require('node:os');
+const path = require('node:path');
+const {
+  buildOperationPlan,
+} = require('./lib/logseq-electron-op-sim.cjs');
+
+const DEFAULT_URL = 'http://localhost:3001/#/';
+const DEFAULT_SESSION_NAME = 'logseq-op-sim';
+const DEFAULT_CHROME_PROFILE = 'auto';
+const DEFAULT_INSTANCES = 1;
+const DEFAULT_OPS = 50;
+const DEFAULT_UNDO_REDO_DELAY_MS = 350;
+const DEFAULT_HEADED = true;
+const DEFAULT_AUTO_CONNECT = false;
+const DEFAULT_RESET_SESSION = true;
+const DEFAULT_TARGET_GRAPH = 'db1';
+const DEFAULT_E2E_PASSWORD = '12345';
+const DEFAULT_SWITCH_GRAPH_TIMEOUT_MS = 120000;
+const DEFAULT_CHROME_LAUNCH_ARGS = [
+  '--new-window',
+  '--no-first-run',
+  '--no-default-browser-check',
+];
+const RENDERER_READY_TIMEOUT_MS = 30000;
+const RENDERER_READY_POLL_DELAY_MS = 250;
+const FALLBACK_PAGE_NAME = 'op-sim-scratch';
+const AGENT_BROWSER_ACTION_TIMEOUT_MS = 180000;
+const PROCESS_TIMEOUT_MS = 240000;
+const AGENT_BROWSER_RETRY_COUNT = 5;
+
+// Render the CLI help text shown for --help/-h.
+function usage() {
+  return [
+    'Usage: node scripts/sync-open-chrome-tab-simulate.cjs [options]',
+    '',
+    'Options:',
+    `  --url <url>                 URL to open (default: ${DEFAULT_URL})`,
+    `  --session <name>            agent-browser session name (default: ${DEFAULT_SESSION_NAME})`,
+    `  --instances <n>             Number of concurrent browser instances (default: ${DEFAULT_INSTANCES})`,
+    `  --graph <name>              Graph name to switch/download before ops (default: ${DEFAULT_TARGET_GRAPH})`,
+    `  --e2e-password <text>       Password for E2EE modal if prompted (default: ${DEFAULT_E2E_PASSWORD})`,
+    '  --profile <name|path|auto|none> Chrome profile to reuse login state (default: auto)',
+    '                              auto = prefer Default, then logseq.com',
+    '                              none = do not pass --profile to agent-browser (isolated profile)',
+    '                              profile labels are mapped to Chrome profile names',
+    '  --executable-path <path>    Chrome executable path (default: auto-detect system Chrome)',
+    '  --auto-connect              Enable auto-connect to an already running Chrome instance',
+    '  --no-auto-connect           Disable auto-connect to a running Chrome instance',
+    '  --no-reset-session          Do not close the target agent-browser session before starting',
+    `  --switch-timeout-ms <n>     Timeout for graph switch/download bootstrap (default: ${DEFAULT_SWITCH_GRAPH_TIMEOUT_MS})`,
+    `  --ops <n>                   Total operations to execute (must be >= 1, default: ${DEFAULT_OPS})`,
+    `  --undo-redo-delay-ms <n>    Wait time after undo/redo command (default: ${DEFAULT_UNDO_REDO_DELAY_MS})`,
+    '  --headless                  Run agent-browser in headless mode',
+    '  --print-only                Print parsed args only, do not run simulation',
+    '  -h, --help                  Show this message',
+  ].join('\n');
+}
+
+// NOTE(review): these two helpers are byte-for-byte duplicates of the ones in
+// scripts/lib/logseq-electron-op-sim.cjs — consider requiring them from the
+// shared lib instead of copying.
+// Parse a strictly positive integer CLI value; flagName appears in the error.
+function parsePositiveInteger(value, flagName) {
+  const parsed = Number.parseInt(value, 10);
+  if (!Number.isInteger(parsed) || parsed <= 0) {
+    throw new Error(`${flagName} must be a positive integer`);
+  }
+  return parsed;
+}
+
+// Parse a zero-or-greater integer CLI value; flagName appears in the error.
+function parseNonNegativeInteger(value, flagName) {
+  const parsed = Number.parseInt(value, 10);
+  if (!Number.isInteger(parsed) || parsed < 0) {
+    throw new Error(`${flagName} must be a non-negative integer`);
+  }
+  return parsed;
+}
+
+// Parse all CLI flags for the Chrome simulation driver. Boolean flags are
+// handled before `next` is read; value flags consume argv[i + 1]. Unknown
+// flags throw; --help/-h short-circuits without further validation.
+function parseArgs(argv) {
+  const result = {
+    url: DEFAULT_URL,
+    session: DEFAULT_SESSION_NAME,
+    instances: DEFAULT_INSTANCES,
+    graph: DEFAULT_TARGET_GRAPH,
+    e2ePassword: DEFAULT_E2E_PASSWORD,
+    profile: DEFAULT_CHROME_PROFILE,
+    executablePath: null,
+    autoConnect: DEFAULT_AUTO_CONNECT,
+    resetSession: DEFAULT_RESET_SESSION,
+    switchTimeoutMs: DEFAULT_SWITCH_GRAPH_TIMEOUT_MS,
+    ops: DEFAULT_OPS,
+    undoRedoDelayMs: DEFAULT_UNDO_REDO_DELAY_MS,
+    headed: DEFAULT_HEADED,
+    printOnly: false,
+  };
+
+  for (let i = 0; i < argv.length; i += 1) {
+    const arg = argv[i];
+
+    if (arg === '--help' || arg === '-h') {
+      return { ...result, help: true };
+    }
+
+    if (arg === '--print-only') {
+      result.printOnly = true;
+      continue;
+    }
+
+    if (arg === '--headless') {
+      result.headed = false;
+      continue;
+    }
+
+    if (arg === '--no-auto-connect') {
+      result.autoConnect = false;
+      continue;
+    }
+
+    if (arg === '--auto-connect') {
+      result.autoConnect = true;
+      continue;
+    }
+
+    if (arg === '--no-reset-session') {
+      result.resetSession = false;
+      continue;
+    }
+
+    // Everything below is a value flag and consumes the following argv entry.
+    const next = argv[i + 1];
+
+    if (arg === '--url') {
+      if (typeof next !== 'string' || next.length === 0) {
+        throw new Error('--url must be a non-empty string');
+      }
+      result.url = next;
+      i += 1;
+      continue;
+    }
+
+    if (arg === '--session') {
+      if (typeof next !== 'string' || next.length === 0) {
+        throw new Error('--session must be a non-empty string');
+      }
+      result.session = next;
+      i += 1;
+      continue;
+    }
+
+    if (arg === '--graph') {
+      if (typeof next !== 'string' || next.length === 0) {
+        throw new Error('--graph must be a non-empty string');
+      }
+      result.graph = next;
+      i += 1;
+      continue;
+    }
+
+    if (arg === '--e2e-password') {
+      if (typeof next !== 'string' || next.length === 0) {
+        throw new Error('--e2e-password must be a non-empty string');
+      }
+      result.e2ePassword = next;
+      i += 1;
+      continue;
+    }
+
+    if (arg === '--instances') {
+      result.instances = parsePositiveInteger(next, '--instances');
+      i += 1;
+      continue;
+    }
+
+    if (arg === '--profile') {
+      if (typeof next !== 'string' || next.length === 0) {
+        throw new Error('--profile must be a non-empty string');
+      }
+      result.profile = next;
+      i += 1;
+      continue;
+    }
+
+    if (arg === '--executable-path') {
+      if (typeof next !== 'string' || next.length === 0) {
+        throw new Error('--executable-path must be a non-empty string');
+      }
+      result.executablePath = next;
+      i += 1;
+      continue;
+    }
+
+    if (arg === '--ops') {
+      result.ops = parsePositiveInteger(next, '--ops');
+      i += 1;
+      continue;
+    }
+
+    if (arg === '--undo-redo-delay-ms') {
+      result.undoRedoDelayMs = parseNonNegativeInteger(next, '--undo-redo-delay-ms');
+      i += 1;
+      continue;
+    }
+
+    if (arg === '--switch-timeout-ms') {
+      result.switchTimeoutMs = parsePositiveInteger(next, '--switch-timeout-ms');
+      i += 1;
+      continue;
+    }
+
+    throw new Error(`Unknown argument: ${arg}`);
+  }
+
+  if (result.ops < 1) {
+    throw new Error('--ops must be at least 1');
+  }
+
+  return result;
+}
+
+// Spawn a child process, capture stdout/stderr, and resolve with
+// { code, stdout, stderr } on exit 0. Rejects on spawn error, nonzero exit
+// (message includes trimmed stderr/stdout detail), or timeout.
+function spawnAndCapture(cmd, args, options = {}) {
+  const {
+    input,
+    timeoutMs = PROCESS_TIMEOUT_MS,
+    env = process.env,
+  } = options;
+
+  return new Promise((resolve, reject) => {
+    const child = spawn(cmd, args, {
+      stdio: ['pipe', 'pipe', 'pipe'],
+      env,
+    });
+
+    let stdout = '';
+    let stderr = '';
+    let timedOut = false;
+
+    // NOTE(review): only SIGTERM is sent on timeout; a child that ignores
+    // SIGTERM is never escalated to SIGKILL — confirm that is acceptable.
+    const timer = setTimeout(() => {
+      timedOut = true;
+      child.kill('SIGTERM');
+    }, timeoutMs);
+
+    child.stdout.on('data', (payload) => {
+      stdout += payload.toString();
+    });
+
+    child.stderr.on('data', (payload) => {
+      stderr += payload.toString();
+    });
+
+    child.once('error', (error) => {
+      clearTimeout(timer);
+      reject(error);
+    });
+
+    child.once('exit', (code) => {
+      clearTimeout(timer);
+
+      if (timedOut) {
+        reject(new Error(`Command timed out after ${timeoutMs}ms: ${cmd} ${args.join(' ')}`));
+        return;
+      }
+
+      if (code === 0) {
+        resolve({ code, stdout, stderr });
+        return;
+      }
+
+      const detail = stderr.trim() || stdout.trim();
+      reject(
+        new Error(
+          `Command failed: ${cmd} ${args.join(' ')} (exit ${code})` +
+            (detail ? `\n${detail}` : '')
+        )
+      );
+    });
+
+    // stdin is always closed; input is only written when it is a string.
+    if (typeof input === 'string') {
+      child.stdin.write(input);
+    }
+    child.stdin.end();
+  });
+}
+
+// Parse JSON from agent-browser output. Falls back to parsing just the last
+// non-empty line, so log noise printed before the JSON payload is tolerated.
+function parseJsonOutput(output) {
+  const text = output.trim();
+  if (!text) {
+    throw new Error('Expected JSON output from agent-browser but got empty output');
+  }
+
+  try {
+    return JSON.parse(text);
+  } catch (_error) {
+    const lines = text.split(/\r?\n/).filter(Boolean);
+    const lastLine = lines[lines.length - 1];
+    try {
+      return JSON.parse(lastLine);
+    } catch (error) {
+      throw new Error('Failed to parse JSON output from agent-browser: ' + String(error.message || error));
+    }
+  }
+}
+
+// Promise-based delay helper.
+function sleep(ms) {
+  return new Promise((resolve) => setTimeout(resolve, ms));
+}
+
+// Replace any filename-unsafe character runs with '-'; empty/falsy input
+// becomes 'default'.
+function sanitizeForFilename(value) {
+  return String(value || 'default').replace(/[^a-zA-Z0-9._-]+/g, '-');
+}
+
+// True when targetPath is accessible; access errors are treated as "missing".
+async function pathExists(targetPath) {
+  try {
+    await fsPromises.access(targetPath);
+    return true;
+  } catch (_error) {
+    return false;
+  }
+}
+
+// Recursively copy sourcePath to destPath (creating parent dirs) when the
+// source exists; returns whether a copy happened.
+async function copyIfExists(sourcePath, destPath) {
+  if (!(await pathExists(sourcePath))) return false;
+  await fsPromises.mkdir(path.dirname(destPath), { recursive: true });
+  await fsPromises.cp(sourcePath, destPath, {
+    force: true,
+    recursive: true,
+  });
+  return true;
+}
+
+// Locate the platform-specific Chrome (or Chromium on Linux) user-data root;
+// returns null when no candidate directory exists.
+async function detectChromeUserDataRoot() {
+  const home = os.homedir();
+  const candidates = [];
+  if (process.platform === 'darwin') {
+    candidates.push(path.join(home, 'Library', 'Application Support', 'Google', 'Chrome'));
+  } else if (process.platform === 'win32') {
+    const localAppData = process.env.LOCALAPPDATA;
+    if (localAppData) {
+      candidates.push(path.join(localAppData, 'Google', 'Chrome', 'User Data'));
+    }
+  } else {
+    candidates.push(path.join(home, '.config', 'google-chrome'));
+    candidates.push(path.join(home, '.config', 'chromium'));
+  }
+
+  for (const candidate of candidates) {
+    if (await pathExists(candidate)) return candidate;
+  }
+  return null;
+}
+
+// Clone the auth-relevant parts of an existing Chrome profile into a fresh
+// per-instance user-data dir under os.tmpdir(), so parallel browser instances
+// reuse login state without sharing a live profile. Any previous clone for the
+// same profile/instance is deleted first. Returns the new user-data root.
+async function createIsolatedChromeUserDataDir(sourceProfileName, instanceIndex) {
+  const sourceRoot = await detectChromeUserDataRoot();
+  if (!sourceRoot) {
+    throw new Error('Cannot find Chrome user data root to clone auth profile');
+  }
+
+  const sourceProfileDir = path.join(sourceRoot, sourceProfileName);
+  if (!(await pathExists(sourceProfileDir))) {
+    throw new Error(`Cannot find Chrome profile directory to clone: ${sourceProfileDir}`);
+  }
+
+  const targetRoot = path.join(
+    os.tmpdir(),
+    `logseq-op-sim-user-data-${sanitizeForFilename(sourceProfileName)}-${instanceIndex}`
+  );
+  const targetDefaultProfileDir = path.join(targetRoot, 'Default');
+  await fsPromises.rm(targetRoot, { recursive: true, force: true });
+  await fsPromises.mkdir(targetDefaultProfileDir, { recursive: true });
+
+  // 'Local State' lives at the user-data root, not inside the profile dir.
+  await copyIfExists(path.join(sourceRoot, 'Local State'), path.join(targetRoot, 'Local State'));
+
+  // Only the storage/preference entries needed for login state are cloned.
+  const entries = [
+    'Network',
+    'Cookies',
+    'Local Storage',
+    'Session Storage',
+    'IndexedDB',
+    'WebStorage',
+    'Preferences',
+    'Secure Preferences',
+  ];
+  for (const entry of entries) {
+    await copyIfExists(
+      path.join(sourceProfileDir, entry),
+      path.join(targetDefaultProfileDir, entry)
+    );
+  }
+
+  return targetRoot;
+}
+
+// Chrome CLI args: open `url` as an app window plus the shared defaults.
+function buildChromeLaunchArgs(url) {
+  return [
+    `--app=${url}`,
+    ...DEFAULT_CHROME_LAUNCH_ARGS,
+  ];
+}
+
+// Heuristic: treat agent-browser daemon busy / EAGAIN-style failures as
+// transient and therefore worth retrying.
+function isRetryableAgentBrowserError(error) {
+  const message = String(error?.message || error || '');
+  return (
+    /daemon may be busy or unresponsive/i.test(message) ||
+    /resource temporarily unavailable/i.test(message) ||
+    /os error 35/i.test(message) ||
+    /EAGAIN/i.test(message)
+  );
+}
+
// Parses `agent-browser profiles` output into [{ profile, label }] entries.
// Lines are expected as "  <profile> (<label>)"; anything else is skipped.
// Returns [] when the CLI is missing or fails — callers treat that as
// "no profiles known".
async function listChromeProfiles() {
  try {
    const { stdout } = await spawnAndCapture('agent-browser', ['profiles']);
    const lineMatcher = /^\s+(.+?)\s+\((.+?)\)\s*$/;
    return stdout
      .split(/\r?\n/)
      .map((line) => line.match(lineMatcher))
      .filter((match) => match !== null)
      .map((match) => ({
        profile: match[1].trim(),
        label: match[2].trim(),
      }));
  } catch (_error) {
    return [];
  }
}
+
// Picks the Chrome profile to clone: prefer the literal 'Default' profile,
// fall back to the first one listed, and assume 'Default' when the CLI
// reports nothing.
async function detectChromeProfile() {
  const knownProfiles = await listChromeProfiles();
  if (knownProfiles.length === 0) {
    return 'Default';
  }
  const preferred = knownProfiles.find((entry) => entry.profile === 'Default');
  return (preferred || knownProfiles[0]).profile;
}
+
// Probes well-known Chrome/Chromium install locations (macOS first, then
// Linux) and returns the first executable candidate, or null when none is
// found.
//
// Note: the previous version appended `${HOME}/Applications/...` even when
// HOME was unset (yielding a bogus duplicate of the system path) and used a
// `.filter(Boolean)` that could never remove it, since template literals are
// always truthy strings. The per-user candidate is now added only when HOME
// is actually set, in the same priority position (right after the system-wide
// macOS install).
async function detectChromeExecutablePath() {
  const candidates = [
    '/Applications/Google Chrome.app/Contents/MacOS/Google Chrome',
    '/usr/bin/google-chrome',
    '/usr/bin/google-chrome-stable',
    '/usr/bin/chromium',
    '/usr/bin/chromium-browser',
  ];
  if (process.env.HOME) {
    candidates.splice(
      1,
      0,
      `${process.env.HOME}/Applications/Google Chrome.app/Contents/MacOS/Google Chrome`
    );
  }

  for (const candidate of candidates) {
    try {
      // X_OK: the path must exist and be executable by the current user.
      await fsPromises.access(candidate, fs.constants.X_OK);
      return candidate;
    } catch (_error) {
      // Not present or not executable — keep probing.
    }
  }

  return null;
}
+
// Expands a leading '~' in a path string to the current user's home
// directory. Non-strings and paths without a leading '~' pass through
// unchanged.
function expandHome(inputPath) {
  const isString = typeof inputPath === 'string';
  if (!isString || inputPath.charAt(0) !== '~') {
    return inputPath;
  }
  return path.join(os.homedir(), inputPath.substring(1));
}
+
// Heuristic: treat a string as a filesystem path when it contains a path
// separator or begins with '~' or '.'; otherwise it is assumed to be a
// profile name.
function looksLikePath(value) {
  const hasSeparator = ['/', '\\'].some((sep) => value.includes(sep));
  const hasPathPrefix = ['~', '.'].some((prefix) => value.startsWith(prefix));
  return hasSeparator || hasPathPrefix;
}
+
// Resolves a user-supplied --profile argument: null/empty stays null,
// path-looking values get '~' expansion, and anything else is matched
// case-insensitively against known profile labels (falling back to the
// raw value when no label matches).
async function resolveProfileArgument(profile) {
  if (!profile) return null;
  if (looksLikePath(profile)) {
    return expandHome(profile);
  }

  const knownProfiles = await listChromeProfiles();
  const wanted = profile.toLowerCase();
  const labelMatch = knownProfiles.find(
    (entry) => entry.label.toLowerCase() === wanted
  );
  return labelMatch ? labelMatch.profile : profile;
}
+
// Runs one `agent-browser` CLI command against the named session and returns
// its parsed `--json` envelope.
//
// `options` carries both spawn options and the pass-through session flags
// handled below (headed / autoConnect / profile / state / launchArgs /
// executablePath), plus an optional `retries` budget. Only errors classified
// as transient by isRetryableAgentBrowserError are retried, with linear
// backoff. Throws on exhausted retries, non-retryable failures, or when the
// CLI does not report `success: true`.
async function runAgentBrowser(session, commandArgs, options = {}) {
  // Peel the retry budget off; everything else is forwarded to the spawn
  // and inspected for session flags.
  const {
    retries = AGENT_BROWSER_RETRY_COUNT,
    ...commandOptions
  } = options;

  // Give the CLI an explicit per-action timeout via its environment knob.
  const env = {
    ...process.env,
    AGENT_BROWSER_DEFAULT_TIMEOUT: String(AGENT_BROWSER_ACTION_TIMEOUT_MS),
  };

  // Global flags precede the subcommand; they select/configure the session.
  const globalFlags = ['--session', session];
  if (commandOptions.headed) {
    globalFlags.push('--headed');
  }
  if (commandOptions.autoConnect) {
    globalFlags.push('--auto-connect');
  }
  if (commandOptions.profile) {
    globalFlags.push('--profile', commandOptions.profile);
  }
  if (commandOptions.state) {
    globalFlags.push('--state', commandOptions.state);
  }
  if (Array.isArray(commandOptions.launchArgs) && commandOptions.launchArgs.length > 0) {
    // agent-browser expects extra Chrome args as a single comma-joined list.
    globalFlags.push('--args', commandOptions.launchArgs.join(','));
  }
  if (commandOptions.executablePath) {
    globalFlags.push('--executable-path', commandOptions.executablePath);
  }

  let lastError = null;
  for (let attempt = 0; attempt <= retries; attempt += 1) {
    try {
      const { stdout, stderr } = await spawnAndCapture(
        'agent-browser',
        [...globalFlags, ...commandArgs, '--json'],
        {
          ...commandOptions,
          env,
        }
      );

      // Anything other than a `success: true` JSON envelope is a failure;
      // surface whichever of stderr/stdout has content for diagnostics.
      const parsed = parseJsonOutput(stdout);
      if (!parsed || parsed.success !== true) {
        const fallback = stderr.trim() || stdout.trim();
        throw new Error('agent-browser command failed: ' + (fallback || 'unknown error'));
      }
      return parsed;
    } catch (error) {
      lastError = error;
      // Give up immediately on non-transient errors or an exhausted budget.
      if (attempt >= retries || !isRetryableAgentBrowserError(error)) {
        throw error;
      }
      // Linear backoff: 250ms, 500ms, 750ms, ...
      await sleep((attempt + 1) * 250);
    }
  }

  throw lastError || new Error('agent-browser command failed');
}
+
// Loose URL equality: exact match, prefix match (query/fragment suffixes),
// or same origin + pathname once both sides parse as URLs. Non-strings and
// unparseable values never match.
function urlMatchesTarget(candidate, targetUrl) {
  const bothStrings =
    typeof candidate === 'string' && typeof targetUrl === 'string';
  if (!bothStrings) return false;
  if (candidate === targetUrl || candidate.startsWith(targetUrl)) {
    return true;
  }
  try {
    const parsedCandidate = new URL(candidate);
    const parsedTarget = new URL(targetUrl);
    const sameOrigin = parsedCandidate.origin === parsedTarget.origin;
    const samePath = parsedCandidate.pathname === parsedTarget.pathname;
    return sameOrigin && samePath;
  } catch (_error) {
    return false;
  }
}
+
// Makes sure the session's active tab is on `targetUrl`: keep the current
// tab if it already matches, otherwise focus an existing matching tab, and
// as a last resort open (and focus) a new tab at the target URL.
async function ensureActiveTabOnTargetUrl(session, targetUrl, runOptions) {
  const urlResult = await runAgentBrowser(session, ['get', 'url'], runOptions);
  const activeUrl = urlResult?.data?.url;
  if (urlMatchesTarget(activeUrl, targetUrl)) {
    return;
  }

  const listResult = await runAgentBrowser(session, ['tab', 'list'], runOptions);
  const openTabs = Array.isArray(listResult?.data?.tabs) ? listResult.data.tabs : [];
  const existingTab = openTabs.find((tab) => urlMatchesTarget(tab?.url, targetUrl));
  if (existingTab && Number.isInteger(existingTab.index)) {
    await runAgentBrowser(session, ['tab', String(existingTab.index)], runOptions);
    return;
  }

  const createdTab = await runAgentBrowser(session, ['tab', 'new', targetUrl], runOptions);
  const createdTabIndex = createdTab?.data?.index;
  if (Number.isInteger(createdTabIndex)) {
    await runAgentBrowser(session, ['tab', String(createdTabIndex)], runOptions);
  }
}
+
// Builds the source text of a self-contained async program that agent-browser
// evaluates inside the Logseq renderer. The program waits for `logseq.api` to
// become usable, resolves an anchor block, then executes `config.plan` — a
// sequence of operation names (add / delete / move / indent / outdent / undo /
// redo / copyPaste / copyPasteTreeToEmptyTarget) — against blocks whose
// content starts with `config.markerPrefix` / `config.runPrefix`, falling
// back to 'add' when an operation has too few operable blocks or fails.
// It returns a summary object (counts, errors, samples) as the evaluation
// result.
//
// NOTE: everything inside the template literal below is *data*, not code in
// this file's scope — it is serialized and executed in the page context, so
// it must stay self-contained apart from `config`, which is inlined via
// JSON.stringify. Do not add comments or helpers that close over this file.
function buildRendererProgram(config) {
  return `(() => (async () => {
    const config = ${JSON.stringify(config)};
    const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
    const randomItem = (items) => items[Math.floor(Math.random() * items.length)];
    const shuffle = (items) => [...items].sort(() => Math.random() - 0.5);
    const describeError = (error) => String(error?.message || error);
    const asPageName = (pageLike) => {
      if (typeof pageLike === 'string' && pageLike.length > 0) return pageLike;
      if (!pageLike || typeof pageLike !== 'object') return null;
      if (typeof pageLike.name === 'string' && pageLike.name.length > 0) return pageLike.name;
      if (typeof pageLike.originalName === 'string' && pageLike.originalName.length > 0) return pageLike.originalName;
      if (typeof pageLike.title === 'string' && pageLike.title.length > 0) return pageLike.title;
      return null;
    };

    const waitForEditorReady = async () => {
      const deadline = Date.now() + config.readyTimeoutMs;
      let lastError = null;

      while (Date.now() < deadline) {
        try {
          if (
            globalThis.logseq?.api &&
            typeof logseq.api.get_current_block === 'function' &&
            (
              typeof logseq.api.get_current_page === 'function' ||
              typeof logseq.api.get_today_page === 'function'
            ) &&
            typeof logseq.api.append_block_in_page === 'function'
          ) {
            return;
          }
        } catch (error) {
          lastError = error;
        }

        await sleep(config.readyPollDelayMs);
      }

      if (lastError) {
        throw new Error('Logseq editor readiness timed out: ' + describeError(lastError));
      }
      throw new Error('Logseq editor readiness timed out: logseq.api is unavailable');
    };

    const runPrefix =
      typeof config.runPrefix === 'string' && config.runPrefix.length > 0
        ? config.runPrefix
        : config.markerPrefix;

    const chooseRunnableOperation = (requestedOperation, operableCount) => {
      if (requestedOperation === 'copyPaste' || requestedOperation === 'copyPasteTreeToEmptyTarget') {
        return operableCount >= 1 ? requestedOperation : 'add';
      }
      if (requestedOperation === 'move' || requestedOperation === 'indent' || requestedOperation === 'delete') {
        return operableCount >= 2 ? requestedOperation : 'add';
      }
      return requestedOperation;
    };

    const flattenBlocks = (nodes, acc = []) => {
      if (!Array.isArray(nodes)) return acc;
      for (const node of nodes) {
        if (!node) continue;
        acc.push(node);
        if (Array.isArray(node.children) && node.children.length > 0) {
          flattenBlocks(node.children, acc);
        }
      }
      return acc;
    };

    const isClientBlock = (block) =>
      typeof block?.content === 'string' && block.content.startsWith(config.markerPrefix);

    const isOperableBlock = (block) =>
      typeof block?.content === 'string' && block.content.startsWith(runPrefix);

    const listOperableBlocks = async () => {
      const tree = await logseq.api.get_current_page_blocks_tree();
      const flattened = flattenBlocks(tree, []);
      return flattened.filter(isOperableBlock);
    };

    const listManagedBlocks = async () => {
      const operableBlocks = await listOperableBlocks();
      return operableBlocks.filter(isClientBlock);
    };

    const pickIndentCandidate = async (blocks) => {
      for (const candidate of shuffle(blocks)) {
        const prev = await logseq.api.get_previous_sibling_block(candidate.uuid);
        if (prev?.uuid) return candidate;
      }
      return null;
    };

    const pickOutdentCandidate = async (blocks) => {
      for (const candidate of shuffle(blocks)) {
        const full = await logseq.api.get_block(candidate.uuid, { includeChildren: false });
        const parentId = full?.parent?.id;
        const pageId = full?.page?.id;
        if (parentId && pageId && parentId !== pageId) {
          return candidate;
        }
      }
      return null;
    };

    const toBatchTree = (block) => ({
      content: typeof block?.content === 'string' ? block.content : '',
      children: Array.isArray(block?.children) ? block.children.map(toBatchTree) : [],
    });

    const getAnchor = async () => {
      const deadline = Date.now() + config.readyTimeoutMs;
      let lastError = null;

      while (Date.now() < deadline) {
        try {
          const currentBlock = await logseq.api.get_current_block();
          if (currentBlock && currentBlock.uuid) {
            return currentBlock;
          }

          if (typeof logseq.api.get_current_page === 'function') {
            const currentPage = await logseq.api.get_current_page();
            const currentPageName = asPageName(currentPage);
            if (currentPageName) {
              const seeded = await logseq.api.append_block_in_page(
                currentPageName,
                config.markerPrefix + ' anchor',
                {}
              );
              if (seeded?.uuid) return seeded;
            }
          }

          if (typeof logseq.api.get_today_page === 'function') {
            const todayPage = await logseq.api.get_today_page();
            const todayPageName = asPageName(todayPage);
            if (todayPageName) {
              const seeded = await logseq.api.append_block_in_page(
                todayPageName,
                config.markerPrefix + ' anchor',
                {}
              );
              if (seeded?.uuid) return seeded;
            }
          }

          {
            const seeded = await logseq.api.append_block_in_page(
              config.fallbackPageName,
              config.markerPrefix + ' anchor',
              {}
            );
            if (seeded?.uuid) return seeded;
          }
        } catch (error) {
          lastError = error;
        }

        await sleep(config.readyPollDelayMs);
      }

      if (lastError) {
        throw new Error('Unable to resolve anchor block: ' + describeError(lastError));
      }
      throw new Error('Unable to resolve anchor block: open a graph and page, then retry');
    };

    const counts = {
      add: 0,
      delete: 0,
      move: 0,
      indent: 0,
      outdent: 0,
      undo: 0,
      redo: 0,
      copyPaste: 0,
      copyPasteTreeToEmptyTarget: 0,
      fallbackAdd: 0,
      errors: 0,
    };

    const errors = [];
    const operationLog = [];

    await waitForEditorReady();
    const anchor = await getAnchor();

    if (!(await listManagedBlocks()).length) {
      await logseq.api.insert_block(anchor.uuid, config.markerPrefix + ' seed', {
        sibling: true,
        before: false,
        focus: false,
      });
    }

    let executed = 0;

    for (let i = 0; i < config.plan.length; i += 1) {
      const requested = config.plan[i];
      const operable = await listOperableBlocks();
      let operation = chooseRunnableOperation(requested, operable.length);
      if (operation !== requested) {
        counts.fallbackAdd += 1;
      }

      try {
        await sleep(Math.floor(Math.random() * 40));

        if (operation === 'add') {
          const target = operable.length > 0 ? randomItem(operable) : anchor;
          const content = config.markerPrefix + ' add-' + i;
          await logseq.api.insert_block(target.uuid, content, {
            sibling: true,
            before: false,
            focus: false,
          });
        }

        if (operation === 'copyPaste') {
          const source = randomItem(operable);
          const target = randomItem(operable);
          await logseq.api.select_block(source.uuid);
          await logseq.api.invoke_external_command('logseq.editor/copy');
          const latestSource = await logseq.api.get_block(source.uuid);
          await logseq.api.insert_block(target.uuid, latestSource?.content || source.content || '', {
            sibling: true,
            before: false,
            focus: false,
          });
        }

        if (operation === 'copyPasteTreeToEmptyTarget') {
          const source = randomItem(operable);
          const sourceTree = await logseq.api.get_block(source.uuid, { includeChildren: true });
          if (!sourceTree?.uuid) {
            throw new Error('Failed to load source tree block');
          }

          const treeTarget = operable.length > 0 ? randomItem(operable) : anchor;
          const emptyTarget = await logseq.api.insert_block(treeTarget.uuid, config.markerPrefix + ' tree-target-' + i, {
            sibling: true,
            before: false,
            focus: false,
          });
          if (!emptyTarget?.uuid) {
            throw new Error('Failed to create empty target block');
          }

          await logseq.api.update_block(emptyTarget.uuid, '');
          await logseq.api.insert_batch_block(emptyTarget.uuid, toBatchTree(sourceTree), { sibling: false });
        }

        if (operation === 'move') {
          const source = randomItem(operable);
          const candidates = operable.filter((block) => block.uuid !== source.uuid);
          const target = randomItem(candidates);
          await logseq.api.move_block(source.uuid, target.uuid, {
            before: Math.random() < 0.5,
            children: false,
          });
        }

        if (operation === 'indent') {
          const candidate = await pickIndentCandidate(operable);
          if (!candidate?.uuid) {
            throw new Error('No block can be indented in current operable set');
          }
          await logseq.api.select_block(candidate.uuid);
          await logseq.api.invoke_external_command('logseq.editor/indent');
        }

        if (operation === 'outdent') {
          const candidate = await pickOutdentCandidate(operable);
          if (!candidate?.uuid) {
            throw new Error('No block can be outdented in current operable set');
          }
          await logseq.api.select_block(candidate.uuid);
          await logseq.api.invoke_external_command('logseq.editor/outdent');
        }

        if (operation === 'delete') {
          const candidates = operable.filter((block) => block.uuid !== anchor.uuid);
          const victimPool = candidates.length > 0 ? candidates : operable;
          const victim = randomItem(victimPool);
          await logseq.api.remove_block(victim.uuid);
        }

        if (operation === 'undo') {
          await logseq.api.invoke_external_command('logseq.editor/undo');
          await sleep(config.undoRedoDelayMs);
        }

        if (operation === 'redo') {
          await logseq.api.invoke_external_command('logseq.editor/redo');
          await sleep(config.undoRedoDelayMs);
        }

        counts[operation] += 1;
        executed += 1;
        operationLog.push({ index: i, requested, executedAs: operation });
      } catch (error) {
        counts.errors += 1;
        errors.push({
          index: i,
          requested,
          attempted: operation,
          message: String(error?.message || error),
        });

        try {
          const recoveryOperable = await listOperableBlocks();
          const target = recoveryOperable.length > 0 ? randomItem(recoveryOperable) : anchor;
          await logseq.api.insert_block(target.uuid, config.markerPrefix + ' recovery-' + i, {
            sibling: true,
            before: false,
            focus: false,
          });
          counts.add += 1;
          executed += 1;
          operationLog.push({ index: i, requested, executedAs: 'add' });
        } catch (recoveryError) {
          errors.push({
            index: i,
            requested,
            attempted: 'recovery-add',
            message: String(recoveryError?.message || recoveryError),
          });
          break;
        }
      }
    }

    const finalManaged = await listManagedBlocks();
    return {
      ok: true,
      requestedOps: config.plan.length,
      executedOps: executed,
      counts,
      markerPrefix: config.markerPrefix,
      anchorUuid: anchor.uuid,
      finalManagedCount: finalManaged.length,
      sampleManaged: finalManaged.slice(0, 5).map((block) => ({
        uuid: block.uuid,
        content: block.content,
      })),
      errorCount: errors.length,
      errors: errors.slice(0, 20),
      opLogSample: operationLog.slice(0, 20),
    };
  })())()`;
}
+
+function buildGraphBootstrapProgram(config) {
+  return `(() => (async () => {
+    const config = ${JSON.stringify(config)};
+    const lower = (value) => String(value || '').toLowerCase();
+    const targetGraphLower = lower(config.graphName);
+    const stateKey = '__logseqOpBootstrapState';
+    const state = (window[stateKey] && typeof window[stateKey] === 'object') ? window[stateKey] : {};
+    window[stateKey] = state;
+    if (state.targetGraph !== config.graphName || state.runId !== config.runId) {
+      state.initialGraphName = null;
+      state.initialRepoName = null;
+      state.initialTargetMatched = null;
+      state.passwordAttempts = 0;
+      state.refreshCount = 0;
+      state.graphDetected = false;
+      state.graphCardClicked = false;
+      state.passwordSubmitted = false;
+      state.actionTriggered = false;
+      state.gotoGraphsOk = false;
+      state.gotoGraphsError = null;
+      state.downloadStarted = false;
+      state.downloadCompleted = false;
+      state.downloadCompletionSource = null;
+      state.lastDownloadLog = null;
+      state.lastRefreshAt = 0;
+      state.lastGraphClickAt = 0;
+      state.targetStateStableHits = 0;
+      state.switchAttempts = 0;
+    }
+    state.runId = config.runId;
+    state.targetGraph = config.graphName;
+    if (typeof state.passwordAttempts !== 'number') state.passwordAttempts = 0;
+    if (typeof state.refreshCount !== 'number') state.refreshCount = 0;
+    if (typeof state.graphDetected !== 'boolean') state.graphDetected = false;
+    if (typeof state.graphCardClicked !== 'boolean') state.graphCardClicked = false;
+    if (typeof state.passwordSubmitted !== 'boolean') state.passwordSubmitted = false;
+    if (typeof state.actionTriggered !== 'boolean') state.actionTriggered = false;
+    if (typeof state.gotoGraphsOk !== 'boolean') state.gotoGraphsOk = false;
+    if (typeof state.gotoGraphsError !== 'string' && state.gotoGraphsError !== null) state.gotoGraphsError = null;
+    if (typeof state.downloadStarted !== 'boolean') state.downloadStarted = false;
+    if (typeof state.downloadCompleted !== 'boolean') state.downloadCompleted = false;
+    if (typeof state.downloadCompletionSource !== 'string' && state.downloadCompletionSource !== null) {
+      state.downloadCompletionSource = null;
+    }
+    if (typeof state.lastDownloadLog !== 'object' && state.lastDownloadLog !== null) {
+      state.lastDownloadLog = null;
+    }
+    if (typeof state.initialRepoName !== 'string' && state.initialRepoName !== null) {
+      state.initialRepoName = null;
+    }
+    if (typeof state.initialTargetMatched !== 'boolean' && state.initialTargetMatched !== null) {
+      state.initialTargetMatched = null;
+    }
+    if (typeof state.lastRefreshAt !== 'number') {
+      state.lastRefreshAt = 0;
+    }
+    if (typeof state.lastGraphClickAt !== 'number') {
+      state.lastGraphClickAt = 0;
+    }
+    if (typeof state.targetStateStableHits !== 'number') {
+      state.targetStateStableHits = 0;
+    }
+    if (typeof state.switchAttempts !== 'number') {
+      state.switchAttempts = 0;
+    }
+
+    const setInputValue = (input, value) => {
+      if (!input) return;
+      const setter = Object.getOwnPropertyDescriptor(window.HTMLInputElement.prototype, 'value')?.set;
+      if (setter) {
+        setter.call(input, value);
+      } else {
+        input.value = value;
+      }
+      input.dispatchEvent(new Event('input', { bubbles: true }));
+      input.dispatchEvent(new Event('change', { bubbles: true }));
+    };
+
+    const dispatchClick = (node) => {
+      if (!(node instanceof HTMLElement)) return false;
+      try {
+        node.scrollIntoView({ block: 'center', inline: 'center' });
+      } catch (_error) {
+        // ignore scroll failures
+      }
+
+      try {
+        node.focus();
+      } catch (_error) {
+        // ignore focus failures
+      }
+
+      try {
+        node.click();
+      } catch (_error) {
+        // continue with explicit events
+      }
+
+      node.dispatchEvent(new MouseEvent('mousedown', { view: window, bubbles: true, cancelable: true }));
+      node.dispatchEvent(new MouseEvent('mouseup', { view: window, bubbles: true, cancelable: true }));
+      node.dispatchEvent(new MouseEvent('click', { view: window, bubbles: true, cancelable: true }));
+      return true;
+    };
+
+    const graphNameMatchesTarget = (graphName) => {
+      const value = lower(graphName);
+      if (!value) return false;
+      return (
+        value === targetGraphLower ||
+        value.endsWith('/' + targetGraphLower) ||
+        value.endsWith('_' + targetGraphLower) ||
+        value.includes('logseq_db_' + targetGraphLower)
+      );
+    };
+
+    const stateMatchesTarget = (repoName, graphName) => {
+      const hasRepo = typeof repoName === 'string' && repoName.length > 0;
+      const hasGraph = typeof graphName === 'string' && graphName.length > 0;
+      const repoMatches = hasRepo ? graphNameMatchesTarget(repoName) : false;
+      const graphMatches = hasGraph ? graphNameMatchesTarget(graphName) : false;
+      if (hasRepo && hasGraph) {
+        return repoMatches && graphMatches;
+      }
+      if (hasRepo) return repoMatches;
+      if (hasGraph) return graphMatches;
+      return false;
+    };
+
+    const listGraphCards = () =>
+      Array.from(document.querySelectorAll('div[data-testid^="logseq_db_"]'));
+
+    const findGraphCard = () => {
+      const exact = document.querySelector('div[data-testid="logseq_db_' + config.graphName + '"]');
+      if (exact) return exact;
+
+      const byTestId = listGraphCards()
+        .find((card) => lower(card.getAttribute('data-testid')).includes(targetGraphLower));
+      if (byTestId) return byTestId;
+
+      return listGraphCards()
+        .find((card) => lower(card.textContent).includes(targetGraphLower));
+    };
+
+    const clickRefresh = () => {
+      const candidates = Array.from(document.querySelectorAll('button,span,a'));
+      const refreshNode = candidates.find((el) => (el.textContent || '').trim() === 'Refresh');
+      const clickable = refreshNode ? (refreshNode.closest('button') || refreshNode) : null;
+      return dispatchClick(clickable);
+    };
+
+    const clickGraphCard = (card) => {
+      if (!card) return false;
+      const anchors = Array.from(card.querySelectorAll('a'));
+      const exactAnchor = anchors.find((el) => lower(el.textContent).trim() === targetGraphLower);
+      const looseAnchor = anchors.find((el) => lower(el.textContent).includes(targetGraphLower));
+      const anyAnchor = anchors[0];
+      const actionButton = Array.from(card.querySelectorAll('button'))
+        .find((el) => lower(el.textContent).includes(targetGraphLower));
+      const target = exactAnchor || looseAnchor || anyAnchor || actionButton || card;
+      return dispatchClick(target);
+    };
+
+    const getCurrentGraphName = async () => {
+      try {
+        if (!globalThis.logseq?.api?.get_current_graph) return null;
+        const current = await logseq.api.get_current_graph();
+        if (!current || typeof current !== 'object') return null;
+        if (typeof current.name === 'string' && current.name.length > 0) return current.name;
+        if (typeof current.url === 'string' && current.url.length > 0) {
+          const parts = current.url.split('/').filter(Boolean);
+          return parts[parts.length - 1] || null;
+        }
+      } catch (_error) {
+        // ignore
+      }
+      return null;
+    };
+
+    const getCurrentRepoName = () => {
+      try {
+        if (!globalThis.logseq?.api?.get_state_from_store) return null;
+        const value = logseq.api.get_state_from_store(['git/current-repo']);
+        return typeof value === 'string' && value.length > 0 ? value : null;
+      } catch (_error) {
+        return null;
+      }
+    };
+
+    const getDownloadingGraphUuid = () => {
+      try {
+        if (!globalThis.logseq?.api?.get_state_from_store) return null;
+        return logseq.api.get_state_from_store(['rtc/downloading-graph-uuid']);
+      } catch (_error) {
+        return null;
+      }
+    };
+
+    const getRtcLog = () => {
+      try {
+        if (!globalThis.logseq?.api?.get_state_from_store) return null;
+        return logseq.api.get_state_from_store(['rtc/log']);
+      } catch (_error) {
+        return null;
+      }
+    };
+
+    const asLower = (value) => String(value || '').toLowerCase();
+    const parseRtcDownloadLog = (value) => {
+      if (!value || typeof value !== 'object') return null;
+      const type = value.type || value['type'] || null;
+      const typeLower = asLower(type);
+      if (!typeLower.includes('rtc.log/download')) return null;
+
+      const subType =
+        value['sub-type'] ||
+        value.subType ||
+        value.subtype ||
+        value.sub_type ||
+        null;
+      const graphUuid =
+        value['graph-uuid'] ||
+        value.graphUuid ||
+        value.graph_uuid ||
+        null;
+      const message = value.message || null;
+      return {
+        type: String(type || ''),
+        subType: String(subType || ''),
+        graphUuid: graphUuid ? String(graphUuid) : null,
+        message: message ? String(message) : null,
+      };
+    };
+
+    const probeGraphReady = async () => {
+      try {
+        if (!globalThis.logseq?.api?.get_current_page_blocks_tree) {
+          return { ok: false, reason: 'get_current_page_blocks_tree unavailable' };
+        }
+        await logseq.api.get_current_page_blocks_tree();
+        return { ok: true, reason: null };
+      } catch (error) {
+        return { ok: false, reason: String(error?.message || error) };
+      }
+    };
+
+    const initialGraphName = await getCurrentGraphName();
+    const initialRepoName = getCurrentRepoName();
+    const initialTargetMatched = stateMatchesTarget(initialRepoName, initialGraphName);
+    if (!state.initialGraphName && initialGraphName) {
+      state.initialGraphName = initialGraphName;
+    }
+    if (!state.initialRepoName && initialRepoName) {
+      state.initialRepoName = initialRepoName;
+    }
+    if (state.initialTargetMatched === null) {
+      state.initialTargetMatched = initialTargetMatched;
+    }
+
+    const shouldForceSelection =
+      (config.forceSelection === true && !state.graphCardClicked && !state.downloadStarted) ||
+      !initialTargetMatched;
+    let onGraphsPage = location.hash.includes('/graphs');
+    if ((shouldForceSelection || !initialTargetMatched) && !onGraphsPage) {
+      try {
+        location.hash = '#/graphs';
+        state.gotoGraphsOk = true;
+      } catch (error) {
+        state.gotoGraphsError = String(error?.message || error);
+      }
+      onGraphsPage = location.hash.includes('/graphs');
+    }
+
+    const modal = document.querySelector('.e2ee-password-modal-content');
+    const passwordModalVisible = !!modal;
+    let passwordAttempted = false;
+    let passwordSubmittedThisStep = false;
+    if (modal) {
+      const passwordInputs = Array.from(
+        modal.querySelectorAll('input[type="password"], .ls-toggle-password-input input, input')
+      );
+      if (passwordInputs.length >= 2) {
+        setInputValue(passwordInputs[0], config.password);
+        setInputValue(passwordInputs[1], config.password);
+        passwordAttempted = true;
+      } else if (passwordInputs.length === 1) {
+        setInputValue(passwordInputs[0], config.password);
+        passwordAttempted = true;
+      }
+
+      if (passwordAttempted) {
+        state.passwordAttempts += 1;
+      }
+
+      const submitButton = Array.from(modal.querySelectorAll('button'))
+        .find((button) => /(submit|open|unlock|confirm|enter)/i.test((button.textContent || '').trim()));
+      if (submitButton && !submitButton.disabled) {
+        passwordSubmittedThisStep = dispatchClick(submitButton);
+        state.passwordSubmitted = state.passwordSubmitted || passwordSubmittedThisStep;
+        state.actionTriggered = state.actionTriggered || passwordSubmittedThisStep;
+      }
+    }
+
+    let graphCardClickedThisStep = false;
+    let refreshClickedThisStep = false;
+    if (location.hash.includes('/graphs')) {
+      const card = findGraphCard();
+      if (card) {
+        const now = Date.now();
+        state.graphDetected = true;
+        if (!state.graphCardClicked && now - state.lastGraphClickAt >= 500) {
+          graphCardClickedThisStep = clickGraphCard(card);
+          if (graphCardClickedThisStep) {
+            state.lastGraphClickAt = now;
+            state.switchAttempts += 1;
+          }
+          state.graphCardClicked = state.graphCardClicked || graphCardClickedThisStep;
+          state.actionTriggered = state.actionTriggered || graphCardClickedThisStep;
+        }
+      } else {
+        const now = Date.now();
+        if (now - state.lastRefreshAt >= 2000) {
+          refreshClickedThisStep = clickRefresh();
+          if (refreshClickedThisStep) {
+            state.refreshCount += 1;
+            state.lastRefreshAt = now;
+          }
+        }
+      }
+    }
+
+    const downloadingGraphUuid = getDownloadingGraphUuid();
+    if (downloadingGraphUuid) {
+      state.actionTriggered = true;
+      state.downloadStarted = true;
+    }
+
+    const rtcDownloadLog = parseRtcDownloadLog(getRtcLog());
+    if (rtcDownloadLog) {
+      state.lastDownloadLog = rtcDownloadLog;
+      const subTypeLower = asLower(rtcDownloadLog.subType);
+      const messageLower = asLower(rtcDownloadLog.message);
+      if (subTypeLower.includes('download-progress') || subTypeLower.includes('downloadprogress')) {
+        state.downloadStarted = true;
+      }
+      if (
+        (subTypeLower.includes('download-completed') || subTypeLower.includes('downloadcompleted')) &&
+        messageLower.includes('ready')
+      ) {
+        state.downloadStarted = true;
+        state.downloadCompleted = true;
+        state.downloadCompletionSource = 'rtc-log';
+      }
+    }
+
+    const currentGraphName = await getCurrentGraphName();
+    const currentRepoName = getCurrentRepoName();
+    const onGraphsPageFinal = location.hash.includes('/graphs');
+    const repoMatchesTarget = graphNameMatchesTarget(currentRepoName);
+    const graphMatchesTarget = graphNameMatchesTarget(currentGraphName);
+    const switchedToTargetGraph = stateMatchesTarget(currentRepoName, currentGraphName) && !onGraphsPageFinal;
+    if (switchedToTargetGraph) {
+      state.targetStateStableHits += 1;
+    } else {
+      state.targetStateStableHits = 0;
+    }
+    if (
+      !switchedToTargetGraph &&
+      !onGraphsPageFinal &&
+      !passwordModalVisible &&
+      !state.downloadStarted &&
+      !state.graphCardClicked
+    ) {
+      try {
+        location.hash = '#/graphs';
+        state.gotoGraphsOk = true;
+      } catch (error) {
+        state.gotoGraphsError = String(error?.message || error);
+      }
+    }
+    const needsReadinessProbe =
+      switchedToTargetGraph &&
+      !passwordModalVisible &&
+      !downloadingGraphUuid;
+    const readyProbe = needsReadinessProbe
+      ? await probeGraphReady()
+      : { ok: false, reason: 'skipped' };
+
+    if (state.downloadStarted && !state.downloadCompleted && readyProbe.ok) {
+      state.downloadCompleted = true;
+      state.downloadCompletionSource = 'db-ready-probe';
+    }
+
+    const downloadLifecycleSatisfied = !state.downloadStarted || state.downloadCompleted;
+    const requiresAction = config.requireAction !== false;
+    const ok =
+      switchedToTargetGraph &&
+      !passwordModalVisible &&
+      !downloadingGraphUuid &&
+      readyProbe.ok &&
+      downloadLifecycleSatisfied &&
+      (!requiresAction || state.actionTriggered) &&
+      state.targetStateStableHits >= 2;
+    const availableCards = listGraphCards().slice(0, 10).map((card) => ({
+      dataTestId: card.getAttribute('data-testid'),
+      text: (card.textContent || '').replace(/\\s+/g, ' ').trim().slice(0, 120),
+    }));
+
+    return {
+      ok,
+      targetGraph: config.graphName,
+      initialGraphName: state.initialGraphName || null,
+      initialRepoName: state.initialRepoName || null,
+      initialTargetMatched: state.initialTargetMatched,
+      currentGraphName,
+      currentRepoName,
+      gotoGraphsOk: state.gotoGraphsOk,
+      gotoGraphsError: state.gotoGraphsError,
+      onGraphsPage: onGraphsPageFinal,
+      downloadingGraphUuid,
+      switchedToTargetGraph,
+      repoMatchesTarget,
+      graphMatchesTarget,
+      readyProbe,
+      actionTriggered: state.actionTriggered,
+      graphDetected: state.graphDetected,
+      graphCardClicked: state.graphCardClicked,
+      graphCardClickedThisStep,
+      switchAttempts: state.switchAttempts,
+      refreshCount: state.refreshCount,
+      refreshClickedThisStep,
+      passwordAttempts: state.passwordAttempts,
+      passwordAttempted,
+      passwordModalVisible,
+      passwordSubmitted: state.passwordSubmitted,
+      passwordSubmittedThisStep,
+      downloadStarted: state.downloadStarted,
+      downloadCompleted: state.downloadCompleted,
+      downloadCompletionSource: state.downloadCompletionSource,
+      targetStateStableHits: state.targetStateStableHits,
+      lastDownloadLog: state.lastDownloadLog,
+      availableCards,
+    };
+  })())()`;
+}
+
async function runGraphBootstrap(sessionName, args, runOptions) {
  // Repeatedly evaluate the graph-bootstrap program inside the browser session
  // until it reports `ok` (target graph selected/downloaded and stable) or the
  // configured switch timeout elapses.
  //
  // Params:
  //   sessionName - agent-browser session identifier to evaluate against.
  //   args        - parsed CLI args; uses `switchTimeoutMs`, `graph`, `e2ePassword`.
  //   runOptions  - options forwarded to runAgentBrowser (profile, headed, ...).
  // Returns the final bootstrap state object once it reports ok.
  // Throws when the deadline passes, embedding the last observed state.
  const deadline = Date.now() + args.switchTimeoutMs;
  const bootstrapRunId = `${Date.now()}-${Math.random().toString(16).slice(2)}`;
  // The program depends only on run-constant inputs, so build it once instead
  // of re-serializing the identical source on every polling iteration.
  const bootstrapProgram = buildGraphBootstrapProgram({
    runId: bootstrapRunId,
    graphName: args.graph,
    password: args.e2ePassword,
    forceSelection: true,
    requireAction: true,
  });
  let lastBootstrap = null;

  while (Date.now() < deadline) {
    const bootstrapEvaluation = await runAgentBrowser(
      sessionName,
      ['eval', '--stdin'],
      {
        input: bootstrapProgram,
        ...runOptions,
      }
    );
    const bootstrap = bootstrapEvaluation?.data?.result;
    if (!bootstrap || typeof bootstrap !== 'object') {
      throw new Error('Graph bootstrap returned empty state for session ' + sessionName);
    }

    lastBootstrap = bootstrap;
    if (bootstrap.ok) {
      return bootstrap;
    }

    // Brief pause between polls so the renderer can make progress.
    await sleep(250);
  }

  throw new Error(
    'Failed to switch/download graph "' + args.graph + '" within timeout. ' +
    'Last bootstrap state: ' + JSON.stringify(lastBootstrap)
  );
}
+
function buildGraphProbeProgram(graphName) {
  // Build the in-page probe program that reports whether the renderer is
  // already on the requested graph (and not sitting on the /graphs page).
  // The target name is serialized once and spliced into the program twice.
  const targetJson = JSON.stringify(String(graphName || ''));
  return `(() => (async () => {
    const target = ${targetJson}.toLowerCase();
    const lower = (v) => String(v || '').toLowerCase();
    const matches = (value) => {
      const v = lower(value);
      if (!v) return false;
      return v === target || v.endsWith('/' + target) || v.endsWith('_' + target) || v.includes('logseq_db_' + target);
    };

    let currentGraphName = null;
    let currentRepoName = null;
    try {
      if (globalThis.logseq?.api?.get_current_graph) {
        const current = await logseq.api.get_current_graph();
        currentGraphName = current?.name || current?.url || null;
      }
    } catch (_error) {
      // ignore
    }
    try {
      if (globalThis.logseq?.api?.get_state_from_store) {
        currentRepoName = logseq.api.get_state_from_store(['git/current-repo']) || null;
      }
    } catch (_error) {
      // ignore
    }

    const repoMatchesTarget = matches(currentRepoName);
    const graphMatchesTarget = matches(currentGraphName);
    const onGraphsPage = location.hash.includes('/graphs');
    const stableTarget = (repoMatchesTarget || graphMatchesTarget) && !onGraphsPage;

    return {
      targetGraph: ${targetJson},
      currentGraphName,
      currentRepoName,
      repoMatchesTarget,
      graphMatchesTarget,
      onGraphsPage,
      stableTarget,
    };
  })())()`;
}
+
// Verify the renderer is on the target graph before running edit operations.
// Alternates a lightweight probe with a full bootstrap: when the probe does not
// report a stable target, runGraphBootstrap drives the switch/download flow and
// the probe is retried, for at most 4 rounds.
// NOTE(review): this helper is not called from the code visible in this file
// section — confirm it is wired in elsewhere or intended for future use.
async function ensureTargetGraphBeforeOps(sessionName, args, runOptions) {
  let lastProbe = null;
  let lastBootstrap = null;
  for (let attempt = 0; attempt < 4; attempt += 1) {
    // Evaluate the probe program in the page to read the current graph state.
    const probeEval = await runAgentBrowser(
      sessionName,
      ['eval', '--stdin'],
      {
        input: buildGraphProbeProgram(args.graph),
        ...runOptions,
      }
    );
    const probe = probeEval?.data?.result;
    lastProbe = probe;
    if (probe?.stableTarget) {
      // Target graph is active and we are off the graph-selection page.
      return { ok: true, probe, bootstrap: lastBootstrap };
    }

    // Not on the target yet: run the full bootstrap flow, then re-probe.
    lastBootstrap = await runGraphBootstrap(sessionName, args, runOptions);
  }

  throw new Error(
    'Target graph verification failed before ops. ' +
    'Last probe: ' + JSON.stringify(lastProbe) + '. ' +
    'Last bootstrap: ' + JSON.stringify(lastBootstrap)
  );
}
+
function buildSessionNames(baseSession, instances) {
  // One session name per instance: a single instance keeps the bare base name,
  // while multiple instances get 1-based numeric suffixes.
  if (instances <= 1) return [baseSession];
  return Array.from({ length: instances }, (_unused, idx) => `${baseSession}-${idx + 1}`);
}
+
function shuffleOperationPlan(plan) {
  // Fisher-Yates shuffle over a defensive copy; non-array input yields [].
  if (!Array.isArray(plan)) return [];
  const copy = plan.slice();
  for (let idx = copy.length - 1; idx > 0; idx -= 1) {
    const pick = Math.floor(Math.random() * (idx + 1));
    [copy[idx], copy[pick]] = [copy[pick], copy[idx]];
  }
  return copy;
}
+
// Drive one browser session end-to-end: (optionally) reset it, open the app
// URL, bootstrap the target graph, then run the randomized operation plan in
// the renderer and return the evaluation result annotated with runtime info.
async function runSimulationForSession(sessionName, index, args, sharedConfig) {
  if (args.resetSession) {
    try {
      await runAgentBrowser(sessionName, ['close'], {
        autoConnect: false,
        headed: false,
      });
    } catch (_error) {
      // session may not exist yet
    }
  }

  // Per-instance browser options (profile varies per instance, rest is shared).
  const runOptions = {
    headed: args.headed,
    autoConnect: args.autoConnect,
    profile: sharedConfig.instanceProfiles[index] ?? null,
    launchArgs: sharedConfig.effectiveLaunchArgs,
    executablePath: sharedConfig.effectiveExecutablePath,
  };

  await runAgentBrowser(sessionName, ['open', args.url], runOptions);
  await ensureActiveTabOnTargetUrl(sessionName, args.url, runOptions);

  // Switch to / download the target graph before any edits are attempted.
  const bootstrap = await runGraphBootstrap(sessionName, args, runOptions);

  // Each client runs the shared plan in its own random order, with a
  // client-specific marker prefix so created blocks can be attributed.
  const clientPlan = shuffleOperationPlan(sharedConfig.plan);
  const markerPrefix = `${sharedConfig.runPrefix}client-${index + 1}-`;
  const rendererProgram = buildRendererProgram({
    runPrefix: sharedConfig.runPrefix,
    markerPrefix,
    plan: clientPlan,
    undoRedoDelayMs: args.undoRedoDelayMs,
    readyTimeoutMs: RENDERER_READY_TIMEOUT_MS,
    readyPollDelayMs: RENDERER_READY_POLL_DELAY_MS,
    fallbackPageName: FALLBACK_PAGE_NAME,
  });

  const evaluation = await runAgentBrowser(
    sessionName,
    ['eval', '--stdin'],
    {
      input: rendererProgram,
      ...runOptions,
    }
  );

  const value = evaluation?.data?.result;
  if (!value) {
    throw new Error('Unexpected empty result from agent-browser eval');
  }

  // Attach the effective runtime configuration for post-run diagnostics.
  value.runtime = {
    session: sessionName,
    instanceIndex: index + 1,
    effectiveProfile: runOptions.profile,
    effectiveLaunchArgs: sharedConfig.effectiveLaunchArgs,
    effectiveExecutablePath: sharedConfig.effectiveExecutablePath,
    bootstrap,
    autoConnect: args.autoConnect,
    headed: args.headed,
  };

  return value;
}
+
// CLI entry point: parse args, resolve Chrome profile/executable settings,
// then run one simulation per requested instance in parallel and report a
// combined result (exit code 2 on any failing instance).
async function main() {
  let args;
  try {
    args = parseArgs(process.argv.slice(2));
  } catch (error) {
    console.error(error.message);
    console.error('\n' + usage());
    process.exit(1);
    return;
  }

  if (args.help) {
    console.log(usage());
    return;
  }

  // Echoed verbatim in print-only mode so users can inspect the effective config.
  const preview = {
    url: args.url,
    session: args.session,
    instances: args.instances,
    graph: args.graph,
    e2ePassword: args.e2ePassword,
    switchTimeoutMs: args.switchTimeoutMs,
    profile: args.profile,
    executablePath: args.executablePath,
    autoConnect: args.autoConnect,
    resetSession: args.resetSession,
    ops: args.ops,
    undoRedoDelayMs: args.undoRedoDelayMs,
    headed: args.headed,
  };

  if (args.printOnly) {
    console.log(JSON.stringify(preview, null, 2));
    return;
  }

  // Fail fast when the agent-browser CLI is not installed/resolvable.
  await spawnAndCapture('agent-browser', ['--version']);

  const sessionNames = buildSessionNames(args.session, args.instances);
  // Resolve the Chrome profile: 'none' disables it, 'auto' detects one,
  // anything else is resolved as given.
  let effectiveProfile;
  if (args.profile === 'none') {
    effectiveProfile = null;
  } else if (args.profile === 'auto') {
    const autoName = await detectChromeProfile();
    effectiveProfile = await resolveProfileArgument(autoName);
  } else {
    effectiveProfile = await resolveProfileArgument(args.profile);
  }
  const effectiveExecutablePath =
    args.executablePath || (await detectChromeExecutablePath());
  const effectiveLaunchArgs = effectiveProfile ? buildChromeLaunchArgs(args.url) : null;

  // Assign one profile per instance. When several instances would share a
  // profile NAME, isolated user-data dirs are created for instance 2..n;
  // path-like profiles are passed through unchanged for every instance.
  const instanceProfiles = [];
  if (args.instances <= 1 || !effectiveProfile) {
    for (let i = 0; i < args.instances; i += 1) {
      instanceProfiles.push(effectiveProfile);
    }
  } else if (looksLikePath(effectiveProfile)) {
    for (let i = 0; i < args.instances; i += 1) {
      instanceProfiles.push(effectiveProfile);
    }
  } else {
    instanceProfiles.push(effectiveProfile);
    for (let i = 1; i < args.instances; i += 1) {
      const isolated = await createIsolatedChromeUserDataDir(effectiveProfile, i + 1);
      instanceProfiles.push(isolated);
    }
  }

  const sharedConfig = {
    runPrefix: `op-sim-${Date.now()}-`,
    effectiveProfile,
    instanceProfiles,
    effectiveLaunchArgs,
    effectiveExecutablePath,
    plan: buildOperationPlan(args.ops),
  };

  // Run every instance concurrently; allSettled so one failure does not hide
  // the results of the others.
  const tasks = sessionNames.map((sessionName, index) =>
    runSimulationForSession(sessionName, index, args, sharedConfig)
  );
  const settled = await Promise.allSettled(tasks);

  // Single-instance mode keeps the original flat output format.
  if (sessionNames.length === 1) {
    const single = settled[0];
    if (single.status === 'rejected') {
      throw single.reason;
    }
    const value = single.value;
    console.log(JSON.stringify(value, null, 2));
    if (!value.ok || value.executedOps < args.ops) {
      process.exitCode = 2;
    }
    return;
  }

  // Multi-instance mode: summarize per-session pass/fail plus aggregate counts.
  const results = settled.map((entry, idx) => {
    const sessionName = sessionNames[idx];
    if (entry.status === 'fulfilled') {
      const value = entry.value;
      // An instance passes only when the renderer reports ok AND it executed
      // at least the requested number of operations.
      const passed = Boolean(value?.ok) && Number(value?.executedOps || 0) >= args.ops;
      return {
        session: sessionName,
        instanceIndex: idx + 1,
        ok: passed,
        result: value,
      };
    }

    return {
      session: sessionName,
      instanceIndex: idx + 1,
      ok: false,
      error: String(entry.reason?.stack || entry.reason?.message || entry.reason),
    };
  });

  const successCount = results.filter((item) => item.ok).length;
  const output = {
    ok: successCount === results.length,
    instances: results.length,
    successCount,
    failureCount: results.length - successCount,
    results,
  };

  console.log(JSON.stringify(output, null, 2));
  if (!output.ok) {
    process.exitCode = 2;
  }
}
+
// Script entry: surface any unhandled failure and exit non-zero.
main().catch((error) => {
  console.error(error.stack || String(error));
  process.exit(1);
});

+ 670 - 0
scripts/sync-open-electrion-simulate.cjs

@@ -0,0 +1,670 @@
+#!/usr/bin/env node
+'use strict';
+
+const {
+  buildOperationPlan,
+  parseArgs,
+} = require('./lib/logseq-electron-op-sim.cjs');
+
+const DEFAULT_TARGET_TITLE = 'Logseq';
+const WebSocketCtor = globalThis.WebSocket;
+const DEBUG_TARGET_WAIT_TIMEOUT_MS = 30000;
+const DEBUG_TARGET_RETRY_DELAY_MS = 300;
+const RENDERER_READY_TIMEOUT_MS = 30000;
+const RENDERER_READY_POLL_DELAY_MS = 250;
+const BASE_EVALUATE_TIMEOUT_MS = 120000;
+const PER_OP_EVALUATE_TIMEOUT_MS = 250;
+const FALLBACK_PAGE_NAME = 'op-sim-scratch';
+
function sleep(ms) {
  // Promise-based delay: resolves (with no value) after `ms` milliseconds.
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
+
function usage() {
  // Human-readable CLI help text for this script.
  // Fix: the usage line previously pointed at scripts/logseq-electron-op-sim.cjs
  // (the shared library's sibling CLI); this file lives at
  // scripts/sync-open-electrion-simulate.cjs per the commit header.
  return [
    'Usage: node scripts/sync-open-electrion-simulate.cjs [options]',
    '',
    'Options:',
    '  --ops <n>                 Total operations to execute (must be >= 200, default: 200)',
    '  --port <n>                Electron remote debug port (default: 9333)',
    '  --undo-redo-delay-ms <n>  Wait time after undo/redo command (default: 350)',
    '  -h, --help                Show this message',
    '',
    'Prerequisite: start Logseq Electron with --remote-debugging-port=<port>.',
  ].join('\n');
}
+
function wsAddListener(ws, event, handler) {
  // Attach `handler` to `ws` for `event`, supporting both the browser-style
  // addEventListener API and the Node `ws` package's EventEmitter `.on` API.
  if (typeof ws.addEventListener === 'function') {
    ws.addEventListener(event, handler);
    return;
  }

  ws.on(event, (...listenerArgs) => {
    if (event !== 'message') {
      handler(...listenerArgs);
      return;
    }
    // Normalize Node message payloads (Buffer or string) to the browser
    // MessageEvent-like shape ({ data: string }) that callers expect.
    const raw = listenerArgs[0];
    handler({ data: typeof raw === 'string' ? raw : raw.toString() });
  });
}
+
// Minimal Chrome DevTools Protocol client over an open WebSocket: correlates
// request ids with responses and enforces a per-request timeout.
// NOTE: the pending-map delete / clearTimeout ordering below is deliberate so a
// late response after a timeout (or a timeout after close) cannot double-settle
// a promise; keep the statement order intact.
function createCdpClient(ws) {
  let id = 0;                 // monotonically increasing request id
  const pending = new Map();  // request id -> { method, resolve, reject }

  wsAddListener(ws, 'message', (event) => {
    const message = JSON.parse(event.data);
    // Responses carry the request id; CDP events (no id) are ignored here.
    if (!message.id) return;

    const callbacks = pending.get(message.id);
    if (!callbacks) return;
    pending.delete(message.id);

    if (message.error) {
      callbacks.reject(new Error(`CDP error on ${callbacks.method}: ${message.error.message}`));
    } else {
      callbacks.resolve(message.result);
    }
  });

  wsAddListener(ws, 'close', () => {
    // Fail every in-flight request so callers do not hang on a dead socket.
    for (const entry of pending.values()) {
      entry.reject(new Error('CDP connection closed before response'));
    }
    pending.clear();
  });

  return {
    // Send a CDP command and resolve with its `result`; rejects on CDP error,
    // socket close, or after `timeoutMs` (default 20s) without a response.
    send(method, params = {}, timeoutMs = 20000) {
      const requestId = ++id;
      const payload = JSON.stringify({ id: requestId, method, params });

      return new Promise((resolve, reject) => {
        const timeout = setTimeout(() => {
          pending.delete(requestId);
          reject(new Error(`Timeout waiting for ${method}`));
        }, timeoutMs);

        pending.set(requestId, {
          method,
          resolve: (result) => {
            clearTimeout(timeout);
            resolve(result);
          },
          reject: (error) => {
            clearTimeout(timeout);
            reject(error);
          },
        });

        ws.send(payload);
      });
    },
  };
}
+
function pickPageTarget(targets) {
  // Select a single debuggable page target, preferring one whose title
  // mentions the default target title ("Logseq"); throws when none exists.
  const pages = targets.filter(
    (candidate) => candidate.type === 'page' && typeof candidate.webSocketDebuggerUrl === 'string'
  );
  if (!pages.length) {
    throw new Error('No page target found on debug endpoint');
  }

  const preferred = pages.find((candidate) =>
    (candidate.title || '').includes(DEFAULT_TARGET_TITLE)
  );
  return preferred || pages[0];
}
+
function listPageTargets(targets) {
  // All debuggable page targets, ordered so titles matching the default
  // target title ("Logseq") come first.
  const score = (target) => ((target.title || '').includes(DEFAULT_TARGET_TITLE) ? 1 : 0);
  return targets
    .filter((target) => target.type === 'page' && typeof target.webSocketDebuggerUrl === 'string')
    .sort((a, b) => score(b) - score(a));
}
+
function closeWebSocketQuietly(ws) {
  // Best-effort close: tolerate a missing socket and any close() failure.
  if (ws) {
    try {
      ws.close();
    } catch (_error) {
      // swallow — callers only need the close attempt, not its outcome
    }
  }
}
+
// Probe a CDP target for a usable `logseq.api` surface (the editor calls the
// renderer program relies on: get_current_block, a current/today page getter,
// and append_block_in_page). Returns true only when the evaluated expression
// yields exactly `true`.
async function targetHasLogseqApi(cdp) {
  const evaluation = await cdp.send(
    'Runtime.evaluate',
    {
      expression: `(() => {
        const api = globalThis.logseq?.api;
        return !!(
          api &&
          typeof api.get_current_block === 'function' &&
          (
            typeof api.get_current_page === 'function' ||
            typeof api.get_today_page === 'function'
          ) &&
          typeof api.append_block_in_page === 'function'
        );
      })()`,
      returnByValue: true,
      awaitPromise: false,
    },
    10000,
  );
  return evaluation?.result?.value === true;
}
+
// Build the self-contained async program that is evaluated inside the Logseq
// renderer. The program waits for the editor API, seeds an anchor block, then
// executes `config.plan` (add/move/indent/outdent/delete/undo/redo/copy-paste
// variants) against blocks tagged with `config.markerPrefix`, falling back to
// `add` (and a recovery add on errors) so progress continues, and finally
// returns execution counters, error samples, and managed-block samples.
// The entire body below is a template literal — it is runtime string content
// and must stay byte-identical; document changes only outside the backticks.
// NOTE(review): the embedded `shuffle` uses sort(() => Math.random() - 0.5),
// a biased shuffle — acceptable for random candidate picking, but confirm the
// bias is intentional.
function buildRendererProgram(config) {
  return `(() => (async () => {
    const config = ${JSON.stringify(config)};
    const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
    const randomItem = (items) => items[Math.floor(Math.random() * items.length)];
    const shuffle = (items) => [...items].sort(() => Math.random() - 0.5);
    const describeError = (error) => String(error?.message || error);
    const asPageName = (pageLike) => {
      if (typeof pageLike === 'string' && pageLike.length > 0) return pageLike;
      if (!pageLike || typeof pageLike !== 'object') return null;
      if (typeof pageLike.name === 'string' && pageLike.name.length > 0) return pageLike.name;
      if (typeof pageLike.originalName === 'string' && pageLike.originalName.length > 0) return pageLike.originalName;
      if (typeof pageLike.title === 'string' && pageLike.title.length > 0) return pageLike.title;
      return null;
    };

    const waitForEditorReady = async () => {
      const deadline = Date.now() + config.readyTimeoutMs;
      let lastError = null;

      while (Date.now() < deadline) {
        try {
          if (
            globalThis.logseq?.api &&
            typeof logseq.api.get_current_block === 'function' &&
            (
              typeof logseq.api.get_current_page === 'function' ||
              typeof logseq.api.get_today_page === 'function'
            ) &&
            typeof logseq.api.append_block_in_page === 'function'
          ) {
            return;
          }
        } catch (error) {
          lastError = error;
        }

        await sleep(config.readyPollDelayMs);
      }

      if (lastError) {
        throw new Error('Logseq editor readiness timed out: ' + describeError(lastError));
      }
      throw new Error('Logseq editor readiness timed out: logseq.api is unavailable');
    };

    const runPrefix =
      typeof config.runPrefix === 'string' && config.runPrefix.length > 0
        ? config.runPrefix
        : config.markerPrefix;

    const chooseRunnableOperation = (requestedOperation, operableCount) => {
      if (requestedOperation === 'copyPaste' || requestedOperation === 'copyPasteTreeToEmptyTarget') {
        return operableCount >= 1 ? requestedOperation : 'add';
      }
      if (requestedOperation === 'move' || requestedOperation === 'indent' || requestedOperation === 'delete') {
        return operableCount >= 2 ? requestedOperation : 'add';
      }
      return requestedOperation;
    };

    const flattenBlocks = (nodes, acc = []) => {
      if (!Array.isArray(nodes)) return acc;
      for (const node of nodes) {
        if (!node) continue;
        acc.push(node);
        if (Array.isArray(node.children) && node.children.length > 0) {
          flattenBlocks(node.children, acc);
        }
      }
      return acc;
    };

    const isClientBlock = (block) =>
      typeof block?.content === 'string' && block.content.startsWith(config.markerPrefix);

    const isOperableBlock = (block) =>
      typeof block?.content === 'string' && block.content.startsWith(runPrefix);

    const listOperableBlocks = async () => {
      const tree = await logseq.api.get_current_page_blocks_tree();
      const flattened = flattenBlocks(tree, []);
      return flattened.filter(isOperableBlock);
    };

    const listManagedBlocks = async () => {
      const operableBlocks = await listOperableBlocks();
      return operableBlocks.filter(isClientBlock);
    };

    const pickIndentCandidate = async (blocks) => {
      for (const candidate of shuffle(blocks)) {
        const prev = await logseq.api.get_previous_sibling_block(candidate.uuid);
        if (prev?.uuid) return candidate;
      }
      return null;
    };

    const pickOutdentCandidate = async (blocks) => {
      for (const candidate of shuffle(blocks)) {
        const full = await logseq.api.get_block(candidate.uuid, { includeChildren: false });
        const parentId = full?.parent?.id;
        const pageId = full?.page?.id;
        if (parentId && pageId && parentId !== pageId) {
          return candidate;
        }
      }
      return null;
    };

    const toBatchTree = (block) => ({
      content: typeof block?.content === 'string' ? block.content : '',
      children: Array.isArray(block?.children) ? block.children.map(toBatchTree) : [],
    });

    const getAnchor = async () => {
      const deadline = Date.now() + config.readyTimeoutMs;
      let lastError = null;

      while (Date.now() < deadline) {
        try {
          const currentBlock = await logseq.api.get_current_block();
          if (currentBlock && currentBlock.uuid) {
            return currentBlock;
          }

          if (typeof logseq.api.get_current_page === 'function') {
            const currentPage = await logseq.api.get_current_page();
            const currentPageName = asPageName(currentPage);
            if (currentPageName) {
              const seeded = await logseq.api.append_block_in_page(
                currentPageName,
                config.markerPrefix + ' anchor',
                {}
              );
              if (seeded?.uuid) return seeded;
            }
          }

          if (typeof logseq.api.get_today_page === 'function') {
            const todayPage = await logseq.api.get_today_page();
            const todayPageName = asPageName(todayPage);
            if (todayPageName) {
              const seeded = await logseq.api.append_block_in_page(
                todayPageName,
                config.markerPrefix + ' anchor',
                {}
              );
              if (seeded?.uuid) return seeded;
            }
          }

          {
            const seeded = await logseq.api.append_block_in_page(
              config.fallbackPageName,
              config.markerPrefix + ' anchor',
              {}
            );
            if (seeded?.uuid) return seeded;
          }
        } catch (error) {
          lastError = error;
        }

        await sleep(config.readyPollDelayMs);
      }

      if (lastError) {
        throw new Error('Unable to resolve anchor block: ' + describeError(lastError));
      }
      throw new Error('Unable to resolve anchor block: open a graph and page, then retry');
    };

    const counts = {
      add: 0,
      delete: 0,
      move: 0,
      indent: 0,
      outdent: 0,
      undo: 0,
      redo: 0,
      copyPaste: 0,
      copyPasteTreeToEmptyTarget: 0,
      fallbackAdd: 0,
      errors: 0,
    };

    const errors = [];
    const operationLog = [];

    await waitForEditorReady();
    const anchor = await getAnchor();

    if (!(await listManagedBlocks()).length) {
      await logseq.api.insert_block(anchor.uuid, config.markerPrefix + ' seed', {
        sibling: true,
        before: false,
        focus: false,
      });
    }

    let executed = 0;

    for (let i = 0; i < config.plan.length; i += 1) {
      const requested = config.plan[i];
      const operable = await listOperableBlocks();
      let operation = chooseRunnableOperation(requested, operable.length);
      if (operation !== requested) {
        counts.fallbackAdd += 1;
      }

      try {
        await sleep(Math.floor(Math.random() * 40));

        if (operation === 'add') {
          const target = operable.length > 0 ? randomItem(operable) : anchor;
          const content = config.markerPrefix + ' add-' + i;
          await logseq.api.insert_block(target.uuid, content, {
            sibling: true,
            before: false,
            focus: false,
          });
        }

        if (operation === 'copyPaste') {
          const source = randomItem(operable);
          const target = randomItem(operable);
          await logseq.api.select_block(source.uuid);
          await logseq.api.invoke_external_command('logseq.editor/copy');
          const latestSource = await logseq.api.get_block(source.uuid);
          await logseq.api.insert_block(target.uuid, latestSource?.content || source.content || '', {
            sibling: true,
            before: false,
            focus: false,
          });
        }

        if (operation === 'copyPasteTreeToEmptyTarget') {
          const source = randomItem(operable);
          const sourceTree = await logseq.api.get_block(source.uuid, { includeChildren: true });
          if (!sourceTree?.uuid) {
            throw new Error('Failed to load source tree block');
          }

          const treeTarget = operable.length > 0 ? randomItem(operable) : anchor;
          const emptyTarget = await logseq.api.insert_block(treeTarget.uuid, config.markerPrefix + ' tree-target-' + i, {
            sibling: true,
            before: false,
            focus: false,
          });
          if (!emptyTarget?.uuid) {
            throw new Error('Failed to create empty target block');
          }

          await logseq.api.update_block(emptyTarget.uuid, '');
          await logseq.api.insert_batch_block(emptyTarget.uuid, toBatchTree(sourceTree), { sibling: false });
        }

        if (operation === 'move') {
          const source = randomItem(operable);
          const candidates = operable.filter((block) => block.uuid !== source.uuid);
          const target = randomItem(candidates);
          await logseq.api.move_block(source.uuid, target.uuid, {
            before: Math.random() < 0.5,
            children: false,
          });
        }

        if (operation === 'indent') {
          const candidate = await pickIndentCandidate(operable);
          if (!candidate?.uuid) {
            throw new Error('No block can be indented in current operable set');
          }
          await logseq.api.select_block(candidate.uuid);
          await logseq.api.invoke_external_command('logseq.editor/indent');
        }

        if (operation === 'outdent') {
          const candidate = await pickOutdentCandidate(operable);
          if (!candidate?.uuid) {
            throw new Error('No block can be outdented in current operable set');
          }
          await logseq.api.select_block(candidate.uuid);
          await logseq.api.invoke_external_command('logseq.editor/outdent');
        }

        if (operation === 'delete') {
          const candidates = operable.filter((block) => block.uuid !== anchor.uuid);
          const victimPool = candidates.length > 0 ? candidates : operable;
          const victim = randomItem(victimPool);
          await logseq.api.remove_block(victim.uuid);
        }

        if (operation === 'undo') {
          await logseq.api.invoke_external_command('logseq.editor/undo');
          await sleep(config.undoRedoDelayMs);
        }

        if (operation === 'redo') {
          await logseq.api.invoke_external_command('logseq.editor/redo');
          await sleep(config.undoRedoDelayMs);
        }

        counts[operation] += 1;
        executed += 1;
        operationLog.push({ index: i, requested, executedAs: operation });
      } catch (error) {
        counts.errors += 1;
        errors.push({
          index: i,
          requested,
          attempted: operation,
          message: String(error?.message || error),
        });

        try {
          const recoveryOperable = await listOperableBlocks();
          const target = recoveryOperable.length > 0 ? randomItem(recoveryOperable) : anchor;
          await logseq.api.insert_block(target.uuid, config.markerPrefix + ' recovery-' + i, {
            sibling: true,
            before: false,
            focus: false,
          });
          counts.add += 1;
          executed += 1;
          operationLog.push({ index: i, requested, executedAs: 'add' });
        } catch (recoveryError) {
          errors.push({
            index: i,
            requested,
            attempted: 'recovery-add',
            message: String(recoveryError?.message || recoveryError),
          });
          break;
        }
      }
    }

    const finalManaged = await listManagedBlocks();
    return {
      ok: true,
      requestedOps: config.plan.length,
      executedOps: executed,
      counts,
      markerPrefix: config.markerPrefix,
      anchorUuid: anchor.uuid,
      finalManagedCount: finalManaged.length,
      sampleManaged: finalManaged.slice(0, 5).map((block) => ({
        uuid: block.uuid,
        content: block.content,
      })),
      errorCount: errors.length,
      errors: errors.slice(0, 20),
      opLogSample: operationLog.slice(0, 20),
    };
  })())()`;
}
+
async function openWebSocket(url) {
  // Open a WebSocket via the global constructor and wait until it connects.
  // Rejects if the runtime has no WebSocket global or the connection errors.
  if (!WebSocketCtor) {
    throw new Error('Global WebSocket is unavailable in this Node runtime.');
  }

  const socket = new WebSocketCtor(url);
  await new Promise((resolve, reject) => {
    wsAddListener(socket, 'open', resolve);
    wsAddListener(socket, 'error', reject);
  });
  return socket;
}
+
async function fetchDebugTargets(port) {
  // Query the Chromium remote-debugging HTTP endpoint for its target list.
  // Throws on non-2xx responses or when the payload is not a JSON array.
  const endpoint = `http://127.0.0.1:${port}/json/list`;
  const response = await fetch(endpoint);
  if (!response.ok) {
    throw new Error(`Debug endpoint returned HTTP ${response.status} for ${endpoint}`);
  }

  const parsed = await response.json();
  if (!Array.isArray(parsed)) {
    throw new Error('Debug endpoint returned an invalid target list');
  }
  return parsed;
}
+
// Poll the debug endpoint until some page target exposing `logseq.api` accepts
// a CDP connection, or DEBUG_TARGET_WAIT_TIMEOUT_MS elapses.
// Returns { ws, cdp } for the first qualifying target; sockets for rejected
// targets are closed quietly before moving on.
async function connectToLogseqPageWebSocket(port) {
  const deadline = Date.now() + DEBUG_TARGET_WAIT_TIMEOUT_MS;
  let lastError = null;

  while (Date.now() < deadline) {
    try {
      const targets = await fetchDebugTargets(port);
      const pageTargets = listPageTargets(targets);
      if (pageTargets.length === 0) {
        throw new Error('No page target found on debug endpoint');
      }

      let lastTargetError = null;
      for (const target of pageTargets) {
        let ws = null;
        try {
          ws = await openWebSocket(target.webSocketDebuggerUrl);
          const cdp = createCdpClient(ws);
          await cdp.send('Runtime.enable');
          const hasLogseqApi = await targetHasLogseqApi(cdp);
          if (hasLogseqApi) {
            return { ws, cdp };
          }
          // Wrong target (no logseq.api): release the socket and try the next.
          closeWebSocketQuietly(ws);
        } catch (error) {
          lastTargetError = error;
          closeWebSocketQuietly(ws);
        }
      }

      throw lastTargetError || new Error('No page target exposes logseq.api yet');
    } catch (error) {
      // Remember the most recent failure for the timeout message, then retry.
      lastError = error;
      await sleep(DEBUG_TARGET_RETRY_DELAY_MS);
    }
  }

  const suffix = lastError ? ` Last error: ${String(lastError.message || lastError)}` : '';
  throw new Error(
    `Unable to connect to a Logseq page target within ${DEBUG_TARGET_WAIT_TIMEOUT_MS}ms.` + suffix
  );
}
+
function computeEvaluateTimeoutMs(args) {
  // Overall Runtime.evaluate budget: fixed base plus a per-operation allowance.
  const perOpBudget = args.ops * PER_OP_EVALUATE_TIMEOUT_MS;
  return BASE_EVALUATE_TIMEOUT_MS + perOpBudget;
}
+
function shuffleOperationPlan(plan) {
  // Unbiased Fisher-Yates shuffle on a copy; non-array input yields [].
  if (!Array.isArray(plan)) return [];
  const result = plan.slice();
  for (let cursor = result.length - 1; cursor > 0; cursor -= 1) {
    const swapWith = Math.floor(Math.random() * (cursor + 1));
    [result[cursor], result[swapWith]] = [result[swapWith], result[cursor]];
  }
  return result;
}
+
// CLI entry point: parse args, connect to the Logseq Electron renderer over
// CDP, evaluate the randomized operation program there, and print the result
// (exit code 2 when the run completed but failed its success criteria).
async function main() {
  let args;
  try {
    args = parseArgs(process.argv.slice(2));
  } catch (error) {
    console.error(error.message);
    console.error('\n' + usage());
    process.exit(1);
    return;
  }

  if (args.help) {
    console.log(usage());
    return;
  }

  const runPrefix = `op-sim-${Date.now()}-`;
  const plan = shuffleOperationPlan(buildOperationPlan(args.ops));
  // Single-client script, so the marker prefix is always client-1.
  const markerPrefix = `${runPrefix}client-1-`;
  const { ws, cdp } = await connectToLogseqPageWebSocket(args.port);
  let evaluation;
  try {
    evaluation = await cdp.send(
      'Runtime.evaluate',
      {
        expression: buildRendererProgram({
          runPrefix,
          markerPrefix,
          plan,
          undoRedoDelayMs: args.undoRedoDelayMs,
          readyTimeoutMs: RENDERER_READY_TIMEOUT_MS,
          readyPollDelayMs: RENDERER_READY_POLL_DELAY_MS,
          fallbackPageName: FALLBACK_PAGE_NAME,
        }),
        awaitPromise: true,
        returnByValue: true,
      },
      computeEvaluateTimeoutMs(args),
    );
  } finally {
    // Always release the debugger socket, even when evaluation throws.
    ws.close();
  }

  if (evaluation?.exceptionDetails) {
    const detail = evaluation.exceptionDetails.text || evaluation.exceptionDetails.exception?.description;
    throw new Error(`Runtime.evaluate failed: ${detail || 'unknown renderer exception'}`);
  }
  const value = evaluation?.result?.value;
  if (!value) {
    throw new Error('Unexpected empty Runtime.evaluate result');
  }

  console.log(JSON.stringify(value, null, 2));

  // Fail when the renderer reports not-ok or executed fewer ops than requested.
  if (!value.ok || value.executedOps < args.ops) {
    process.exitCode = 2;
  }
}
+
// Script entry: surface any unhandled failure and exit non-zero.
main().catch((error) => {
  console.error(error.stack || String(error));
  process.exit(1);
});

+ 84 - 0
scripts/test/logseq/logseq-electron-op-sim.test.cjs

@@ -0,0 +1,84 @@
const test = require('node:test');
const assert = require('node:assert/strict');

const {
  buildOperationPlan,
  chooseRunnableOperation,
  parseArgs,
} = require('../../lib/logseq-electron-op-sim.cjs');

// Every operation kind the simulator can emit, in the plan's leading order.
const ALL_KINDS = [
  'add',
  'copyPaste',
  'copyPasteTreeToEmptyTarget',
  'move',
  'indent',
  'outdent',
  'delete',
  'undo',
  'redo',
];

test('buildOperationPlan generates requested length and all operation kinds', () => {
  const plan = buildOperationPlan(500);

  assert.equal(plan.length, 500);
  // The plan starts with one of each kind, in canonical order.
  assert.deepEqual(plan.slice(0, ALL_KINDS.length), ALL_KINDS);
  // And no other kinds appear anywhere in the plan.
  assert.deepEqual(new Set(plan), new Set(ALL_KINDS));
});

test('buildOperationPlan rejects invalid total operation count', () => {
  for (const bad of [0, -1, 12.5, '500']) {
    assert.throws(() => buildOperationPlan(bad), /positive integer/);
  }
});

test('chooseRunnableOperation falls back to add when operation preconditions are unmet', () => {
  // [kind, blockCount] pairs whose preconditions are unmet → fall back to 'add'.
  const fallbackCases = [
    ['copyPaste', 0],
    ['copyPasteTreeToEmptyTarget', 0],
    ['move', 1],
    ['indent', 1],
    ['delete', 1],
  ];
  for (const [kind, blockCount] of fallbackCases) {
    assert.equal(chooseRunnableOperation(kind, blockCount), 'add');
  }

  // Pairs whose preconditions are met → the requested kind passes through.
  const passThroughCases = [
    ['move', 2],
    ['indent', 2],
    ['outdent', 1],
    ['delete', 2],
    ['undo', 0],
    ['redo', 0],
  ];
  for (const [kind, blockCount] of passThroughCases) {
    assert.equal(chooseRunnableOperation(kind, blockCount), kind);
  }
});

test('parseArgs defaults to 500 ops on default debug port', () => {
  const args = parseArgs([]);
  assert.equal(args.ops, 500);
  assert.equal(args.port, 9333);
  assert.equal(args.undoRedoDelayMs, 350);
});

test('parseArgs supports overrides', () => {
  const args = parseArgs(['--ops', '750', '--port', '9444', '--undo-redo-delay-ms', '200']);
  assert.equal(args.ops, 750);
  assert.equal(args.port, 9444);
  assert.equal(args.undoRedoDelayMs, 200);
});

test('parseArgs rejects invalid numbers', () => {
  assert.throws(() => parseArgs(['--ops', '0']), /--ops must be a positive integer/);
  assert.throws(() => parseArgs(['--ops', '499']), /--ops must be at least 500/);
  assert.throws(() => parseArgs(['--port', '-1']), /--port must be a positive integer/);
  assert.throws(() => parseArgs(['--undo-redo-delay-ms', '-1']), /--undo-redo-delay-ms must be a non-negative integer/);
});

+ 7 - 6
src/main/frontend/handler/common/developer.cljs

@@ -164,7 +164,9 @@
                      :server-checksum (:checksum server-diagnostics)
                      :different-blocks diff-blocks}]
     (pprint/pprint diff-data)
-    (js/console.warn "Checksum mismatch between client and server. Diff data:" diff-data)))
+    (when (seq diff-blocks)
+      (js/console.warn "Checksum mismatch between client and server. Diff data:" diff-data))))
+
 
 (defn ^:export recompute-checksum-diagnostics
   []
@@ -184,11 +186,10 @@
                           content (with-out-str (pprint/pprint export-edn))
                           blob (js/Blob. #js [content] (clj->js {:type "text/edn;charset=utf-8"}))
                           filename (checksum-export-file-name repo)]
-                      (p/let [_ (when (client-server-checksum-mismatch? local-checksum remote-checksum)
-                                  (-> (<log-checksum-mismatch-diff! repo export-edn)
-                                      (p/catch (fn [error]
-                                                 (js/console.error "checksum mismatch diff fetch failed:" error)
-                                                 nil))))]
+                      (p/let [_ (-> (<log-checksum-mismatch-diff! repo export-edn)
+                                    (p/catch (fn [error]
+                                               (js/console.error "checksum mismatch diff fetch failed:" error)
+                                               nil)))]
                         (utils/saveToFile blob filename "edn")
                         (notification/show!
                          (str "Checksum recomputed. Recomputed: " recomputed-checksum

+ 14 - 2
src/main/frontend/worker/db_worker.cljs

@@ -193,8 +193,14 @@
   (when db (.close db))
   (when search (.close search))
   (when client-ops (.close client-ops))
+  (sync-download/close-import-state-for-repo! repo)
   (when-let [^js pool (worker-state/get-opfs-pool repo)]
-    (.pauseVfs pool))
+    (try
+      (.pauseVfs pool)
+      (catch :default e
+        (log/warn :db-worker/pause-vfs-failed
+                  {:repo repo
+                   :error (str e)}))))
   (swap! *opfs-pools dissoc repo))
 
 (defn- <invalidate-search-db!
@@ -741,8 +747,14 @@
 
 (def-thread-api :thread-api/release-access-handles
   [repo]
+  (sync-download/close-import-state-for-repo! repo)
   (when-let [^js pool (worker-state/get-opfs-pool repo)]
-    (.pauseVfs pool)
+    (try
+      (.pauseVfs pool)
+      (catch :default e
+        (log/warn :db-worker/pause-vfs-failed
+                  {:repo repo
+                   :error (str e)})))
     nil))
 
 (def-thread-api :thread-api/db-exists

+ 6 - 4
src/main/frontend/worker/sync.cljs

@@ -15,7 +15,8 @@
    [lambdaisland.glogi :as log]
    [logseq.common.util :as common-util]
    [logseq.db-sync.checksum :as sync-checksum]
-   [promesa.core :as p]))
+   [promesa.core :as p]
+   [logseq.db :as ldb]))
 
 (def ^:private reconnect-base-delay-ms 1000)
 (def ^:private reconnect-max-delay-ms 30000)
@@ -53,9 +54,10 @@
 (defn update-local-sync-checksum!
   [repo tx-report]
   (when (worker-state/get-client-ops-conn repo)
-    (client-op/update-local-checksum
-     repo
-     (sync-checksum/update-checksum (client-op/get-local-checksum repo) tx-report))))
+    (let [current-checksum (client-op/get-local-checksum repo)
+          ;; new-checksum (sync-checksum/update-checksum current-checksum tx-report)
+          new-checksum (sync-checksum/recompute-checksum (:db-after tx-report))]
+      (client-op/update-local-checksum repo new-checksum))))
 
 (defn- broadcast-rtc-state!
   [client]

+ 66 - 52
src/main/frontend/worker/sync/apply_txs.cljs

@@ -26,7 +26,8 @@
    [logseq.outliner.page :as outliner-page]
    [logseq.outliner.property :as outliner-property]
    [logseq.outliner.recycle :as outliner-recycle]
-   [promesa.core :as p]))
+   [promesa.core :as p]
+   [logseq.common.util :as common-util]))
 
 (defonce *repo->latest-remote-tx (atom {}))
 (defonce *repo->latest-remote-checksum (atom {}))
@@ -391,7 +392,7 @@
                conn
                tx-meta'
                (fn [row-conn]
-                 (precreate-missing-save-blocks! row-conn ops)
+                 ;; (precreate-missing-save-blocks! row-conn ops)
                  (doseq [op ops]
                    (replay-canonical-outliner-op! row-conn op))))
               {:applied? true
@@ -481,34 +482,55 @@
                          :tx-id (:tx-id local-tx)
                          :outliner-op (:outliner-op local-tx)})))
 
(defn- replace-uuid-str-with-eid
  "When v is a uuid-formatted string that resolves to an existing entity in
  db (via its :block/uuid lookup ref), return that entity's :db/id.
  Otherwise return v unchanged."
  [db v]
  (or (when (and (string? v) (common-util/uuid-string? v))
        (:db/id (d/entity db [:block/uuid (uuid v)])))
      v))
+
(defn- resolve-temp-id
  "For a 5-element [:db/add e a v t] tx vector, map uuid-string e and v
  components onto concrete entity ids where they resolve; any other datom
  shape is returned untouched."
  [db datom-v]
  (if (and (= 5 (count datom-v))
           (= :db/add (first datom-v)))
    (let [[op e a v t] datom-v]
      [op
       (replace-uuid-str-with-eid db e)
       a
       (replace-uuid-str-with-eid db v)
       t])
    datom-v))
+
 (defn- transact-remote-txs!
   [conn remote-txs]
   (loop [remaining remote-txs
          index 0
          results []]
-    (if-let [remote-tx (first remaining)]
-      (let [tx-data (->> (:tx-data remote-tx)
-                         seq)
-            report (try
-                     (ldb/transact! conn tx-data {:transact-remote? true})
-                     (catch :default e
-                       (js/console.error e)
-                       (log/error ::transact-remote-txs! {:remote-tx remote-tx
-                                                          :index (inc index)
-                                                          :total (count remote-txs)})
-                       (throw e)))
-            results' (cond-> results
-                       tx-data
-                       (conj {:tx-data tx-data
-                              :report report}))]
-        (recur (next remaining) (inc index) results'))
-      results)))
+    (let [db @conn]
+      (if-let [remote-tx (first remaining)]
+       (let [tx-data (->> (:tx-data remote-tx)
+                          (map (partial resolve-temp-id db))
+                          seq)
+             report (try
+                      (ldb/transact! conn tx-data {:transact-remote? true})
+                      (catch :default e
+                        (js/console.error e)
+                        (log/error ::transact-remote-txs! {:remote-tx remote-tx
+                                                           :index (inc index)
+                                                           :total (count remote-txs)})
+                        (throw e)))
+             results' (cond-> results
+                        tx-data
+                        (conj {:tx-data tx-data
+                               :report report}))]
+         (recur (next remaining) (inc index) results'))
+       results))))
 
 (defn reverse-local-txs!
   [conn local-txs]
-  ;; (prn :debug :local-txs local-txs)
+  (prn :debug :local-txs local-txs)
   (doall
    (->> local-txs
+        (remove (fn [tx] (= :fix (:outliner-op tx))))
         reverse
         (map-indexed
          (fn [index local-tx]
@@ -622,29 +644,10 @@
           block-base (dissoc block :db/id :block/order)
           block' (merge block-base
                         (op-construct/rewrite-block-title-with-retracted-refs db block-base))]
-      (if (some? block-ent)
-        (outliner-core/save-block! conn
-                                   block'
-                                   (assoc (or opts {}) :persist-op? false))
-        (if (and (:block/uuid block')
-                 (or (:block/page block')
-                     (:block/parent block')))
-          (let [target-ref (or (:block/parent block')
-                               (:block/page block'))
-                target-block (d/entity db target-ref)]
-            (when-not target-block
-              (invalid-rebase-op! op {:args args
-                                      :reason :missing-target-block}))
-            (let [now (.now js/Date)
-                  create-block (-> block'
-                                   (assoc :block/created-at now)
-                                   (assoc :block/updated-at now))]
-              (ldb/transact! conn
-                             [create-block]
-                             {:outliner-op :save-block
-                              :persist-op? false})))
-          (invalid-rebase-op! op {:args args
-                                  :reason :missing-block}))))
+      (when (nil? block-ent)
+        (invalid-rebase-op! op {:args args
+                                :reason :missing-block}))
+      (outliner-core/save-block! conn block' opts))
 
     :insert-blocks
     (let [[blocks target-id opts] args
@@ -652,6 +655,7 @@
           db @conn]
       (when-not (and target-block (seq blocks))
         (invalid-rebase-op! op {:args args}))
+      (prn :debug :insert-blocks :target-block target-block)
       (outliner-core/insert-blocks! conn
                                     (mapv #(op-construct/rewrite-block-title-with-retracted-refs db %) blocks)
                                     target-block
@@ -676,8 +680,9 @@
     (let [[ids target-id opts] args
           ids' (replay-entity-id-coll @conn ids)
           target-id' (or (replay-entity-id-value @conn target-id) target-id)
-          blocks (keep #(d/entity @conn %) ids')]
-      (when (empty? blocks)
+          blocks (keep #(d/entity @conn %) ids')
+          target (d/entity @conn target-id')]
+      (when (or (empty? blocks) (nil? target))
         (invalid-rebase-op! op {:args args}))
       (when (seq blocks)
         (let [opts' (or opts {})
@@ -864,7 +869,7 @@
              (prn :debug :transact :tx-data tx-data)
              (ldb/transact! conn tx-data {:outliner-op :transact}))
            (do
-             (precreate-missing-save-blocks! conn outliner-ops)
+             ;; (precreate-missing-save-blocks! conn outliner-ops)
              (doseq [op outliner-ops]
                (replay-canonical-outliner-op! conn op))))))
       (catch :default error
@@ -922,7 +927,7 @@
 
 (defn- apply-remote-tx-without-local-changes!
   [{:keys [conn remote-txs]}]
-  (ldb/batch-transact-with-temp-conn!
+  (ldb/batch-transact!
    conn
    {:rtc-tx? true
     :without-local-changes? true}
@@ -985,13 +990,22 @@
   (apply-remote-txs! repo client [{:tx-data tx-data}]))
 
 (defn- enqueue-local-tx-aux
-  [repo {:keys [tx-data db-after db-before] :as tx-report}]
+  [repo {:keys [tx-data db-after db-before tx-meta] :as tx-report}]
   (let [normalized (normalize-tx-data db-after db-before tx-data)
         reversed-datoms (reverse-tx-data db-before db-after tx-data)]
-    ;; (prn :debug :enqueue-local-tx :tx-data)
-    ;; (cljs.pprint/pprint tx-data)
-    ;; (prn :debug :enqueue-local-tx :normalized)
-    ;; (cljs.pprint/pprint normalized)
+    (prn :debug :enqueue-local-tx :tx-data)
+    (cljs.pprint/pprint tx-data)
+    (prn :debug :enqueue-local-tx :normalized)
+    (cljs.pprint/pprint normalized)
+    (when (and (= (:outliner-op tx-meta) :insert-blocks)
+               (not (:undo? tx-meta))
+               (not (some (fn [x]
+                            (and (= 5 (count x))
+                                 (= :db/add (first x))
+                                 (= :block/parent (nth x 2))))
+                          normalized)))
+      (throw (ex-info "no :block/parent provided when insert-blocks" {})))
+
     (when (seq normalized)
       (persist-local-tx! repo tx-report normalized reversed-datoms)
       (when-let [client @worker-state/*db-sync-client]

+ 8 - 0
src/main/frontend/worker/sync/download.cljs

@@ -194,6 +194,14 @@
       (close-import-state! state)
       (reset! *import-state nil))))
 
(defn close-import-state-for-repo!
  "Close and clear the in-flight import state iff it belongs to repo.
  Always returns nil."
  [repo]
  (let [state @*import-state]
    (when (and state (= repo (:repo state)))
      (close-import-state! state)
      (reset! *import-state nil)))
  nil)
+
 (defn- require-import-state!
   [repo graph-id import-id]
   (let [state @*import-state]