
wip: debug

Tienson Qin, 2 years ago
commit 232c1cb4c2

deps.edn  +8 -2

@@ -2,8 +2,14 @@
  :deps
  {org.clojure/clojure                   {:mvn/version "1.11.1"}
   rum/rum                               {:mvn/version "0.12.9"}
-  datascript/datascript                 {:git/url "https://github.com/logseq/datascript" ;; fork
-                                         :sha     "f0922f4d10714636711bc0176409290e44ce2feb"}
+
+  datascript/datascript        {:local/root "../../datascript"
+                                :exclusions [persistent-sorted-set/persistent-sorted-set]}
+  ;; datascript/datascript        {:git/url "https://github.com/logseq/datascript" ;; fork
+  ;;                               :sha     "f0922f4d10714636711bc0176409290e44ce2feb"
+  ;;                               :exclusions [persistent-sorted-set/persistent-sorted-set]}
+
+  persistent-sorted-set/persistent-sorted-set {:local/root "../../persistent-sorted-set"}
   datascript-transit/datascript-transit {:mvn/version "0.3.0"}
   borkdude/rewrite-edn                  {:mvn/version "0.4.7"}
   funcool/promesa                       {:mvn/version "4.0.2"}
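
This hunk swaps the git fork of datascript for a local checkout via :local/root and excludes its transitive persistent-sorted-set, which is then supplied from a local checkout as well, so the locally patched sorted-set implementation wins during resolution. A minimal sketch of the same override pattern, with placeholder library names and sibling-directory paths that are assumptions rather than anything from this repository:

  ;; sketch only: point a dep at a local checkout and replace one of its transitive deps
  {:deps
   {some-lib/some-lib   {:local/root "../some-lib"            ;; local checkout instead of :git/url or :mvn/version
                         :exclusions [other-lib/other-lib]}   ;; drop the transitive dep it would bring in
    other-lib/other-lib {:local/root "../other-lib"}}}        ;; and provide that dep from a local checkout too

With :exclusions in place, the resolver no longer pulls persistent-sorted-set through datascript; only the explicit :local/root coordinate supplies it.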

deps/db/deps.edn  +7 -2

@@ -1,7 +1,12 @@
 {:deps
  ;; External deps should be kept in sync with https://github.com/logseq/nbb-logseq/blob/main/bb.edn
- {datascript/datascript        {:git/url "https://github.com/logseq/datascript" ;; fork
-                                :sha     "f0922f4d10714636711bc0176409290e44ce2feb"}
+ {datascript/datascript        {:local/root "../../../../datascript"
+                                :exclusions [persistent-sorted-set/persistent-sorted-set]}
+  ;; datascript/datascript        {:git/url "https://github.com/logseq/datascript" ;; fork
+  ;;                               :sha     "f0922f4d10714636711bc0176409290e44ce2feb"
+  ;;                               :exclusions [persistent-sorted-set/persistent-sorted-set]}
+
+  persistent-sorted-set/persistent-sorted-set {:local/root "../../../../persistent-sorted-set"}
   com.cognitect/transit-cljs   {:mvn/version "0.8.280"}
   cljs-bean/cljs-bean          {:mvn/version "1.5.0"}
   org.clojars.mmb90/cljs-cache {:mvn/version "0.1.4"}}

deps/db/src/logseq/db.cljs  +1 -4

@@ -20,10 +20,7 @@
                               db-graph?
                               (assoc :block/format :markdown)))
                           default-db/built-in-pages)]
-      (d/transact! db-conn
-                   (concat
-                    [{:schema/version db-schema/version}]
-                    built-in-pages)))))
+      (d/transact! db-conn built-in-pages))))
 
 (defn start-conn
   "Create datascript conn with schema and default data"

deps/db/src/logseq/db/sqlite/db.cljs  +28 -23

@@ -10,14 +10,9 @@
             [cljs.cache :as cache]
             [datascript.core :as d]
             [goog.object :as gobj]
-            [logseq.db.frontend.schema :as db-schema]))
-
-(defn- write-transit [data]
-  (t/write (t/writer :json) data))
-
-(defn- read-transit [s]
-  (t/read (t/reader :json) s))
-
+            [logseq.db.frontend.schema :as db-schema]
+            [datascript.transit :as dt]
+            [clojure.edn :as edn]))
 
 ;; Notice: this works only on Node.js environment, it doesn't support browser yet.
 
@@ -107,23 +102,33 @@
   (let [cache (cache/lru-cache-factory {} :threshold threshold)]
     (reify IStorage
       (-store [_ addr+data-seq]
+        (prn :debug :store-addr (map first addr+data-seq))
         (prn :debug :store {:addr-data addr+data-seq})
-        (let [data (map
-                    (fn [[addr data]]
-                      {:addr addr
-                       :content (write-transit data)})
-                    addr+data-seq)]
-          (upsert-addr-content! repo (bean/->js data))))
+        (let [data (->>
+                    (map
+                     (fn [[addr data]]
+                       #js {:addr addr
+                            :content (pr-str data)})
+                     addr+data-seq)
+                    (to-array))]
+          (upsert-addr-content! repo data)))
       (-restore [_ addr]
-        (when-let [content (if (cache/has? cache addr)
-                             (do
-                               (cache/hit cache addr)
-                               (cache/lookup cache addr))
-                             (when-let [result (restore-data-from-addr repo addr)]
-                               (cache/miss cache addr result)
-                               result))]
-          (prn {:content content})
-          (read-transit content))))))
+        (let [content (restore-data-from-addr repo addr)]
+          (when (nil? content)
+            (prn :debug :error :addr-not-exists addr))
+          (prn :debug :restored {:addr addr
+                                 ;; :content content
+                                 })
+          (edn/read-string content))
+        ;; (when-let [content (if (cache/has? cache addr)
+        ;;                      (do
+        ;;                        (cache/hit cache addr)
+        ;;                        (cache/lookup cache addr))
+        ;;                      (when-let [result (restore-data-from-addr repo addr)]
+        ;;                        (cache/miss cache addr result)
+        ;;                        result))]
+        ;;   (edn/read-string content))
+        ))))
 
 (defn open-db!
   [graphs-dir db-name]
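
The storage hunk above replaces the transit read/write helpers with pr-str on -store and clojure.edn/read-string on -restore, bypasses the LRU cache for now, and adds prn debug output (including a warning when an address has no stored content). A minimal sketch of that serialization round-trip in isolation, assuming the node data handed to -store is plain EDN (maps, vectors, keywords, numbers); it illustrates the encoding only, not the actual IStorage or SQLite wiring:

  ;; assuming clojure.edn is required as edn, as in the hunk above
  (let [data    {:level 1 :keys [[:block/uuid 7] [:block/uuid 9]]}  ;; hypothetical node payload
        content (pr-str data)                                       ;; what -store writes per address
        back    (edn/read-string content)]                          ;; what -restore reads back
    (= data back))                                                  ;; => true for plain EDN values

One trade-off worth noting: pr-str/edn round-trips plain Clojure data, but unlike datascript.transit it installs no custom read handlers, so any records or tagged values in the stored nodes would need their own readers.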

deps/outliner/deps.edn  +5 -2

@@ -1,7 +1,10 @@
 {:deps
  ;; External deps should be kept in sync with https://github.com/logseq/nbb-logseq/blob/main/bb.edn
- {datascript/datascript {:git/url "https://github.com/logseq/datascript" ;; fork
-                         :sha     "f0922f4d10714636711bc0176409290e44ce2feb"}
+ {datascript/datascript        {:local/root "../../../../datascript"
+                                :exclusions [persistent-sorted-set/persistent-sorted-set]}
+  persistent-sorted-set/persistent-sorted-set {:local/root "../../persistent-sorted-set"}
+  ;; datascript/datascript {:git/url "https://github.com/logseq/datascript" ;; fork
+  ;;                        :sha     "f0922f4d10714636711bc0176409290e44ce2feb"}
   logseq/db             {:local/root "../db"}
   com.cognitect/transit-cljs {:mvn/version "0.8.280"}}
  :aliases

src/main/frontend/db/restore.cljs  +0 -4

@@ -141,10 +141,6 @@
     ;; TODO: Store schema in sqlite
     ;; (db-migrate/migrate attached-db)
 
-    (d/transact! conn [(react/kv :db/type "db")
-                       {:schema/version db-schema/version}]
-                 {:skip-persist? true})
-
     (when-not electron?
       (js/setTimeout
        (fn []

src/main/frontend/handler/repo.cljs  +3 -2

@@ -7,6 +7,7 @@
             [frontend.date :as date]
             [frontend.db :as db]
             [frontend.db.restore :as db-restore]
+            [logseq.db.frontend.schema :as db-schema]
             [frontend.fs :as fs]
             [frontend.fs.nfs :as nfs]
             [frontend.handler.file :as file-handler]
@@ -546,8 +547,8 @@
           _ (start-repo-db-if-not-exists! full-graph-name)
           _ (state/add-repo! {:url full-graph-name})
           _ (route-handler/redirect-to-home!)
-          _ (db/transact! full-graph-name [(react/kv :db/type "db")]
-                          {:skip-persist? true})
+          _ (db/transact! full-graph-name [(react/kv :db/type "db")
+                                           (react/kv :schema/version db-schema/version)])
           initial-data (sqlite-util/build-db-initial-data config/config-default-content)
           _ (db/transact! full-graph-name initial-data)
           _ (repo-config-handler/set-repo-config-state! full-graph-name config/config-default-content)
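
Here the graph-creation transact is no longer marked :skip-persist?, and it now records :schema/version alongside :db/type via react/kv, replacing the :schema/version datom that was previously written with the built-in pages in deps/db/src/logseq/db.cljs. A rough sketch of the kv idea; the map shape below is an assumption, not Logseq's actual react/kv implementation, and it presumes :db/ident is declared :db.unique/identity in the schema:

  ;; hypothetical kv-style entity keyed by :db/ident (shape is an assumption)
  (defn kv [k v] {:db/ident k, k v})

  (d/transact! conn [(kv :db/type "db")
                     (kv :schema/version db-schema/version)])

  ;; read it back later through the ident lookup ref
  (:schema/version (d/entity @conn [:db/ident :schema/version]))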

src/main/frontend/modules/outliner/pipeline.cljs  +32 -28

@@ -79,16 +79,19 @@
   (let [tx-meta (:tx-meta tx-report)
         {:keys [compute-path-refs? from-disk? new-graph? replace?]} tx-meta]
     (when (and (not from-disk?)
-               (not new-graph?)
-               (not compute-path-refs?))
-
-      (reset-editing-block-content! (:tx-data tx-report) tx-meta)
+               (not new-graph?))
+      (try
+        (reset-editing-block-content! (:tx-data tx-report) tx-meta)
+        (catch :default e
+          (prn :reset-editing-block-content)
+          (js/console.error e)))
 
       (let [{:keys [pages blocks]} (ds-report/get-blocks-and-pages tx-report)
             repo (state/get-current-repo)
-            tx (util/profile
-                "Compute path refs: "
-                (set (compute-block-path-refs-tx tx-report blocks)))
+            tx (when-not compute-path-refs?
+                 (util/profile
+                  "Compute path refs: "
+                  (set (compute-block-path-refs-tx tx-report blocks))))
             tx-report' (if (seq tx)
                          (let [refs-tx-data' (:tx-data (db/transact! repo tx {:outliner/transact? true
                                                                               :replace? true
@@ -99,32 +102,33 @@
             importing? (:graph/importing @state/state)
             deleted-block-uuids (set (outliner-pipeline/filter-deleted-blocks (:tx-data tx-report)))]
 
-        (when (and (seq deleted-block-uuids) (not replace?))
+        (when (and (seq deleted-block-uuids) (not replace?)
+                   (not compute-path-refs?))
           (delete-property-parent-block-if-empty! repo tx-report deleted-block-uuids))
 
+        (let [upsert-blocks (outliner-pipeline/build-upsert-blocks blocks deleted-block-uuids (:db-after tx-report'))
+              updated-blocks (remove (fn [b] (contains? (set deleted-block-uuids)  (:block/uuid b))) blocks)
+              tx-id (get-in tx-report' [:tempids :db/current-tx])
+              update-tx-ids (->>
+                             (map (fn [b]
+                                    (when-let [db-id (:db/id b)]
+                                      {:db/id db-id
+                                       :block/tx-id tx-id})) updated-blocks)
+                             (remove nil?))]
+          (when (and (seq update-tx-ids)
+                     (not (:update-tx-ids? tx-meta)))
+            (db/transact! repo update-tx-ids {:replace? true
+                                              :update-tx-ids? true}))
+          (when (config/db-based-graph? repo)
+            (when-not config/publishing?
+              (go
+                (if (util/electron?)
+                  (<! (persist-db/<transact-data repo (:tx-data tx-report) (:tx-meta tx-report)))
+                  (<! (persist-db/<transact-data repo upsert-blocks deleted-block-uuids)))))))
+
         (when-not importing?
           (react/refresh! repo tx-report'))
 
-        (when (and (not (:skip-persist? tx-meta))
-                   (not replace?)
-                   (not (:update-tx-ids? tx-meta)))
-          (let [upsert-blocks (outliner-pipeline/build-upsert-blocks blocks deleted-block-uuids (:db-after tx-report'))
-                updated-blocks (remove (fn [b] (contains? (set deleted-block-uuids)  (:block/uuid b))) blocks)
-                tx-id (get-in tx-report' [:tempids :db/current-tx])
-                update-tx-ids (map (fn [b]
-                                     (when-let [db-id (:db/id b)]
-                                       {:db/id db-id
-                                        :block/tx-id tx-id})) updated-blocks)]
-            (when (seq update-tx-ids)
-              (db/transact! repo update-tx-ids {:replace? true
-                                                :update-tx-ids? true}))
-            (when (config/db-based-graph? repo)
-              (when-not config/publishing?
-                (go
-                  (if (util/electron?)
-                    (<! (persist-db/<transact-data repo (:tx-data tx-report) (:tx-meta tx-report)))
-                    (<! (persist-db/<transact-data repo upsert-blocks deleted-block-uuids))))))))
-
         (when (and (not (:delete-files? tx-meta))
                    (not replace?))
           (doseq [p (seq pages)]
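
The pipeline rewrite above changes when the bookkeeping and persistence run: path refs are only computed when compute-path-refs? is not already set, the :block/tx-id bookkeeping transact is guarded by an :update-tx-ids? flag so it does not loop, and the SQLite persistence via persist-db/<transact-data now runs regardless of :skip-persist? or :replace?. A small sketch of the re-entrancy guard pattern, with illustrative names (after-transact and bookkeeping-tx are placeholders, not the actual Logseq functions):

  ;; sketch: the pipeline itself calls db/transact!, which re-triggers the pipeline,
  ;; so the nested call is tagged and this branch is skipped on the second pass
  (defn after-transact [tx-report]
    (let [tx-meta (:tx-meta tx-report)
          repo    (state/get-current-repo)]
      (when-not (:update-tx-ids? tx-meta)
        (db/transact! repo
                      (bookkeeping-tx tx-report)   ;; e.g. {:db/id id :block/tx-id tx-id} maps
                      {:replace? true
                       :update-tx-ids? true}))))   ;; marks the nested run so it skips this work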