Browse Source

perf(app): better memory management

adamelmore 2 months ago
parent
commit
842f17d6d9

+ 6 - 2
packages/app/src/components/prompt-input.tsx

@@ -1557,13 +1557,17 @@ export const PromptInput: Component<PromptInputProps> = (props) => {
       })
 
       const timeoutMs = 5 * 60 * 1000
+      const timer = { id: undefined as number | undefined }
       const timeout = new Promise<Awaited<ReturnType<typeof WorktreeState.wait>>>((resolve) => {
-        setTimeout(() => {
+        timer.id = window.setTimeout(() => {
           resolve({ status: "failed", message: language.t("workspace.error.stillPreparing") })
         }, timeoutMs)
       })
 
-      const result = await Promise.race([WorktreeState.wait(sessionDirectory), abort, timeout])
+      const result = await Promise.race([WorktreeState.wait(sessionDirectory), abort, timeout]).finally(() => {
+        if (timer.id === undefined) return
+        clearTimeout(timer.id)
+      })
       pending.delete(session.id)
       if (controller.signal.aborted) return false
       if (result.status === "failed") throw new Error(result.message)

+ 68 - 28
packages/app/src/components/terminal.tsx

@@ -67,6 +67,19 @@ export const Terminal = (props: TerminalProps) => {
   let handleTextareaFocus: () => void
   let handleTextareaBlur: () => void
   let disposed = false
+  const cleanups: VoidFunction[] = []
+
+  const cleanup = () => {
+    if (!cleanups.length) return
+    const fns = cleanups.splice(0).reverse()
+    for (const fn of fns) {
+      try {
+        fn()
+      } catch {
+        // ignore
+      }
+    }
+  }
 
   const getTerminalColors = (): TerminalColors => {
     const mode = theme.mode()
@@ -128,7 +141,7 @@ export const Terminal = (props: TerminalProps) => {
       if (disposed) return
 
       const mod = loaded.mod
-      ghostty = loaded.ghostty
+      const g = loaded.ghostty
 
       const once = { value: false }
 
@@ -138,6 +151,13 @@ export const Terminal = (props: TerminalProps) => {
         url.password = window.__OPENCODE__?.serverPassword
       }
       const socket = new WebSocket(url)
+      cleanups.push(() => {
+        if (socket.readyState !== WebSocket.CLOSED && socket.readyState !== WebSocket.CLOSING) socket.close()
+      })
+      if (disposed) {
+        cleanup()
+        return
+      }
       ws = socket
 
       const t = new mod.Terminal({
@@ -148,8 +168,14 @@ export const Terminal = (props: TerminalProps) => {
         allowTransparency: true,
         theme: terminalColors(),
         scrollback: 10_000,
-        ghostty,
+        ghostty: g,
       })
+      cleanups.push(() => t.dispose())
+      if (disposed) {
+        cleanup()
+        return
+      }
+      ghostty = g
       term = t
 
       const copy = () => {
@@ -201,13 +227,17 @@ export const Terminal = (props: TerminalProps) => {
         return false
       })
 
-      fitAddon = new mod.FitAddon()
-      serializeAddon = new SerializeAddon()
-      t.loadAddon(serializeAddon)
-      t.loadAddon(fitAddon)
+      const fit = new mod.FitAddon()
+      const serializer = new SerializeAddon()
+      cleanups.push(() => (fit as unknown as { dispose?: VoidFunction }).dispose?.())
+      t.loadAddon(serializer)
+      t.loadAddon(fit)
+      fitAddon = fit
+      serializeAddon = serializer
 
       t.open(container)
       container.addEventListener("pointerdown", handlePointerDown)
+      cleanups.push(() => container.removeEventListener("pointerdown", handlePointerDown))
 
       handleTextareaFocus = () => {
         t.options.cursorBlink = true
@@ -218,6 +248,8 @@ export const Terminal = (props: TerminalProps) => {
 
       t.textarea?.addEventListener("focus", handleTextareaFocus)
       t.textarea?.addEventListener("blur", handleTextareaBlur)
+      cleanups.push(() => t.textarea?.removeEventListener("focus", handleTextareaFocus))
+      cleanups.push(() => t.textarea?.removeEventListener("blur", handleTextareaBlur))
 
       focusTerminal()
 
@@ -233,10 +265,11 @@ export const Terminal = (props: TerminalProps) => {
         })
       }
 
-      fitAddon.observeResize()
-      handleResize = () => fitAddon.fit()
+      fit.observeResize()
+      handleResize = () => fit.fit()
       window.addEventListener("resize", handleResize)
-      t.onResize(async (size) => {
+      cleanups.push(() => window.removeEventListener("resize", handleResize))
+      const onResize = t.onResize(async (size) => {
         if (socket.readyState === WebSocket.OPEN) {
           await sdk.client.pty
             .update({
@@ -249,20 +282,24 @@ export const Terminal = (props: TerminalProps) => {
             .catch(() => {})
         }
       })
-      t.onData((data) => {
+      cleanups.push(() => (onResize as unknown as { dispose?: VoidFunction }).dispose?.())
+      const onData = t.onData((data) => {
         if (socket.readyState === WebSocket.OPEN) {
           socket.send(data)
         }
       })
-      t.onKey((key) => {
+      cleanups.push(() => (onData as unknown as { dispose?: VoidFunction }).dispose?.())
+      const onKey = t.onKey((key) => {
         if (key.key == "Enter") {
           props.onSubmit?.()
         }
       })
+      cleanups.push(() => (onKey as unknown as { dispose?: VoidFunction }).dispose?.())
       // t.onScroll((ydisp) => {
       // console.log("Scroll position:", ydisp)
       // })
-      socket.addEventListener("open", () => {
+
+      const handleOpen = () => {
         local.onConnect?.()
         sdk.client.pty
           .update({
@@ -273,18 +310,27 @@ export const Terminal = (props: TerminalProps) => {
             },
           })
           .catch(() => {})
-      })
-      socket.addEventListener("message", (event) => {
+      }
+      socket.addEventListener("open", handleOpen)
+      cleanups.push(() => socket.removeEventListener("open", handleOpen))
+
+      const handleMessage = (event: MessageEvent) => {
         t.write(event.data)
-      })
-      socket.addEventListener("error", (error) => {
+      }
+      socket.addEventListener("message", handleMessage)
+      cleanups.push(() => socket.removeEventListener("message", handleMessage))
+
+      const handleError = (error: Event) => {
         if (disposed) return
         if (once.value) return
         once.value = true
         console.error("WebSocket error:", error)
         local.onConnectError?.(error)
-      })
-      socket.addEventListener("close", (event) => {
+      }
+      socket.addEventListener("error", handleError)
+      cleanups.push(() => socket.removeEventListener("error", handleError))
+
+      const handleClose = (event: CloseEvent) => {
         if (disposed) return
         // Normal closure (code 1000) means PTY process exited - server event handles cleanup
         // For other codes (network issues, server restart), trigger error handler
@@ -293,7 +339,9 @@ export const Terminal = (props: TerminalProps) => {
           once.value = true
           local.onConnectError?.(new Error(`WebSocket closed abnormally: ${event.code}`))
         }
-      })
+      }
+      socket.addEventListener("close", handleClose)
+      cleanups.push(() => socket.removeEventListener("close", handleClose))
     }
 
     void run().catch((err) => {
@@ -309,13 +357,6 @@ export const Terminal = (props: TerminalProps) => {
 
   onCleanup(() => {
     disposed = true
-    if (handleResize) {
-      window.removeEventListener("resize", handleResize)
-    }
-    container.removeEventListener("pointerdown", handlePointerDown)
-    term?.textarea?.removeEventListener("focus", handleTextareaFocus)
-    term?.textarea?.removeEventListener("blur", handleTextareaBlur)
-
     const t = term
     if (serializeAddon && props.onCleanup && t) {
       const buffer = (() => {
@@ -334,8 +375,7 @@ export const Terminal = (props: TerminalProps) => {
       })
     }
 
-    ws?.close()
-    t?.dispose()
+    cleanup()
   })
 
   return (

+ 70 - 2
packages/app/src/context/file.tsx

@@ -151,6 +151,28 @@ const WORKSPACE_KEY = "__workspace__"
 const MAX_FILE_VIEW_SESSIONS = 20
 const MAX_VIEW_FILES = 500
 
+const MAX_FILE_CONTENT_ENTRIES = 40
+const MAX_FILE_CONTENT_BYTES = 20 * 1024 * 1024
+
+const contentLru = new Map<string, number>()
+
+function approxBytes(content: FileContent) {
+  const patchBytes =
+    content.patch?.hunks.reduce((total, hunk) => {
+      return total + hunk.lines.reduce((sum, line) => sum + line.length, 0)
+    }, 0) ?? 0
+
+  return (content.content.length + (content.diff?.length ?? 0) + patchBytes) * 2
+}
+
+function touchContent(path: string, bytes?: number) {
+  const prev = contentLru.get(path)
+  if (prev === undefined && bytes === undefined) return
+  const value = bytes ?? prev ?? 0
+  contentLru.delete(path)
+  contentLru.set(path, value)
+}
+
 type ViewSession = ReturnType<typeof createViewSession>
 
 type ViewCacheEntry = {
@@ -315,10 +337,40 @@ export const { use: useFile, provider: FileProvider } = createSimpleContext({
       dir: { "": { expanded: true } },
     })
 
+    const evictContent = (keep?: Set<string>) => {
+      const protectedSet = keep ?? new Set<string>()
+      const total = () => {
+        return Array.from(contentLru.values()).reduce((sum, bytes) => sum + bytes, 0)
+      }
+
+      while (contentLru.size > MAX_FILE_CONTENT_ENTRIES || total() > MAX_FILE_CONTENT_BYTES) {
+        const path = contentLru.keys().next().value
+        if (!path) return
+
+        if (protectedSet.has(path)) {
+          touchContent(path)
+          if (contentLru.size <= protectedSet.size) return
+          continue
+        }
+
+        contentLru.delete(path)
+        if (!store.file[path]) continue
+        setStore(
+          "file",
+          path,
+          produce((draft) => {
+            draft.content = undefined
+            draft.loaded = false
+          }),
+        )
+      }
+    }
+
     createEffect(() => {
       scope()
       inflight.clear()
       treeInflight.clear()
+      contentLru.clear()
       setStore("file", {})
       setTree("node", {})
       setTree("dir", { "": { expanded: true } })
@@ -399,15 +451,20 @@ export const { use: useFile, provider: FileProvider } = createSimpleContext({
         .read({ path })
         .then((x) => {
           if (scope() !== directory) return
+          const content = x.data
           setStore(
             "file",
             path,
             produce((draft) => {
               draft.loaded = true
               draft.loading = false
-              draft.content = x.data
+              draft.content = content
             }),
           )
+
+          if (!content) return
+          touchContent(path, approxBytes(content))
+          evictContent(new Set([path]))
         })
         .catch((e) => {
           if (scope() !== directory) return
@@ -597,7 +654,18 @@ export const { use: useFile, provider: FileProvider } = createSimpleContext({
       listDir(parent, { force: true })
     })
 
-    const get = (input: string) => store.file[normalize(input)]
+    const get = (input: string) => {
+      const path = normalize(input)
+      const file = store.file[path]
+      const content = file?.content
+      if (!content) return file
+      if (contentLru.has(path)) {
+        touchContent(path)
+        return file
+      }
+      touchContent(path, approxBytes(content))
+      return file
+    }
 
     const scrollTop = (input: string) => view().scrollTop(normalize(input))
     const scrollLeft = (input: string) => view().scrollLeft(normalize(input))

+ 39 - 8
packages/app/src/context/global-sync.tsx

@@ -546,6 +546,37 @@ function createGlobalSync() {
     return promise
   }
 
+  function purgeMessageParts(setStore: SetStoreFunction<State>, messageID: string | undefined) {
+    if (!messageID) return
+    setStore(
+      produce((draft) => {
+        delete draft.part[messageID]
+      }),
+    )
+  }
+
+  function purgeSessionData(store: Store<State>, setStore: SetStoreFunction<State>, sessionID: string | undefined) {
+    if (!sessionID) return
+
+    const messages = store.message[sessionID]
+    const messageIDs = (messages ?? []).map((m) => m.id).filter((id): id is string => !!id)
+
+    setStore(
+      produce((draft) => {
+        delete draft.message[sessionID]
+        delete draft.session_diff[sessionID]
+        delete draft.todo[sessionID]
+        delete draft.permission[sessionID]
+        delete draft.question[sessionID]
+        delete draft.session_status[sessionID]
+
+        for (const messageID of messageIDs) {
+          delete draft.part[messageID]
+        }
+      }),
+    )
+  }
+
   const unsub = globalSDK.event.listen((e) => {
     const directory = e.name
     const event = e.details
@@ -651,9 +682,7 @@ function createGlobalSync() {
               }),
             )
           }
-
           cleanupSessionCaches(info.id)
-
           if (info.parentID) break
           setStore("sessionTotal", (value) => Math.max(0, value - 1))
           break
@@ -679,9 +708,7 @@ function createGlobalSync() {
             }),
           )
         }
-
         cleanupSessionCaches(sessionID)
-
         if (event.properties.info.parentID) break
         setStore("sessionTotal", (value) => Math.max(0, value - 1))
         break
@@ -757,15 +784,19 @@ function createGlobalSync() {
         break
       }
       case "message.part.removed": {
-        const parts = store.part[event.properties.messageID]
+        const messageID = event.properties.messageID
+        const parts = store.part[messageID]
         if (!parts) break
         const result = Binary.search(parts, event.properties.partID, (p) => p.id)
         if (result.found) {
           setStore(
-            "part",
-            event.properties.messageID,
             produce((draft) => {
-              draft.splice(result.index, 1)
+              const list = draft.part[messageID]
+              if (!list) return
+              const next = Binary.search(list, event.properties.partID, (p) => p.id)
+              if (!next.found) return
+              list.splice(next.index, 1)
+              if (list.length === 0) delete draft.part[messageID]
             }),
           )
         }

+ 21 - 3
packages/app/src/context/permission.tsx

@@ -67,7 +67,21 @@ export const { use: usePermission, provider: PermissionProvider } = createSimple
       }),
     )
 
-    const responded = new Set<string>()
+    const MAX_RESPONDED = 1000
+    const RESPONDED_TTL_MS = 60 * 60 * 1000
+    const responded = new Map<string, number>()
+
+    function pruneResponded(now: number) {
+      for (const [id, ts] of responded) {
+        if (now - ts < RESPONDED_TTL_MS) break
+        responded.delete(id)
+      }
+
+      for (const id of responded.keys()) {
+        if (responded.size <= MAX_RESPONDED) break
+        responded.delete(id)
+      }
+    }
 
     const respond: PermissionRespondFn = (input) => {
       globalSDK.client.permission.respond(input).catch(() => {
@@ -76,8 +90,12 @@ export const { use: usePermission, provider: PermissionProvider } = createSimple
     }
 
     function respondOnce(permission: PermissionRequest, directory?: string) {
-      if (responded.has(permission.id)) return
-      responded.add(permission.id)
+      const now = Date.now()
+      const hit = responded.has(permission.id)
+      responded.delete(permission.id)
+      responded.set(permission.id, now)
+      pruneResponded(now)
+      if (hit) return
       respond({
         sessionID: permission.sessionID,
         permissionID: permission.id,

+ 48 - 2
packages/app/src/pages/layout.tsx

@@ -685,12 +685,34 @@ export default function Layout(props: ParentProps) {
     running: number
   }
 
-  const prefetchChunk = 600
+  const prefetchChunk = 200
   const prefetchConcurrency = 1
   const prefetchPendingLimit = 6
   const prefetchToken = { value: 0 }
   const prefetchQueues = new Map<string, PrefetchQueue>()
 
+  const PREFETCH_MAX_SESSIONS_PER_DIR = 10
+  const prefetchedByDir = new Map<string, Map<string, true>>()
+
+  const lruFor = (directory: string) => {
+    const existing = prefetchedByDir.get(directory)
+    if (existing) return existing
+    const created = new Map<string, true>()
+    prefetchedByDir.set(directory, created)
+    return created
+  }
+
+  const markPrefetched = (directory: string, sessionID: string) => {
+    const lru = lruFor(directory)
+    if (lru.has(sessionID)) lru.delete(sessionID)
+    lru.set(sessionID, true)
+    while (lru.size > PREFETCH_MAX_SESSIONS_PER_DIR) {
+      const oldest = lru.keys().next().value as string | undefined
+      if (!oldest) return
+      lru.delete(oldest)
+    }
+  }
+
   createEffect(() => {
     params.dir
     globalSDK.url
@@ -783,6 +805,11 @@ export default function Layout(props: ParentProps) {
     if (q.inflight.has(session.id)) return
     if (q.pendingSet.has(session.id)) return
 
+    const lru = lruFor(directory)
+    const known = lru.has(session.id)
+    if (!known && lru.size >= PREFETCH_MAX_SESSIONS_PER_DIR && priority !== "high") return
+    markPrefetched(directory, session.id)
+
     if (priority === "high") q.pending.unshift(session.id)
     if (priority !== "high") q.pending.push(session.id)
     q.pendingSet.add(session.id)
@@ -1669,6 +1696,22 @@ export default function Layout(props: ParentProps) {
       pendingRename: false,
     })
 
+    const hoverPrefetch = { current: undefined as ReturnType<typeof setTimeout> | undefined }
+    const cancelHoverPrefetch = () => {
+      if (hoverPrefetch.current === undefined) return
+      clearTimeout(hoverPrefetch.current)
+      hoverPrefetch.current = undefined
+    }
+    const scheduleHoverPrefetch = () => {
+      if (hoverPrefetch.current !== undefined) return
+      hoverPrefetch.current = setTimeout(() => {
+        hoverPrefetch.current = undefined
+        prefetchSession(props.session)
+      }, 200)
+    }
+
+    onCleanup(cancelHoverPrefetch)
+
     const messageLabel = (message: Message) => {
       const parts = sessionStore.part[message.id] ?? []
       const text = parts.find((part): part is TextPart => part?.type === "text" && !part.synthetic && !part.ignored)
@@ -1679,7 +1722,10 @@ export default function Layout(props: ParentProps) {
       <A
         href={`${props.slug}/session/${props.session.id}`}
         class={`flex items-center justify-between gap-3 min-w-0 text-left w-full focus:outline-none transition-[padding] ${menu.open ? "pr-7" : ""} group-hover/session:pr-7 group-focus-within/session:pr-7 group-active/session:pr-7 ${props.dense ? "py-0.5" : "py-1"}`}
-        onMouseEnter={() => prefetchSession(props.session, "high")}
+        onPointerEnter={scheduleHoverPrefetch}
+        onPointerLeave={cancelHoverPrefetch}
+        onMouseEnter={scheduleHoverPrefetch}
+        onMouseLeave={cancelHoverPrefetch}
         onFocus={() => prefetchSession(props.session, "high")}
         onClick={() => {
           setState("hoverSession", undefined)

+ 62 - 10
packages/app/src/utils/persist.ts

@@ -18,7 +18,52 @@ const LEGACY_STORAGE = "default.dat"
 const GLOBAL_STORAGE = "opencode.global.dat"
 const LOCAL_PREFIX = "opencode."
 const fallback = { disabled: false }
-const cache = new Map<string, string>()
+
+const CACHE_MAX_ENTRIES = 500
+const CACHE_MAX_BYTES = 8 * 1024 * 1024
+
+type CacheEntry = { value: string; bytes: number }
+const cache = new Map<string, CacheEntry>()
+const cacheTotal = { bytes: 0 }
+
+function cacheDelete(key: string) {
+  const entry = cache.get(key)
+  if (!entry) return
+  cacheTotal.bytes -= entry.bytes
+  cache.delete(key)
+}
+
+function cachePrune() {
+  for (;;) {
+    if (cache.size <= CACHE_MAX_ENTRIES && cacheTotal.bytes <= CACHE_MAX_BYTES) return
+    const oldest = cache.keys().next().value as string | undefined
+    if (!oldest) return
+    cacheDelete(oldest)
+  }
+}
+
+function cacheSet(key: string, value: string) {
+  const bytes = value.length * 2
+  if (bytes > CACHE_MAX_BYTES) {
+    cacheDelete(key)
+    return
+  }
+
+  const entry = cache.get(key)
+  if (entry) cacheTotal.bytes -= entry.bytes
+  cache.delete(key)
+  cache.set(key, { value, bytes })
+  cacheTotal.bytes += bytes
+  cachePrune()
+}
+
+function cacheGet(key: string) {
+  const entry = cache.get(key)
+  if (!entry) return
+  cache.delete(key)
+  cache.set(key, entry)
+  return entry.value
+}
 
 function quota(error: unknown) {
   if (error instanceof DOMException) {
@@ -63,9 +108,11 @@ function evict(storage: Storage, keep: string, value: string) {
 
   for (const item of items) {
     storage.removeItem(item.key)
+    cacheDelete(item.key)
 
     try {
       storage.setItem(keep, value)
+      cacheSet(keep, value)
       return true
     } catch (error) {
       if (!quota(error)) throw error
@@ -78,6 +125,7 @@ function evict(storage: Storage, keep: string, value: string) {
 function write(storage: Storage, key: string, value: string) {
   try {
     storage.setItem(key, value)
+    cacheSet(key, value)
     return true
   } catch (error) {
     if (!quota(error)) throw error
@@ -85,13 +133,17 @@ function write(storage: Storage, key: string, value: string) {
 
   try {
     storage.removeItem(key)
+    cacheDelete(key)
     storage.setItem(key, value)
+    cacheSet(key, value)
     return true
   } catch (error) {
     if (!quota(error)) throw error
   }
 
-  return evict(storage, key, value)
+  const ok = evict(storage, key, value)
+  if (!ok) cacheSet(key, value)
+  return ok
 }
 
 function snapshot(value: unknown) {
@@ -148,7 +200,7 @@ function localStorageWithPrefix(prefix: string): SyncStorage {
   return {
     getItem: (key) => {
       const name = item(key)
-      const cached = cache.get(name)
+      const cached = cacheGet(name)
       if (fallback.disabled && cached !== undefined) return cached
 
       const stored = (() => {
@@ -160,12 +212,12 @@ function localStorageWithPrefix(prefix: string): SyncStorage {
         }
       })()
       if (stored === null) return cached ?? null
-      cache.set(name, stored)
+      cacheSet(name, stored)
       return stored
     },
     setItem: (key, value) => {
       const name = item(key)
-      cache.set(name, value)
+      cacheSet(name, value)
       if (fallback.disabled) return
       try {
         if (write(localStorage, name, value)) return
@@ -177,7 +229,7 @@ function localStorageWithPrefix(prefix: string): SyncStorage {
     },
     removeItem: (key) => {
       const name = item(key)
-      cache.delete(name)
+      cacheDelete(name)
       if (fallback.disabled) return
       try {
         localStorage.removeItem(name)
@@ -191,7 +243,7 @@ function localStorageWithPrefix(prefix: string): SyncStorage {
 function localStorageDirect(): SyncStorage {
   return {
     getItem: (key) => {
-      const cached = cache.get(key)
+      const cached = cacheGet(key)
       if (fallback.disabled && cached !== undefined) return cached
 
       const stored = (() => {
@@ -203,11 +255,11 @@ function localStorageDirect(): SyncStorage {
         }
       })()
       if (stored === null) return cached ?? null
-      cache.set(key, stored)
+      cacheSet(key, stored)
       return stored
     },
     setItem: (key, value) => {
-      cache.set(key, value)
+      cacheSet(key, value)
       if (fallback.disabled) return
       try {
         if (write(localStorage, key, value)) return
@@ -218,7 +270,7 @@ function localStorageDirect(): SyncStorage {
       fallback.disabled = true
     },
     removeItem: (key) => {
-      cache.delete(key)
+      cacheDelete(key)
       if (fallback.disabled) return
       try {
         localStorage.removeItem(key)

+ 29 - 10
packages/app/src/utils/speech.ts

@@ -78,6 +78,7 @@ export function createSpeechRecognition(opts?: {
   let lastInterimSuffix = ""
   let shrinkCandidate: string | undefined
   let commitTimer: number | undefined
+  let restartTimer: number | undefined
 
   const cancelPendingCommit = () => {
     if (commitTimer === undefined) return
@@ -85,6 +86,26 @@ export function createSpeechRecognition(opts?: {
     commitTimer = undefined
   }
 
+  const clearRestart = () => {
+    if (restartTimer === undefined) return
+    window.clearTimeout(restartTimer)
+    restartTimer = undefined
+  }
+
+  const scheduleRestart = () => {
+    clearRestart()
+    if (!shouldContinue) return
+    if (!recognition) return
+    restartTimer = window.setTimeout(() => {
+      restartTimer = undefined
+      if (!shouldContinue) return
+      if (!recognition) return
+      try {
+        recognition.start()
+      } catch {}
+    }, 150)
+  }
+
   const commitSegment = (segment: string) => {
     const nextCommitted = appendSegment(committedText, segment)
     if (nextCommitted === committedText) return
@@ -214,17 +235,14 @@ export function createSpeechRecognition(opts?: {
     }
 
     recognition.onerror = (e: { error: string }) => {
+      clearRestart()
       cancelPendingCommit()
       lastInterimSuffix = ""
       shrinkCandidate = undefined
       if (e.error === "no-speech" && shouldContinue) {
         setStore("interim", "")
         if (opts?.onInterim) opts.onInterim("")
-        setTimeout(() => {
-          try {
-            recognition?.start()
-          } catch {}
-        }, 150)
+        scheduleRestart()
         return
       }
       shouldContinue = false
@@ -232,6 +250,7 @@ export function createSpeechRecognition(opts?: {
     }
 
     recognition.onstart = () => {
+      clearRestart()
       sessionCommitted = ""
       pendingHypothesis = ""
       cancelPendingCommit()
@@ -243,22 +262,20 @@ export function createSpeechRecognition(opts?: {
     }
 
     recognition.onend = () => {
+      clearRestart()
       cancelPendingCommit()
       lastInterimSuffix = ""
       shrinkCandidate = undefined
       setStore("isRecording", false)
       if (shouldContinue) {
-        setTimeout(() => {
-          try {
-            recognition?.start()
-          } catch {}
-        }, 150)
+        scheduleRestart()
       }
     }
   }
 
   const start = () => {
     if (!recognition) return
+    clearRestart()
     shouldContinue = true
     sessionCommitted = ""
     pendingHypothesis = ""
@@ -274,6 +291,7 @@ export function createSpeechRecognition(opts?: {
   const stop = () => {
     if (!recognition) return
     shouldContinue = false
+    clearRestart()
     promotePending()
     cancelPendingCommit()
     lastInterimSuffix = ""
@@ -287,6 +305,7 @@ export function createSpeechRecognition(opts?: {
 
   onCleanup(() => {
     shouldContinue = false
+    clearRestart()
     promotePending()
     cancelPendingCommit()
     lastInterimSuffix = ""

+ 32 - 17
packages/app/src/utils/worktree.ts

@@ -13,7 +13,21 @@ type State =
     }
 
 const state = new Map<string, State>()
-const waiters = new Map<string, Array<(state: State) => void>>()
+const waiters = new Map<
+  string,
+  {
+    promise: Promise<State>
+    resolve: (state: State) => void
+  }
+>()
+
+function deferred() {
+  const box = { resolve: (_: State) => {} }
+  const promise = new Promise<State>((resolve) => {
+    box.resolve = resolve
+  })
+  return { promise, resolve: box.resolve }
+}
 
 export const Worktree = {
   get(directory: string) {
@@ -27,32 +41,33 @@ export const Worktree = {
   },
   ready(directory: string) {
     const key = normalize(directory)
-    state.set(key, { status: "ready" })
-    const list = waiters.get(key)
-    if (!list) return
+    const next = { status: "ready" } as const
+    state.set(key, next)
+    const waiter = waiters.get(key)
+    if (!waiter) return
     waiters.delete(key)
-    for (const fn of list) fn({ status: "ready" })
+    waiter.resolve(next)
   },
   failed(directory: string, message: string) {
     const key = normalize(directory)
-    state.set(key, { status: "failed", message })
-    const list = waiters.get(key)
-    if (!list) return
+    const next = { status: "failed", message } as const
+    state.set(key, next)
+    const waiter = waiters.get(key)
+    if (!waiter) return
     waiters.delete(key)
-    for (const fn of list) fn({ status: "failed", message })
+    waiter.resolve(next)
   },
   wait(directory: string) {
     const key = normalize(directory)
     const current = state.get(key)
     if (current && current.status !== "pending") return Promise.resolve(current)
 
-    return new Promise<State>((resolve) => {
-      const list = waiters.get(key)
-      if (!list) {
-        waiters.set(key, [resolve])
-        return
-      }
-      list.push(resolve)
-    })
+    const existing = waiters.get(key)
+    if (existing) return existing.promise
+
+    const waiter = deferred()
+
+    waiters.set(key, waiter)
+    return waiter.promise
   },
 }

+ 108 - 0
specs/09-persist-cache-memory-bounds.md

@@ -0,0 +1,108 @@
+## Persist in-memory cache bounds
+
+Fix unbounded `persist.ts` string cache growth
+
+---
+
+### Summary
+
+`packages/app/src/utils/persist.ts` maintains a module-level `cache = new Map<string, string>()` that mirrors values written to storage. This cache can retain very large JSON strings (prompt history, image dataUrls, terminal buffers) indefinitely, even after the underlying `localStorage` keys are evicted. Over long sessions this can become an in-process memory leak.
+
+This spec adds explicit bounds (entries + approximate bytes) and makes eviction/removal paths delete from the in-memory cache.
+
+---
+
+### Scoped files (parallel-safe)
+
+- `packages/app/src/utils/persist.ts`
+
+---
+
+### Goals
+
+- Prevent unbounded memory growth from the module-level persist cache
+- Ensure keys removed/evicted from storage are also removed from the in-memory cache
+- Preserve current semantics when `localStorage` access throws (fallback mode)
+- Keep changes self-contained to `persist.ts`
+
+---
+
+### Non-goals
+
+- Changing persisted schemas or moving payloads out of KV storage (covered elsewhere)
+- Introducing a shared cache utility used by multiple modules
+
+---
+
+### Current state
+
+- `cache` stores raw strings for every key read/written.
+- `evict()` removes items from `localStorage` but does not clear the corresponding entries from `cache`.
+- When `localStorage` is unavailable (throws), reads fall back to `cache`, so the cache is also a functional in-memory persistence layer.
+
+---
+
+### Proposed approach
+
+1. Replace `cache: Map<string, string>` with a bounded LRU-like map
+
+- Store `{ value: string, bytes: number }` per key.
+- Maintain a running `totalBytes`.
+- Enforce caps:
+  - `CACHE_MAX_ENTRIES` (e.g. 500)
+  - `CACHE_MAX_BYTES` (e.g. `8 * 1024 * 1024`)
+- Use Map insertion order as LRU:
+  - On `get`, re-insert the key to the end.
+  - On `set`, insert/update then evict oldest until within bounds.
+- Approximate bytes as `value.length * 2` (UTF-16) to avoid `TextEncoder` allocations.
+
+2. Ensure all removal paths clear the in-memory cache
+
+- In `localStorageDirect().removeItem` and `localStorageWithPrefix().removeItem`: already calls `cache.delete(name)`; keep.
+- In `write(...)` failure recovery where it calls `storage.removeItem(key)`: also `cache.delete(key)`.
+- In `evict(...)` loop where it removes large keys: also `cache.delete(item.key)`.
+
+3. Add a small dev-only diagnostic (optional)
+
+- In dev, expose a lightweight `cacheStats()` helper (entries, totalBytes) for debugging memory reports.
+
+---
+
+### Implementation steps
+
+1. Introduce constants and cache helpers in `persist.ts`
+
+- `const CACHE_MAX_ENTRIES = ...`
+- `const CACHE_MAX_BYTES = ...`
+- `function cacheSet(key: string, value: string)`
+- `function cacheGet(key: string): string | undefined`
+- `function cacheDelete(key: string)`
+- `function cachePrune()`
+
+2. Route all existing `cache.set/get/delete` calls through helpers
+
+- `localStorageDirect()`
+- `localStorageWithPrefix()`
+
+3. Update `evict()` and `write()` to delete from cache when deleting from storage
+
+4. (Optional) Add dev logging guardrails
+
+- If a single value exceeds `CACHE_MAX_BYTES`, cache it but immediately prune older keys (or refuse to cache it and keep behavior consistent).
+
+---
+
+### Acceptance criteria
+
+- Repeatedly loading/saving large persisted values does not cause unbounded `cache` growth (entries and bytes are capped)
+- Values removed from `localStorage` by `evict()` are not returned later via the in-memory cache
+- App remains functional when `localStorage` throws (fallback mode still returns cached values, subject to caps)
+
+---
+
+### Validation plan
+
+- Manual:
+  - Open app, perform actions that write large state (image attachments, terminal output, long sessions)
+  - Use browser memory tools to confirm JS heap does not grow linearly with repeated writes
+  - Simulate quota eviction (force small storage quota / fill storage) and confirm `cache` does not retain evicted keys indefinitely

+ 106 - 0
specs/10-global-sync-message-part-cleanup.md

@@ -0,0 +1,106 @@
+## GlobalSync message/part cleanup
+
+Prevent stale message parts and per-session maps from accumulating
+
+---
+
+### Summary
+
+`packages/app/src/context/global-sync.tsx` keeps per-directory child stores that include:
+
+- `message: { [sessionID]: Message[] }`
+- `part: { [messageID]: Part[] }`
+
+Currently:
+
+- `message.removed` removes the message from `message[sessionID]` but does not delete `part[messageID]`.
+- `session.deleted` / archived sessions remove the session from the list but do not clear `message[...]`, `part[...]`, `session_diff[...]`, `todo[...]`, etc.
+
+This can retain large arrays long after they are no longer reachable from UI.
+
+This spec adds explicit cleanup on the relevant events without changing cache strategy or introducing new cross-module eviction utilities.
+
+---
+
+### Scoped files (parallel-safe)
+
+- `packages/app/src/context/global-sync.tsx`
+
+---
+
+### Goals
+
+- Delete `part[messageID]` when a message is removed
+- Clear per-session maps when a session is deleted or archived
+- Keep changes limited to event handling in `global-sync.tsx`
+
+---
+
+### Non-goals
+
+- Implementing LRU/TTL eviction across sessions/directories (separate work)
+- Changing how `sync.tsx` or layout prefetch populates message caches
+
+---
+
+### Current state
+
+- Message list and part map can diverge over time.
+- Deleting/archiving sessions does not reclaim memory for message history, parts, diffs, todos, permissions, questions.
+
+---
+
+### Proposed approach
+
+Add small helper functions inside `createGlobalSync()` to keep the event handlers readable:
+
+- `purgeMessageParts(setStore, messageID)`
+- `purgeSessionData(store, setStore, sessionID)`
+  - delete `message[sessionID]`
+  - for each known message in that list, delete `part[messageID]`
+  - delete `session_diff[sessionID]`, `todo[sessionID]`, `permission[sessionID]`, `question[sessionID]`, `session_status[sessionID]`
+
+Then wire these into the existing event switch:
+
+- `message.removed`: after removing from `message[sessionID]`, also delete `part[messageID]`
+- `session.updated` when `time.archived` is set: call `purgeSessionData(...)`
+- `session.deleted`: call `purgeSessionData(...)`
+
+Notes:
+
+- Use `setStore(produce(...))` to `delete` object keys safely.
+- Purge should be idempotent (safe if called when keys are missing).
+
+---
+
+### Implementation steps
+
+1. Add purge helpers near the event listener in `global-sync.tsx`
+
+2. Update event handlers
+
+- `message.removed`
+- `session.updated` (archived path)
+- `session.deleted`
+
+3. (Optional) Tighten `message.part.removed`
+
+- If a part list becomes empty after removal, optionally delete `part[messageID]` as well.
+
+---
+
+### Acceptance criteria
+
+- After a `message.removed` event, `store.part[messageID]` is `undefined`
+- After `session.deleted` or archive, all per-session maps for that `sessionID` are removed
+- No runtime errors when purging sessions/messages that were never hydrated
+
+---
+
+### Validation plan
+
+- Manual:
+  - Load a session with many messages
+  - Trigger message delete/remove events (or simulate by calling handlers in dev)
+  - Confirm the associated `part` entries are removed
+  - Delete/archive a session and confirm globalSync store no longer holds its message/part data

+ 103 - 0
specs/11-layout-prefetch-memory-budget.md

@@ -0,0 +1,103 @@
+## Layout prefetch memory budget
+
+Reduce sidebar hover prefetch from ballooning message caches
+
+---
+
+### Summary
+
+`packages/app/src/pages/layout.tsx` prefetches message history into `globalSync.child(directory)`.
+
+On hover and navigation, the current implementation can prefetch many sessions (each up to `prefetchChunk = 600` messages + parts). Since the global sync store has no eviction today, scanning the sidebar can cause permanent memory growth.
+
+This spec limits how much we prefetch (chunk size + number of sessions) and adds debounced hover prefetch to avoid accidental flooding.
+
+---
+
+### Scoped files (parallel-safe)
+
+- `packages/app/src/pages/layout.tsx`
+
+---
+
+### Goals
+
+- Reduce the maximum amount of data prefetched per session
+- Limit the number of sessions that can be prefetched per directory per app lifetime
+- Avoid triggering prefetch for brief/accidental hovers
+- Keep changes local to `layout.tsx`
+
+---
+
+### Non-goals
+
+- Implementing eviction of already-prefetched data inside global sync (separate work)
+- Changing server APIs
+
+---
+
+### Current state
+
+- `prefetchChunk` is 600.
+- Hovering many sessions can enqueue many prefetches over time.
+- Once prefetched, message/part data remains in memory until reload.
+
+---
+
+### Proposed approach
+
+1. Lower the prefetch page size
+
+- Change `prefetchChunk` from 600 to a smaller value (e.g. 200).
+- Rationale: prefetch is for fast first render; the session page can load more as needed.
+
+2. Add a per-directory prefetch budget
+
+- Track a small LRU of prefetched session IDs per directory (Map insertion order).
+- Add `PREFETCH_MAX_SESSIONS_PER_DIR` (e.g. 8-12).
+- Before queueing a new prefetch:
+  - if already cached in `store.message[sessionID]`, allow
+  - else if budget exceeded and priority is not `high`, skip
+  - else allow and record in LRU
+
+3. Debounce hover-triggered prefetch
+
+- For sidebar session entries that call `prefetchSession(..., "high")` on hover:
+  - schedule after ~150-250ms
+  - cancel if pointer leaves before the timer fires
+
+---
+
+### Implementation steps
+
+1. Update constants in `layout.tsx`
+
+- `prefetchChunk`
+- add `prefetchMaxSessionsPerDir`
+
+2. Add `prefetchedByDir: Map<string, Map<string, true>>` (or similar)
+
+- Helper: `markPrefetched(directory, sessionID)` with LRU behavior and max size.
+- Helper: `canPrefetch(directory, sessionID, priority)`.
+
+3. Integrate checks into `prefetchSession(...)`
+
+4. Debounce hover prefetch in the sidebar session item component (still in `layout.tsx`)
+
+---
+
+### Acceptance criteria
+
+- Prefetch requests never fetch more than the new `prefetchChunk` messages per session
+- For a given directory, the total number of prefetched sessions does not exceed the configured budget (except current/adjacent high-priority navigations if explicitly allowed)
+- Rapid mouse movement over the session list does not trigger a prefetch storm
+
+---
+
+### Validation plan
+
+- Manual:
+  - Open sidebar with many sessions
+  - Move cursor over session list quickly; confirm few/no prefetch requests
+  - Hover intentionally; confirm prefetch happens after debounce
+  - Confirm the number of prefetched sessions per directory stays capped (via dev logging or inspecting store)

+ 92 - 0
specs/12-terminal-unmount-race-cleanup.md

@@ -0,0 +1,92 @@
+## Terminal unmount race cleanup
+
+Prevent Ghostty Terminal/WebSocket leaks when unmounting mid-init
+
+---
+
+### Summary
+
+`packages/app/src/components/terminal.tsx` initializes Ghostty in `onMount` via async steps (`import("ghostty-web")`, `Ghostty.load()`, WebSocket creation, terminal creation, listeners). If the component unmounts while awaits are pending, `onCleanup` runs before `ws`/`term` exist. The async init can then continue and create resources that never get disposed.
+
+This spec makes initialization abortable and ensures resources created after unmount are immediately cleaned up.
+
+---
+
+### Scoped files (parallel-safe)
+
+- `packages/app/src/components/terminal.tsx`
+
+---
+
+### Goals
+
+- Never leave a WebSocket open after the terminal component unmounts
+- Never leave window/container/textarea event listeners attached after unmount
+- Avoid creating terminal resources if `disposed` is already true
+
+---
+
+### Non-goals
+
+- Reworking terminal buffering/persistence format
+- Changing PTY server protocol
+
+---
+
+### Current state
+
+- `disposed` is checked in some WebSocket event handlers, but not during async init.
+- `onCleanup` closes/disposes only the resources already assigned at cleanup time.
+
+---
+
+### Proposed approach
+
+1. Guard async init steps
+
+- After each `await`, check `disposed` and return early.
+
+2. Register cleanups as resources are created
+
+- Maintain an array of cleanup callbacks (`cleanups: VoidFunction[]`).
+- When creating `socket`, `term`, adding event listeners, etc., push the corresponding cleanup.
+- In `onCleanup`, run all registered cleanups exactly once.
+
+3. Avoid mutating shared vars until safe
+
+- Prefer local variables inside `run()` and assign to outer `ws`/`term` only after confirming not disposed.
+
+---
+
+### Implementation steps
+
+1. Add `const cleanups: VoidFunction[] = []` and `const cleanup = () => { ... }` in component scope
+
+2. In `onCleanup`, set `disposed = true` and call `cleanup()`
+
+3. In `run()`:
+
+- `await import(...)` -> if disposed return
+- `await Ghostty.load()` -> if disposed return
+- create WebSocket -> if disposed, close it and return
+- create Terminal -> if disposed, dispose + close socket and return
+- when adding listeners, register removers in `cleanups`
+
+4. Ensure `cleanup()` is idempotent
+
+---
+
+### Acceptance criteria
+
+- Rapidly mounting/unmounting terminal components does not leave open WebSockets
+- No `resize` listeners remain after unmount
+- No errors are thrown if unmount occurs mid-initialization
+
+---
+
+### Validation plan
+
+- Manual:
+  - Open a session and rapidly switch sessions/tabs to force terminal unmount/mount
+  - Verify via devtools that no orphan WebSocket connections remain
+  - Verify that terminal continues to work normally when kept mounted

+ 73 - 0
specs/13-speech-recognition-timeout-cleanup.md

@@ -0,0 +1,73 @@
+## Speech recognition timeout cleanup
+
+Stop stray restart timers from keeping recognition alive
+
+---
+
+### Summary
+
+`packages/app/src/utils/speech.ts` schedules 150ms `setTimeout` restarts in `recognition.onerror` and `recognition.onend` when `shouldContinue` is true. These timers are not tracked or cleared, so they can fire after `stop()`/cleanup and call `recognition.start()`, keeping recognition + closures alive unexpectedly.
+
+This spec tracks restart timers explicitly and clears them on stop/cleanup.
+
+---
+
+### Scoped files (parallel-safe)
+
+- `packages/app/src/utils/speech.ts`
+
+---
+
+### Goals
+
+- Ensure no restart timers remain scheduled after `stop()` or `onCleanup`
+- Prevent `recognition.start()` from being called after cleanup
+- Keep behavior identical in the normal recording flow
+
+---
+
+### Non-goals
+
+- Changing the recognition UX/state machine beyond timer tracking
+
+---
+
+### Proposed approach
+
+- Add `let restartTimer: number | undefined`.
+- Add helpers:
+  - `clearRestart()`
+  - `scheduleRestart()` (guards `shouldContinue` + `recognition`)
+- Replace both raw `setTimeout(..., 150)` uses with `window.setTimeout` stored in `restartTimer`.
+- Call `clearRestart()` in:
+  - `start()`
+  - `stop()`
+  - `onCleanup(...)`
+  - `recognition.onstart` (reset state)
+  - any path that exits recording due to error
+
+---
+
+### Implementation steps
+
+1. Introduce `restartTimer` and helpers
+
+2. Replace `setTimeout(() => recognition?.start(), 150)` occurrences
+
+3. Clear the timer in all stop/cleanup paths
+
+---
+
+### Acceptance criteria
+
+- After calling `stop()` or disposing the creator, there are no delayed restarts
+- No unexpected `recognition.start()` calls occur after recording is stopped
+
+---
+
+### Validation plan
+
+- Manual:
+  - Start/stop recording repeatedly
+  - Trigger a `no-speech` error and confirm restarts only happen while recording is active
+  - Navigate away/unmount the component using `createSpeechRecognition` and confirm no restarts happen afterward

+ 71 - 0
specs/14-prompt-worktree-timeout-cleanup.md

@@ -0,0 +1,71 @@
+## Prompt worktree timeout cleanup
+
+Clear `waitForWorktree()` timers to avoid retaining closures for 5 minutes
+
+---
+
+### Summary
+
+`packages/app/src/components/prompt-input.tsx` creates a 5-minute `setTimeout` inside `waitForWorktree()` as part of a `Promise.race`. If the worktree becomes ready quickly, the timeout still stays scheduled until it fires, retaining its closure (which can capture session and UI state) for up to 5 minutes. Repeated sends can accumulate many concurrent long-lived timers.
+
+This spec makes the timeout cancelable and clears it when the race finishes.
+
+---
+
+### Scoped files (parallel-safe)
+
+- `packages/app/src/components/prompt-input.tsx`
+
+---
+
+### Goals
+
+- Ensure the 5-minute timeout is cleared as soon as `Promise.race` resolves
+- Avoid retaining large closures unnecessarily
+- Keep behavior identical for real timeouts
+
+---
+
+### Non-goals
+
+- Changing the worktree wait UX
+- Changing the WorktreeState API
+
+---
+
+### Proposed approach
+
+- Track the timeout handle explicitly:
+  - `let timeoutId: number | undefined`
+  - `timeoutId = window.setTimeout(...)`
+
+- After `Promise.race(...)` resolves (success, abort, or timeout), call `clearTimeout(timeoutId)` when set.
+
+- Keep the existing 5-minute duration and result handling.
+
+---
+
+### Implementation steps
+
+1. In `waitForWorktree()` create the timeout promise with an outer `timeoutId` variable
+
+2. After awaiting the race, clear the timeout if it exists
+
+3. Ensure `pending.delete(session.id)` and UI cleanup behavior remains unchanged
+
+---
+
+### Acceptance criteria
+
+- When the worktree becomes ready quickly, no 5-minute timeout remains scheduled
+- When the worktree truly times out, behavior is unchanged (same error shown, same cleanup)
+
+---
+
+### Validation plan
+
+- Manual:
+  - Trigger prompt send in a directory that is already ready; confirm no long timers remain (devtools)
+  - Trigger a worktree pending state and confirm:
+    - timeout fires at ~5 minutes
+    - cleanup runs

+ 96 - 0
specs/15-file-content-cache-bounds.md

@@ -0,0 +1,96 @@
+## File content cache bounds
+
+Add explicit caps for loaded file contents in `FileProvider`
+
+---
+
+### Summary
+
+`packages/app/src/context/file.tsx` caches file contents in-memory (`store.file[path].content`) for every loaded file within the current directory scope. Over a long session (reviewing many diffs/files), this can grow without bound.
+
+This spec adds an in-module LRU + size cap for file _contents_ only, keeping metadata entries while evicting the heavy payload.
+
+---
+
+### Scoped files (parallel-safe)
+
+- `packages/app/src/context/file.tsx`
+
+---
+
+### Goals
+
+- Cap the number of in-memory file contents retained per directory
+- Optionally cap total approximate bytes across loaded contents
+- Avoid evicting content that is actively being used/rendered
+- Keep changes localized to `file.tsx`
+
+---
+
+### Non-goals
+
+- Changing persisted file-view state (`file-view`) limits (already pruned separately)
+- Introducing a shared cache utility used elsewhere
+
+---
+
+### Proposed approach
+
+1. Track content entries in an LRU Map
+
+- `const contentLru = new Map<string, number>()` where value is approximate bytes.
+- On successful `load(path)` completion, call `touchContent(path, bytes)`.
+- In `get(path)`, if the file has `content`, call `touchContent(path)` to keep active files hot.
+
+2. Evict least-recently-used contents when over cap
+
+- Add constants:
+  - `MAX_FILE_CONTENT_ENTRIES` (e.g. 30-50)
+  - `MAX_FILE_CONTENT_BYTES` (optional; e.g. 10-25MB)
+- When evicting a path:
+  - remove it from `contentLru`
+  - clear `store.file[path].content`
+  - set `store.file[path].loaded = false` (or keep loaded but ensure UI can reload)
+
+3. Reset LRU on directory scope change
+
+- The existing scope reset already clears `store.file`; also clear the LRU map.
+
+---
+
+### Implementation steps
+
+1. Add LRU state + helper functions
+
+- `approxBytes(fileContent)` (prefer `content.length * 2`)
+- `touchContent(path, bytes?)`
+- `evictContent(keep?: Set<string>)`
+
+2. Touch on content load
+
+- After setting `draft.content = x.data`, compute bytes and touch
+
+3. Touch on `get()` usage
+
+- If `store.file[path]?.content` exists, touch
+
+4. Evict on every content set
+
+- After `touchContent`, run eviction until within caps
+
+---
+
+### Acceptance criteria
+
+- Loading hundreds of files does not grow memory linearly; content retention plateaus
+- Actively viewed file content is not evicted under normal use
+- Evicted files can be reloaded correctly when accessed again
+
+---
+
+### Validation plan
+
+- Manual:
+  - Load many distinct files (e.g. via review tab)
+  - Confirm only the latest N files retain `content`
+  - Switch back to an older file; confirm it reloads without UI errors

+ 90 - 0
specs/16-worktree-waiter-dedup-and-prune.md

@@ -0,0 +1,90 @@
+## Worktree waiter dedup + pruning
+
+Prevent `Worktree.wait()` from accumulating resolver closures
+
+---
+
+### Summary
+
+`packages/app/src/utils/worktree.ts` stores `waiters` as `Map<string, Array<(state) => void>>`. If multiple callers call `wait()` while a directory is pending (or when callers stop awaiting due to their own timeouts), resolver closures can accumulate until a `ready()`/`failed()` event arrives.
+
+In this app, `Worktree.wait()` is used inside a `Promise.race` with a timeout, so it is possible to create many resolvers that remain stored for a long time.
+
+This spec changes `wait()` to share a single promise per directory key (dedup), eliminating unbounded waiter arrays. Optionally, it prunes resolved state entries to keep the map small.
+
+---
+
+### Scoped files (parallel-safe)
+
+- `packages/app/src/utils/worktree.ts`
+
+---
+
+### Goals
+
+- Ensure there is at most one pending promise per directory key
+- Avoid accumulating arrays of resolver closures
+- Keep current API surface for callers (`Worktree.wait(directory)`)
+
+---
+
+### Non-goals
+
+- Adding abort/cancel APIs that require callsite changes
+- Changing UI behavior around worktree readiness
+
+---
+
+### Proposed approach
+
+1. Replace `waiters` with a single in-flight promise per key
+
+- Change:
+  - from: `Map<string, Array<(state: State) => void>>`
+  - to: `Map<string, { promise: Promise<State>; resolve: (state: State) => void }>`
+
+2. Implement `wait()` dedup
+
+- If state is present and not pending: return `Promise.resolve(state)`.
+- Else if there is an in-flight waiter entry: return its `promise`.
+- Else create and store a new `{ promise, resolve }`.
+
+3. Resolve and clear on `ready()` / `failed()`
+
+- When setting state to ready/failed:
+  - look up waiter entry
+  - delete it
+  - call `resolve(state)`
+
+4. (Optional) prune resolved state entries
+
+- Keep pending states.
+- Drop old ready/failed entries if `state.size` exceeds a small cap.
+
+---
+
+### Implementation steps
+
+1. Refactor `waiters` representation
+
+2. Update `Worktree.wait`, `Worktree.ready`, `Worktree.failed`
+
+3. Add small inline comments describing dedup semantics
+
+---
+
+### Acceptance criteria
+
+- Calling `Worktree.wait(directory)` repeatedly while pending does not grow `waiters` without bound
+- `ready()` and `failed()` still resolve any in-flight waiter promise
+- Existing callsites continue to work without modification
+
+---
+
+### Validation plan
+
+- Manual (or small ad-hoc dev snippet):
+  - call `Worktree.pending(dir)`
+  - call `Worktree.wait(dir)` many times
+  - confirm only one waiter entry exists
+  - call `Worktree.ready(dir)` and confirm all awaiting callers resolve

+ 71 - 0
specs/17-permission-responded-bounds.md

@@ -0,0 +1,71 @@
+## Permission responded bounds
+
+Bound the in-memory `responded` set in PermissionProvider
+
+---
+
+### Summary
+
+`packages/app/src/context/permission.tsx` uses a module-local `responded = new Set<string>()` to prevent duplicate auto-responses for the same permission request ID. Entries are never cleared on success, so the set can grow without bound over a long-lived app session.
+
+This spec caps the size of this structure while preserving its purpose (dedupe in-flight/recent IDs).
+
+---
+
+### Scoped files (parallel-safe)
+
+- `packages/app/src/context/permission.tsx`
+
+---
+
+### Goals
+
+- Prevent unbounded growth of `responded`
+- Keep dedupe behavior for recent/in-flight permission IDs
+- Avoid touching other modules
+
+---
+
+### Non-goals
+
+- Changing permission auto-accept rules
+- Adding persistence for responded IDs
+
+---
+
+### Proposed approach
+
+- Replace the `Set<string>` with an insertion-ordered `Map<string, number>` (value = insertion timestamp), or keep the `Set` and prune oldest-first by relying on insertion order when re-creating it.
+- Add a cap constant, e.g. `MAX_RESPONDED = 1000`.
+- On `respondOnce(...)`:
+  - insert/update the ID (refresh recency)
+  - if size exceeds cap, delete oldest entries until within cap
+- Keep the existing `.catch(() => responded.delete(id))` behavior for request failures.
+
+Optional: add TTL pruning (e.g. drop entries older than 1 hour) when inserting.
+
+---
+
+### Implementation steps
+
+1. Introduce `MAX_RESPONDED` and a small `pruneResponded()` helper
+
+2. Update `respondOnce(...)` to refresh recency and prune
+
+3. Keep failure rollback behavior
+
+---
+
+### Acceptance criteria
+
+- `responded` never grows beyond `MAX_RESPONDED`
+- Auto-respond dedupe still works for repeated events for the same permission ID in a short window
+
+---
+
+### Validation plan
+
+- Manual:
+  - Simulate many permission requests (or mock by calling `respondOnce` in dev)
+  - Confirm the structure size stays capped
+  - Confirm duplicate events for the same permission ID do not send multiple responses