Browse Source

fix gpt compaction issue

Dax Raad, 3 months ago
parent
commit
759635eefa

+ 6 - 3
packages/opencode/src/cli/cmd/tui/thread.ts

@@ -5,6 +5,7 @@ import { type rpc } from "./worker"
 import path from "path"
 import { UI } from "@/cli/ui"
 import { iife } from "@/util/iife"
+import { Log } from "@/util/log"
 
 declare global {
   const OPENCODE_WORKER_PATH: string
@@ -79,13 +80,15 @@ export const TuiThreadCommand = cmd({
         Object.entries(process.env).filter((entry): entry is [string, string] => entry[1] !== undefined),
       ),
     })
-    worker.onerror = console.error
+    worker.onerror = (e) => {
+      Log.Default.error(e)
+    }
     const client = Rpc.client<typeof rpc>(worker)
     process.on("uncaughtException", (e) => {
-      console.error(e)
+      Log.Default.error(e)
     })
     process.on("unhandledRejection", (e) => {
-      console.error(e)
+      Log.Default.error(e)
     })
     const server = await client.call("server", {
       port: args.port,

+ 40 - 4
packages/opencode/src/session/compaction.ts

@@ -1,4 +1,4 @@
-import { streamText, type ModelMessage } from "ai"
+import { streamText, wrapLanguageModel, type ModelMessage } from "ai"
 import { Session } from "."
 import { Identifier } from "../id/id"
 import { Instance } from "../project/instance"
@@ -129,10 +129,17 @@ export namespace SessionCompaction {
     })
     const result = await processor.process(() =>
       streamText({
+        onError(error) {
+          log.error("stream error", {
+            error,
+          })
+        },
         // set to 0, we handle loop
         maxRetries: 0,
-        model: model.language,
-        providerOptions: ProviderTransform.providerOptions(model.npm, model.providerID, model.info.options),
+        providerOptions: ProviderTransform.providerOptions(model.npm, model.providerID, {
+          ...ProviderTransform.options(model.providerID, model.modelID, model.npm ?? "", input.sessionID),
+          ...model.info.options,
+        }),
         headers: model.info.headers,
         abortSignal: input.abort,
         tools: model.info.tool_call ? {} : undefined,
@@ -143,7 +150,21 @@ export namespace SessionCompaction {
               content: x,
             }),
           ),
-          ...MessageV2.toModelMessage(input.messages),
+          ...MessageV2.toModelMessage(
+            input.messages.filter((m) => {
+              if (m.info.role !== "assistant" || m.info.error === undefined) {
+                return true
+              }
+              if (
+                MessageV2.AbortedError.isInstance(m.info.error) &&
+                m.parts.some((part) => part.type !== "step-start" && part.type !== "reasoning")
+              ) {
+                return true
+              }
+
+              return false
+            }),
+          ),
           {
             role: "user",
             content: [
@@ -154,6 +175,20 @@ export namespace SessionCompaction {
             ],
           },
         ],
+        model: wrapLanguageModel({
+          model: model.language,
+          middleware: [
+            {
+              async transformParams(args) {
+                if (args.type === "stream") {
+                  // @ts-expect-error
+                  args.params.prompt = ProviderTransform.message(args.params.prompt, model.providerID, model.modelID)
+                }
+                return args.params
+              },
+            },
+          ],
+        }),
       }),
     )
     if (result === "continue") {
@@ -180,6 +215,7 @@ export namespace SessionCompaction {
         },
       })
     }
+    if (processor.message.error) return "stop"
     return "continue"
   }
 

+ 2 - 1
packages/opencode/src/session/prompt.ts

@@ -390,7 +390,7 @@ export namespace SessionPrompt {
 
       // pending compaction
       if (task?.type === "compaction") {
-        await SessionCompaction.process({
+        const result = await SessionCompaction.process({
           messages: msgs,
           parentID: lastUser.id,
           abort,
@@ -400,6 +400,7 @@ export namespace SessionPrompt {
           },
           sessionID,
         })
+        if (result === "stop") break
         continue
       }