2
0
Dax Raad 3 сар өмнө
parent
commit
75c29d4d1c

+ 21 - 12
packages/opencode/src/provider/provider.ts

@@ -658,20 +658,29 @@ export namespace Provider {
     }
 
     const provider = await state().then((state) => state.providers[providerID])
-    if (!provider) return
-    let priority = ["claude-haiku-4-5", "claude-haiku-4.5", "3-5-haiku", "3.5-haiku", "gemini-2.5-flash", "gpt-5-nano"]
-    // claude-haiku-4.5 is considered a premium model in github copilot, we shouldn't use premium requests for title gen
-    if (providerID === "github-copilot") {
-      priority = priority.filter((m) => m !== "claude-haiku-4.5")
-    }
-    if (providerID === "opencode" || providerID === "local") {
-      priority = ["gpt-5-nano"]
-    }
-    for (const item of priority) {
-      for (const model of Object.keys(provider.info.models)) {
-        if (model.includes(item)) return getModel(providerID, model)
+    if (provider) {
+      let priority = [
+        "claude-haiku-4-5",
+        "claude-haiku-4.5",
+        "3-5-haiku",
+        "3.5-haiku",
+        "gemini-2.5-flash",
+        "gpt-5-nano",
+      ]
+      // claude-haiku-4.5 is considered a premium model in github copilot, we shouldn't use premium requests for title gen
+      if (providerID === "github-copilot") {
+        priority = priority.filter((m) => m !== "claude-haiku-4.5")
+      }
+      if (providerID === "opencode" || providerID === "local") {
+        priority = ["gpt-5-nano"]
+      }
+      for (const item of priority) {
+        for (const model of Object.keys(provider.info.models)) {
+          if (model.includes(item)) return getModel(providerID, model)
+        }
       }
     }
+    return getModel("opencode", "gpt-5-nano")
   }
 
   const priority = ["gpt-5", "claude-sonnet-4", "big-pickle", "gemini-3-pro"]

+ 20 - 6
packages/opencode/src/provider/transform.ts

@@ -128,12 +128,7 @@ export namespace ProviderTransform {
     return undefined
   }
 
-  export function options(
-    providerID: string,
-    modelID: string,
-    npm: string,
-    sessionID: string,
-  ): Record<string, any> | undefined {
+  export function options(providerID: string, modelID: string, npm: string, sessionID: string): Record<string, any> {
     const result: Record<string, any> = {}
 
     // switch to providerID later, for now use this
@@ -175,6 +170,25 @@ export namespace ProviderTransform {
     return result
   }
 
+  export function smallOptions(input: { providerID: string; modelID: string }) {
+    const options: Record<string, any> = {}
+
+    if (input.providerID === "openai" || input.modelID.includes("gpt-5")) {
+      if (input.modelID.includes("5.1")) {
+        options["reasoningEffort"] = "low"
+      } else {
+        options["reasoningEffort"] = "minimal"
+      }
+    }
+    if (input.providerID === "google") {
+      options["thinkingConfig"] = {
+        thinkingBudget: 0,
+      }
+    }
+
+    return options
+  }
+
   export function providerOptions(npm: string | undefined, providerID: string, options: { [x: string]: any }) {
     switch (npm) {
       case "@ai-sdk/openai":

+ 11 - 5
packages/opencode/src/session/compaction.ts

@@ -15,6 +15,7 @@ import { Log } from "../util/log"
 import { ProviderTransform } from "@/provider/transform"
 import { SessionProcessor } from "./processor"
 import { fn } from "@/util/fn"
+import { mergeDeep, pipe } from "remeda"
 
 export namespace SessionCompaction {
   const log = Log.create({ service: "session.compaction" })
@@ -96,7 +97,7 @@ export namespace SessionCompaction {
     abort: AbortSignal
   }) {
     const model = await Provider.getModel(input.model.providerID, input.model.modelID)
-    const system = [...SystemPrompt.summarize(model.providerID)]
+    const system = [...SystemPrompt.compaction(model.providerID)]
     const msg = (await Session.updateMessage({
       id: Identifier.ascending("message"),
       role: "assistant",
@@ -137,10 +138,15 @@ export namespace SessionCompaction {
         },
         // set to 0, we handle loop
         maxRetries: 0,
-        providerOptions: ProviderTransform.providerOptions(model.npm, model.providerID, {
-          ...ProviderTransform.options(model.providerID, model.modelID, model.npm ?? "", input.sessionID),
-          ...model.info.options,
-        }),
+        providerOptions: ProviderTransform.providerOptions(
+          model.npm,
+          model.providerID,
+          pipe(
+            {},
+            mergeDeep(ProviderTransform.options(model.providerID, model.modelID, model.npm ?? "", input.sessionID)),
+            mergeDeep(model.info.options),
+          ),
+        ),
         headers: model.info.headers,
         abortSignal: input.abort,
         tools: model.info.tool_call ? {} : undefined,

+ 13 - 24
packages/opencode/src/session/prompt.ts

@@ -489,11 +489,12 @@ export namespace SessionPrompt {
             ? (agent.temperature ?? ProviderTransform.temperature(model.providerID, model.modelID))
             : undefined,
           topP: agent.topP ?? ProviderTransform.topP(model.providerID, model.modelID),
-          options: {
-            ...ProviderTransform.options(model.providerID, model.modelID, model.npm ?? "", sessionID),
-            ...model.info.options,
-            ...agent.options,
-          },
+          options: pipe(
+            {},
+            mergeDeep(ProviderTransform.options(model.providerID, model.modelID, model.npm ?? "", sessionID)),
+            mergeDeep(model.info.options),
+            mergeDeep(agent.options),
+          ),
         },
       )
 
@@ -1384,7 +1385,6 @@ export namespace SessionPrompt {
     return result
   }
 
-  // TODO: wire this back up
   async function ensureTitle(input: {
     session: Session.Info
     message: MessageV2.WithParts
@@ -1398,24 +1398,13 @@ export namespace SessionPrompt {
       input.history.filter((m) => m.info.role === "user" && !m.parts.every((p) => "synthetic" in p && p.synthetic))
         .length === 1
     if (!isFirst) return
-    const small =
-      (await Provider.getSmallModel(input.providerID)) ?? (await Provider.getModel(input.providerID, input.modelID))
-    const options = {
-      ...ProviderTransform.options(small.providerID, small.modelID, small.npm ?? "", input.session.id),
-      ...small.info.options,
-    }
-    if (small.providerID === "openai" || small.modelID.includes("gpt-5")) {
-      if (small.modelID.includes("5.1")) {
-        options["reasoningEffort"] = "low"
-      } else {
-        options["reasoningEffort"] = "minimal"
-      }
-    }
-    if (small.providerID === "google") {
-      options["thinkingConfig"] = {
-        thinkingBudget: 0,
-      }
-    }
+    const small = await Provider.getSmallModel(input.providerID)
+    const options = pipe(
+      {},
+      mergeDeep(ProviderTransform.options(small.providerID, small.modelID, small.npm ?? "", input.session.id)),
+      mergeDeep(ProviderTransform.smallOptions({ providerID: small.providerID, modelID: small.modelID })),
+      mergeDeep(small.info.options),
+    )
     await generateText({
       maxOutputTokens: small.info.reasoning ? 1500 : 20,
       providerOptions: ProviderTransform.providerOptions(small.npm, small.providerID, options),

+ 10 - 0
packages/opencode/src/session/prompt/compaction.txt

@@ -0,0 +1,10 @@
+You are a helpful AI assistant tasked with summarizing conversations.
+
+When asked to summarize, provide a detailed but concise summary of the conversation. 
+Focus on information that would be helpful for continuing the conversation, including:
+- What was done
+- What is currently being worked on
+- Which files are being modified
+- What needs to be done next
+
+Your summary should be comprehensive enough to provide context but concise enough to be quickly understood.

+ 0 - 5
packages/opencode/src/session/prompt/summarize-turn.txt

@@ -1,5 +0,0 @@
-Your job is to generate a summary of what happened in this conversation and why.
-
-Keep the results to 2-3 sentences.
-
-Output the message summary now:

+ 4 - 10
packages/opencode/src/session/prompt/summarize.txt

@@ -1,10 +1,4 @@
-You are a helpful AI assistant tasked with summarizing conversations.
-
-When asked to summarize, provide a detailed but concise summary of the conversation. 
-Focus on information that would be helpful for continuing the conversation, including:
-- What was done
-- What is currently being worked on
-- Which files are being modified
-- What needs to be done next
-
-Your summary should be comprehensive enough to provide context but concise enough to be quickly understood.
+Summarize the following conversation into 2 sentences MAX explaining what the
+assistant did and why.
+Do not explain the user's input.
+Do not speak in the third person about the assistant.

+ 24 - 8
packages/opencode/src/session/summary.ts

@@ -13,6 +13,7 @@ import path from "path"
 import { Instance } from "@/project/instance"
 import { Storage } from "@/storage/storage"
 import { Bus } from "@/bus"
+import { mergeDeep, pipe } from "remeda"
 
 export namespace SessionSummary {
   const log = Log.create({ service: "session.summary" })
@@ -73,13 +74,18 @@ export namespace SessionSummary {
 
     const assistantMsg = messages.find((m) => m.info.role === "assistant")!.info as MessageV2.Assistant
     const small = await Provider.getSmallModel(assistantMsg.providerID)
-    if (!small) return
+    const options = pipe(
+      {},
+      mergeDeep(ProviderTransform.options(small.providerID, small.modelID, small.npm ?? "", assistantMsg.sessionID)),
+      mergeDeep(ProviderTransform.smallOptions({ providerID: small.providerID, modelID: small.modelID })),
+      mergeDeep(small.info.options),
+    )
 
     const textPart = msgWithParts.parts.find((p) => p.type === "text" && !p.synthetic) as MessageV2.TextPart
     if (textPart && !userMsg.summary?.title) {
       const result = await generateText({
         maxOutputTokens: small.info.reasoning ? 1500 : 20,
-        providerOptions: ProviderTransform.providerOptions(small.npm, small.providerID, {}),
+        providerOptions: ProviderTransform.providerOptions(small.npm, small.providerID, options),
         messages: [
           ...SystemPrompt.title(small.providerID).map(
             (x): ModelMessage => ({
@@ -115,18 +121,28 @@ export namespace SessionSummary {
         .findLast((m) => m.info.role === "assistant")
         ?.parts.findLast((p) => p.type === "text")?.text
       if (!summary || diffs.length > 0) {
+        for (const msg of messages) {
+          for (const part of msg.parts) {
+            if (part.type === "tool" && part.state.status === "completed") {
+              part.state.output = "[TOOL OUTPUT PRUNED]"
+            }
+          }
+        }
         const result = await generateText({
           model: small.language,
           maxOutputTokens: 100,
+          providerOptions: ProviderTransform.providerOptions(small.npm, small.providerID, options),
           messages: [
+            ...SystemPrompt.summarize(small.providerID).map(
+              (x): ModelMessage => ({
+                role: "system",
+                content: x,
+              }),
+            ),
+            ...MessageV2.toModelMessage(messages),
             {
               role: "user",
-              content: `
-            Summarize the following conversation into 2 sentences MAX explaining what the assistant did and why. Do not explain the user's input. Do not speak in the third person about the assistant.
-            <conversation>
-            ${JSON.stringify(MessageV2.toModelMessage(messages))}
-            </conversation>
-            `,
+              content: `Summarize the above conversation according to your system prompts.`,
             },
           ],
           headers: small.info.headers,

+ 10 - 0
packages/opencode/src/session/system.ts

@@ -13,6 +13,7 @@ import PROMPT_POLARIS from "./prompt/polaris.txt"
 import PROMPT_BEAST from "./prompt/beast.txt"
 import PROMPT_GEMINI from "./prompt/gemini.txt"
 import PROMPT_ANTHROPIC_SPOOF from "./prompt/anthropic_spoof.txt"
+import PROMPT_COMPACTION from "./prompt/compaction.txt"
 import PROMPT_SUMMARIZE from "./prompt/summarize.txt"
 import PROMPT_TITLE from "./prompt/title.txt"
 import PROMPT_CODEX from "./prompt/codex.txt"
@@ -116,6 +117,15 @@ export namespace SystemPrompt {
     return Promise.all(found).then((result) => result.filter(Boolean))
   }
 
+  export function compaction(providerID: string) {
+    switch (providerID) {
+      case "anthropic":
+        return [PROMPT_ANTHROPIC_SPOOF.trim(), PROMPT_COMPACTION]
+      default:
+        return [PROMPT_COMPACTION]
+    }
+  }
+
   export function summarize(providerID: string) {
     switch (providerID) {
       case "anthropic":