Просмотр исходного кода

LLM cleanup (#5462)

Co-authored-by: GitHub Action <[email protected]>
Co-authored-by: Aiden Cline <[email protected]>
Dax 3 месяца назад
Родитель
Commit
fed4776451

+ 1 - 0
.opencode/opencode.jsonc

@@ -10,4 +10,5 @@
       "options": {},
       "options": {},
     },
     },
   },
   },
+  "mcp": {},
 }
 }

+ 1 - 1
packages/desktop/src/context/local.tsx

@@ -78,7 +78,7 @@ export const { use: useLocal, provider: LocalProvider } = createSimpleContext({
     })
     })
 
 
     const agent = (() => {
     const agent = (() => {
-      const list = createMemo(() => sync.data.agent.filter((x) => x.mode !== "subagent"))
+      const list = createMemo(() => sync.data.agent.filter((x) => x.mode !== "subagent" && !x.hidden))
       const [store, setStore] = createStore<{
       const [store, setStore] = createStore<{
         current: string
         current: string
       }>({
       }>({

+ 47 - 27
packages/opencode/src/agent/agent.ts

@@ -2,18 +2,24 @@ import { Config } from "../config/config"
 import z from "zod"
 import z from "zod"
 import { Provider } from "../provider/provider"
 import { Provider } from "../provider/provider"
 import { generateObject, type ModelMessage } from "ai"
 import { generateObject, type ModelMessage } from "ai"
-import PROMPT_GENERATE from "./generate.txt"
 import { SystemPrompt } from "../session/system"
 import { SystemPrompt } from "../session/system"
 import { Instance } from "../project/instance"
 import { Instance } from "../project/instance"
 import { mergeDeep } from "remeda"
 import { mergeDeep } from "remeda"
 
 
+import PROMPT_GENERATE from "./generate.txt"
+import PROMPT_COMPACTION from "./prompt/compaction.txt"
+import PROMPT_EXPLORE from "./prompt/explore.txt"
+import PROMPT_SUMMARY from "./prompt/summary.txt"
+import PROMPT_TITLE from "./prompt/title.txt"
+
 export namespace Agent {
 export namespace Agent {
   export const Info = z
   export const Info = z
     .object({
     .object({
       name: z.string(),
       name: z.string(),
       description: z.string().optional(),
       description: z.string().optional(),
       mode: z.enum(["subagent", "primary", "all"]),
       mode: z.enum(["subagent", "primary", "all"]),
-      builtIn: z.boolean(),
+      native: z.boolean().optional(),
+      hidden: z.boolean().optional(),
       topP: z.number().optional(),
       topP: z.number().optional(),
       temperature: z.number().optional(),
       temperature: z.number().optional(),
       color: z.string().optional(),
       color: z.string().optional(),
@@ -112,7 +118,8 @@ export namespace Agent {
         options: {},
         options: {},
         permission: agentPermission,
         permission: agentPermission,
         mode: "subagent",
         mode: "subagent",
-        builtIn: true,
+        native: true,
+        hidden: true,
       },
       },
       explore: {
       explore: {
         name: "explore",
         name: "explore",
@@ -124,30 +131,23 @@ export namespace Agent {
           ...defaultTools,
           ...defaultTools,
         },
         },
         description: `Fast agent specialized for exploring codebases. Use this when you need to quickly find files by patterns (eg. "src/components/**/*.tsx"), search code for keywords (eg. "API endpoints"), or answer questions about the codebase (eg. "how do API endpoints work?"). When calling this agent, specify the desired thoroughness level: "quick" for basic searches, "medium" for moderate exploration, or "very thorough" for comprehensive analysis across multiple locations and naming conventions.`,
         description: `Fast agent specialized for exploring codebases. Use this when you need to quickly find files by patterns (eg. "src/components/**/*.tsx"), search code for keywords (eg. "API endpoints"), or answer questions about the codebase (eg. "how do API endpoints work?"). When calling this agent, specify the desired thoroughness level: "quick" for basic searches, "medium" for moderate exploration, or "very thorough" for comprehensive analysis across multiple locations and naming conventions.`,
-        prompt: [
-          `You are a file search specialist. You excel at thoroughly navigating and exploring codebases.`,
-          ``,
-          `Your strengths:`,
-          `- Rapidly finding files using glob patterns`,
-          `- Searching code and text with powerful regex patterns`,
-          `- Reading and analyzing file contents`,
-          ``,
-          `Guidelines:`,
-          `- Use Glob for broad file pattern matching`,
-          `- Use Grep for searching file contents with regex`,
-          `- Use Read when you know the specific file path you need to read`,
-          `- Use Bash for file operations like copying, moving, or listing directory contents`,
-          `- Adapt your search approach based on the thoroughness level specified by the caller`,
-          `- Return file paths as absolute paths in your final response`,
-          `- For clear communication, avoid using emojis`,
-          `- Do not create any files, or run bash commands that modify the user's system state in any way`,
-          ``,
-          `Complete the user's search request efficiently and report your findings clearly.`,
-        ].join("\n"),
+        prompt: PROMPT_EXPLORE,
         options: {},
         options: {},
         permission: agentPermission,
         permission: agentPermission,
         mode: "subagent",
         mode: "subagent",
-        builtIn: true,
+        native: true,
+      },
+      compaction: {
+        name: "compaction",
+        mode: "primary",
+        native: true,
+        hidden: true,
+        prompt: PROMPT_COMPACTION,
+        tools: {
+          "*": false,
+        },
+        options: {},
+        permission: agentPermission,
       },
       },
       build: {
       build: {
         name: "build",
         name: "build",
@@ -155,7 +155,27 @@ export namespace Agent {
         options: {},
         options: {},
         permission: agentPermission,
         permission: agentPermission,
         mode: "primary",
         mode: "primary",
-        builtIn: true,
+        native: true,
+      },
+      title: {
+        name: "title",
+        mode: "primary",
+        options: {},
+        native: true,
+        hidden: true,
+        permission: agentPermission,
+        prompt: PROMPT_TITLE,
+        tools: {},
+      },
+      summary: {
+        name: "summary",
+        mode: "primary",
+        options: {},
+        native: true,
+        hidden: true,
+        permission: agentPermission,
+        prompt: PROMPT_SUMMARY,
+        tools: {},
       },
       },
       plan: {
       plan: {
         name: "plan",
         name: "plan",
@@ -165,7 +185,7 @@ export namespace Agent {
           ...defaultTools,
           ...defaultTools,
         },
         },
         mode: "primary",
         mode: "primary",
-        builtIn: true,
+        native: true,
       },
       },
     }
     }
     for (const [key, value] of Object.entries(cfg.agent ?? {})) {
     for (const [key, value] of Object.entries(cfg.agent ?? {})) {
@@ -181,7 +201,7 @@ export namespace Agent {
           permission: agentPermission,
           permission: agentPermission,
           options: {},
           options: {},
           tools: {},
           tools: {},
-          builtIn: false,
+          native: false,
         }
         }
       const {
       const {
         name,
         name,

+ 0 - 0
packages/opencode/src/session/prompt/compaction.txt → packages/opencode/src/agent/prompt/compaction.txt


+ 18 - 0
packages/opencode/src/agent/prompt/explore.txt

@@ -0,0 +1,18 @@
+You are a file search specialist. You excel at thoroughly navigating and exploring codebases.
+
+Your strengths:
+- Rapidly finding files using glob patterns
+- Searching code and text with powerful regex patterns
+- Reading and analyzing file contents
+
+Guidelines:
+- Use Glob for broad file pattern matching
+- Use Grep for searching file contents with regex
+- Use Read when you know the specific file path you need to read
+- Use Bash for file operations like copying, moving, or listing directory contents
+- Adapt your search approach based on the thoroughness level specified by the caller
+- Return file paths as absolute paths in your final response
+- For clear communication, avoid using emojis
+- Do not create any files, or run bash commands that modify the user's system state in any way
+
+Complete the user's search request efficiently and report your findings clearly.

+ 0 - 0
packages/opencode/src/session/prompt/summarize.txt → packages/opencode/src/agent/prompt/summary.txt


+ 2 - 2
packages/opencode/src/session/prompt/title.txt → packages/opencode/src/agent/prompt/title.txt

@@ -22,8 +22,8 @@ Your output must be:
 - The title should NEVER include "summarizing" or "generating" when generating a title
 - The title should NEVER include "summarizing" or "generating" when generating a title
 - DO NOT SAY YOU CANNOT GENERATE A TITLE OR COMPLAIN ABOUT THE INPUT
 - DO NOT SAY YOU CANNOT GENERATE A TITLE OR COMPLAIN ABOUT THE INPUT
 - Always output something meaningful, even if the input is minimal.
 - Always output something meaningful, even if the input is minimal.
-- If the user message is short or conversational (e.g. “hello”, “lol”, “whats up”, “hey”):
-  → create a title that reflects the users tone or intent (such as Greeting, Quick check-in, Light chat, Intro message, etc.)
+- If the user message is short or conversational (e.g. "hello", "lol", "whats up", "hey"):
+  → create a title that reflects the user's tone or intent (such as Greeting, Quick check-in, Light chat, Intro message, etc.)
 </rules>
 </rules>
 
 
 <examples>
 <examples>

+ 2 - 2
packages/opencode/src/cli/cmd/agent.ts

@@ -227,8 +227,8 @@ const AgentListCommand = cmd({
       async fn() {
       async fn() {
         const agents = await Agent.list()
         const agents = await Agent.list()
         const sortedAgents = agents.sort((a, b) => {
         const sortedAgents = agents.sort((a, b) => {
-          if (a.builtIn !== b.builtIn) {
-            return a.builtIn ? -1 : 1
+          if (a.native !== b.native) {
+            return a.native ? -1 : 1
           }
           }
           return a.name.localeCompare(b.name)
           return a.name.localeCompare(b.name)
         })
         })

+ 1 - 1
packages/opencode/src/cli/cmd/tui/component/dialog-agent.tsx

@@ -12,7 +12,7 @@ export function DialogAgent() {
       return {
       return {
         value: item.name,
         value: item.name,
         title: item.name,
         title: item.name,
-        description: item.builtIn ? "native" : item.description,
+        description: item.native ? "native" : item.description,
       }
       }
     }),
     }),
   )
   )

+ 1 - 1
packages/opencode/src/cli/cmd/tui/component/prompt/autocomplete.tsx

@@ -184,7 +184,7 @@ export function Autocomplete(props: {
   const agents = createMemo(() => {
   const agents = createMemo(() => {
     const agents = sync.data.agent
     const agents = sync.data.agent
     return agents
     return agents
-      .filter((agent) => !agent.builtIn && agent.mode !== "primary")
+      .filter((agent) => !agent.hidden && agent.mode !== "primary")
       .map(
       .map(
         (agent): AutocompleteOption => ({
         (agent): AutocompleteOption => ({
           display: "@" + agent.name,
           display: "@" + agent.name,

+ 1 - 1
packages/opencode/src/cli/cmd/tui/context/local.tsx

@@ -52,7 +52,7 @@ export const { use: useLocal, provider: LocalProvider } = createSimpleContext({
     })
     })
 
 
     const agent = iife(() => {
     const agent = iife(() => {
-      const agents = createMemo(() => sync.data.agent.filter((x) => x.mode !== "subagent"))
+      const agents = createMemo(() => sync.data.agent.filter((x) => x.mode !== "subagent" && !x.hidden))
       const [agentStore, setAgentStore] = createStore<{
       const [agentStore, setAgentStore] = createStore<{
         current: string
         current: string
       }>({
       }>({

+ 1 - 1
packages/opencode/src/provider/provider.ts

@@ -858,7 +858,7 @@ export namespace Provider {
     return info
     return info
   }
   }
 
 
-  export async function getLanguage(model: Model) {
+  export async function getLanguage(model: Model): Promise<LanguageModelV2> {
     const s = await state()
     const s = await state()
     const key = `${model.providerID}/${model.id}`
     const key = `${model.providerID}/${model.id}`
     if (s.models.has(key)) return s.models.get(key)!
     if (s.models.has(key)) return s.models.get(key)!

+ 21 - 75
packages/opencode/src/session/compaction.ts

@@ -1,22 +1,18 @@
 import { BusEvent } from "@/bus/bus-event"
 import { BusEvent } from "@/bus/bus-event"
 import { Bus } from "@/bus"
 import { Bus } from "@/bus"
-import { wrapLanguageModel, type ModelMessage } from "ai"
 import { Session } from "."
 import { Session } from "."
 import { Identifier } from "../id/id"
 import { Identifier } from "../id/id"
 import { Instance } from "../project/instance"
 import { Instance } from "../project/instance"
 import { Provider } from "../provider/provider"
 import { Provider } from "../provider/provider"
 import { MessageV2 } from "./message-v2"
 import { MessageV2 } from "./message-v2"
-import { SystemPrompt } from "./system"
 import z from "zod"
 import z from "zod"
 import { SessionPrompt } from "./prompt"
 import { SessionPrompt } from "./prompt"
 import { Flag } from "../flag/flag"
 import { Flag } from "../flag/flag"
 import { Token } from "../util/token"
 import { Token } from "../util/token"
-import { Config } from "../config/config"
 import { Log } from "../util/log"
 import { Log } from "../util/log"
-import { ProviderTransform } from "@/provider/transform"
 import { SessionProcessor } from "./processor"
 import { SessionProcessor } from "./processor"
 import { fn } from "@/util/fn"
 import { fn } from "@/util/fn"
-import { mergeDeep, pipe } from "remeda"
+import { Agent } from "@/agent/agent"
 
 
 export namespace SessionCompaction {
 export namespace SessionCompaction {
   const log = Log.create({ service: "session.compaction" })
   const log = Log.create({ service: "session.compaction" })
@@ -90,24 +86,21 @@ export namespace SessionCompaction {
     parentID: string
     parentID: string
     messages: MessageV2.WithParts[]
     messages: MessageV2.WithParts[]
     sessionID: string
     sessionID: string
-    model: {
-      providerID: string
-      modelID: string
-    }
-    agent: string
     abort: AbortSignal
     abort: AbortSignal
     auto: boolean
     auto: boolean
   }) {
   }) {
-    const cfg = await Config.get()
-    const model = await Provider.getModel(input.model.providerID, input.model.modelID)
-    const language = await Provider.getLanguage(model)
-    const system = [...SystemPrompt.compaction(model.providerID)]
+    const userMessage = input.messages.findLast((m) => m.info.id === input.parentID)!.info as MessageV2.User
+    const agent = await Agent.get("compaction")
+    const model = agent.model
+      ? await Provider.getModel(agent.model.providerID, agent.model.modelID)
+      : await Provider.getModel(userMessage.model.providerID, userMessage.model.modelID)
     const msg = (await Session.updateMessage({
     const msg = (await Session.updateMessage({
       id: Identifier.ascending("message"),
       id: Identifier.ascending("message"),
       role: "assistant",
       role: "assistant",
       parentID: input.parentID,
       parentID: input.parentID,
       sessionID: input.sessionID,
       sessionID: input.sessionID,
-      mode: input.agent,
+      mode: "compaction",
+      agent: "compaction",
       summary: true,
       summary: true,
       path: {
       path: {
         cwd: Instance.directory,
         cwd: Instance.directory,
@@ -120,7 +113,7 @@ export namespace SessionCompaction {
         reasoning: 0,
         reasoning: 0,
         cache: { read: 0, write: 0 },
         cache: { read: 0, write: 0 },
       },
       },
-      modelID: input.model.modelID,
+      modelID: model.id,
       providerID: model.providerID,
       providerID: model.providerID,
       time: {
       time: {
         created: Date.now(),
         created: Date.now(),
@@ -129,46 +122,18 @@ export namespace SessionCompaction {
     const processor = SessionProcessor.create({
     const processor = SessionProcessor.create({
       assistantMessage: msg,
       assistantMessage: msg,
       sessionID: input.sessionID,
       sessionID: input.sessionID,
-      model: model,
+      model,
       abort: input.abort,
       abort: input.abort,
     })
     })
     const result = await processor.process({
     const result = await processor.process({
-      onError(error) {
-        log.error("stream error", {
-          error,
-        })
-      },
-      // set to 0, we handle loop
-      maxRetries: 0,
-      providerOptions: ProviderTransform.providerOptions(
-        model,
-        pipe({}, mergeDeep(ProviderTransform.options(model, input.sessionID)), mergeDeep(model.options)),
-      ),
-      headers: model.headers,
-      abortSignal: input.abort,
-      tools: model.capabilities.toolcall ? {} : undefined,
+      user: userMessage,
+      agent,
+      abort: input.abort,
+      sessionID: input.sessionID,
+      tools: {},
+      system: [],
       messages: [
       messages: [
-        ...system.map(
-          (x): ModelMessage => ({
-            role: "system",
-            content: x,
-          }),
-        ),
-        ...MessageV2.toModelMessage(
-          input.messages.filter((m) => {
-            if (m.info.role !== "assistant" || m.info.error === undefined) {
-              return true
-            }
-            if (
-              MessageV2.AbortedError.isInstance(m.info.error) &&
-              m.parts.some((part) => part.type !== "step-start" && part.type !== "reasoning")
-            ) {
-              return true
-            }
-
-            return false
-          }),
-        ),
+        ...MessageV2.toModelMessage(input.messages),
         {
         {
           role: "user",
           role: "user",
           content: [
           content: [
@@ -179,28 +144,9 @@ export namespace SessionCompaction {
           ],
           ],
         },
         },
       ],
       ],
-      model: wrapLanguageModel({
-        model: language,
-        middleware: [
-          {
-            async transformParams(args) {
-              if (args.type === "stream") {
-                // @ts-expect-error
-                args.params.prompt = ProviderTransform.message(args.params.prompt, model)
-              }
-              return args.params
-            },
-          },
-        ],
-      }),
-      experimental_telemetry: {
-        isEnabled: cfg.experimental?.openTelemetry,
-        metadata: {
-          userId: cfg.username ?? "unknown",
-          sessionId: input.sessionID,
-        },
-      },
+      model,
     })
     })
+
     if (result === "continue" && input.auto) {
     if (result === "continue" && input.auto) {
       const continueMsg = await Session.updateMessage({
       const continueMsg = await Session.updateMessage({
         id: Identifier.ascending("message"),
         id: Identifier.ascending("message"),
@@ -209,8 +155,8 @@ export namespace SessionCompaction {
         time: {
         time: {
           created: Date.now(),
           created: Date.now(),
         },
         },
-        agent: input.agent,
-        model: input.model,
+        agent: userMessage.agent,
+        model: userMessage.model,
       })
       })
       await Session.updatePart({
       await Session.updatePart({
         id: Identifier.ascending("part"),
         id: Identifier.ascending("part"),

+ 184 - 0
packages/opencode/src/session/llm.ts

@@ -0,0 +1,184 @@
+import { Provider } from "@/provider/provider"
+import { Log } from "@/util/log"
+import { streamText, wrapLanguageModel, type ModelMessage, type StreamTextResult, type Tool, type ToolSet } from "ai"
+import { mergeDeep, pipe } from "remeda"
+import { ProviderTransform } from "@/provider/transform"
+import { Config } from "@/config/config"
+import { Instance } from "@/project/instance"
+import type { Agent } from "@/agent/agent"
+import type { MessageV2 } from "./message-v2"
+import { Plugin } from "@/plugin"
+import { SystemPrompt } from "./system"
+import { ToolRegistry } from "@/tool/registry"
+import { Flag } from "@/flag/flag"
+
+export namespace LLM {
+  const log = Log.create({ service: "llm" })
+
+  export const OUTPUT_TOKEN_MAX = 32_000
+
+  export type StreamInput = {
+    user: MessageV2.User
+    sessionID: string
+    model: Provider.Model
+    agent: Agent.Info
+    system: string[]
+    abort: AbortSignal
+    messages: ModelMessage[]
+    small?: boolean
+    tools: Record<string, Tool>
+    retries?: number
+  }
+
+  export type StreamOutput = StreamTextResult<ToolSet, unknown>
+
+  export async function stream(input: StreamInput) {
+    const l = log
+      .clone()
+      .tag("providerID", input.model.providerID)
+      .tag("modelID", input.model.id)
+      .tag("sessionID", input.sessionID)
+      .tag("small", (input.small ?? false).toString())
+      .tag("agent", input.agent.name)
+    l.info("stream", {
+      modelID: input.model.id,
+      providerID: input.model.providerID,
+    })
+    const [language, cfg] = await Promise.all([Provider.getLanguage(input.model), Config.get()])
+
+    const system = SystemPrompt.header(input.model.providerID)
+    system.push(
+      [
+        // use agent prompt otherwise provider prompt
+        ...(input.agent.prompt ? [input.agent.prompt] : SystemPrompt.provider(input.model)),
+        // any custom prompt passed into this call
+        ...input.system,
+        // any custom prompt from last user message
+        ...(input.user.system ? [input.user.system] : []),
+      ]
+        .filter((x) => x)
+        .join("\n"),
+    )
+
+    const params = await Plugin.trigger(
+      "chat.params",
+      {
+        sessionID: input.sessionID,
+        agent: input.agent,
+        model: input.model,
+        provider: Provider.getProvider(input.model.providerID),
+        message: input.user,
+      },
+      {
+        temperature: input.model.capabilities.temperature
+          ? (input.agent.temperature ?? ProviderTransform.temperature(input.model))
+          : undefined,
+        topP: input.agent.topP ?? ProviderTransform.topP(input.model),
+        options: pipe(
+          {},
+          mergeDeep(ProviderTransform.options(input.model, input.sessionID)),
+          input.small ? mergeDeep(ProviderTransform.smallOptions(input.model)) : mergeDeep({}),
+          mergeDeep(input.model.options),
+          mergeDeep(input.agent.options),
+        ),
+      },
+    )
+
+    l.info("params", {
+      params,
+    })
+
+    const maxOutputTokens = ProviderTransform.maxOutputTokens(
+      input.model.api.npm,
+      params.options,
+      input.model.limit.output,
+      OUTPUT_TOKEN_MAX,
+    )
+
+    const tools = await resolveTools(input)
+
+    return streamText({
+      onError(error) {
+        l.error("stream error", {
+          error,
+        })
+      },
+      async experimental_repairToolCall(failed) {
+        const lower = failed.toolCall.toolName.toLowerCase()
+        if (lower !== failed.toolCall.toolName && tools[lower]) {
+          l.info("repairing tool call", {
+            tool: failed.toolCall.toolName,
+            repaired: lower,
+          })
+          return {
+            ...failed.toolCall,
+            toolName: lower,
+          }
+        }
+        return {
+          ...failed.toolCall,
+          input: JSON.stringify({
+            tool: failed.toolCall.toolName,
+            error: failed.error.message,
+          }),
+          toolName: "invalid",
+        }
+      },
+      temperature: params.temperature,
+      topP: params.topP,
+      providerOptions: ProviderTransform.providerOptions(input.model, params.options),
+      activeTools: Object.keys(tools).filter((x) => x !== "invalid"),
+      tools,
+      maxOutputTokens,
+      abortSignal: input.abort,
+      headers: {
+        ...(input.model.providerID.startsWith("opencode")
+          ? {
+              "x-opencode-project": Instance.project.id,
+              "x-opencode-session": input.sessionID,
+              "x-opencode-request": input.user.id,
+              "x-opencode-client": Flag.OPENCODE_CLIENT,
+            }
+          : undefined),
+        ...input.model.headers,
+      },
+      maxRetries: input.retries ?? 0,
+      messages: [
+        ...system.map(
+          (x): ModelMessage => ({
+            role: "system",
+            content: x,
+          }),
+        ),
+        ...input.messages,
+      ],
+      model: wrapLanguageModel({
+        model: language,
+        middleware: [
+          {
+            async transformParams(args) {
+              if (args.type === "stream") {
+                // @ts-expect-error
+                args.params.prompt = ProviderTransform.message(args.params.prompt, input.model)
+              }
+              return args.params
+            },
+          },
+        ],
+      }),
+      experimental_telemetry: { isEnabled: cfg.experimental?.openTelemetry },
+    })
+  }
+
+  async function resolveTools(input: Pick<StreamInput, "tools" | "agent" | "user">) {
+    const enabled = pipe(
+      input.agent.tools,
+      mergeDeep(await ToolRegistry.enabled(input.agent)),
+      mergeDeep(input.user.tools ?? {}),
+    )
+    for (const [key, value] of Object.entries(enabled)) {
+      if (value === false) delete input.tools[key]
+    }
+    return input.tools
+  }
+}

+ 14 - 6
packages/opencode/src/session/message-v2.ts

@@ -348,7 +348,11 @@ export namespace MessageV2 {
     parentID: z.string(),
     parentID: z.string(),
     modelID: z.string(),
     modelID: z.string(),
     providerID: z.string(),
     providerID: z.string(),
+    /**
+     * @deprecated
+     */
     mode: z.string(),
     mode: z.string(),
+    agent: z.string(),
     path: z.object({
     path: z.object({
       cwd: z.string(),
       cwd: z.string(),
       root: z.string(),
       root: z.string(),
@@ -412,12 +416,7 @@ export namespace MessageV2 {
   })
   })
   export type WithParts = z.infer<typeof WithParts>
   export type WithParts = z.infer<typeof WithParts>
 
 
-  export function toModelMessage(
-    input: {
-      info: Info
-      parts: Part[]
-    }[],
-  ): ModelMessage[] {
+  export function toModelMessage(input: WithParts[]): ModelMessage[] {
     const result: UIMessage[] = []
     const result: UIMessage[] = []
 
 
     for (const msg of input) {
     for (const msg of input) {
@@ -461,6 +460,15 @@ export namespace MessageV2 {
       }
       }
 
 
       if (msg.info.role === "assistant") {
       if (msg.info.role === "assistant") {
+        if (
+          msg.info.error &&
+          !(
+            MessageV2.AbortedError.isInstance(msg.info.error) &&
+            msg.parts.some((part) => part.type !== "step-start" && part.type !== "reasoning")
+          )
+        ) {
+          continue
+        }
         const assistantMessage: UIMessage = {
         const assistantMessage: UIMessage = {
           id: msg.info.id,
           id: msg.info.id,
           role: "assistant",
           role: "assistant",

+ 3 - 12
packages/opencode/src/session/processor.ts

@@ -1,5 +1,4 @@
 import { MessageV2 } from "./message-v2"
 import { MessageV2 } from "./message-v2"
-import { streamText } from "ai"
 import { Log } from "@/util/log"
 import { Log } from "@/util/log"
 import { Identifier } from "@/id/id"
 import { Identifier } from "@/id/id"
 import { Session } from "."
 import { Session } from "."
@@ -12,6 +11,7 @@ import { SessionRetry } from "./retry"
 import { SessionStatus } from "./status"
 import { SessionStatus } from "./status"
 import { Plugin } from "@/plugin"
 import { Plugin } from "@/plugin"
 import type { Provider } from "@/provider/provider"
 import type { Provider } from "@/provider/provider"
+import { LLM } from "./llm"
 import { Config } from "@/config/config"
 import { Config } from "@/config/config"
 
 
 export namespace SessionProcessor {
 export namespace SessionProcessor {
@@ -21,15 +21,6 @@ export namespace SessionProcessor {
   export type Info = Awaited<ReturnType<typeof create>>
   export type Info = Awaited<ReturnType<typeof create>>
   export type Result = Awaited<ReturnType<Info["process"]>>
   export type Result = Awaited<ReturnType<Info["process"]>>
 
 
-  export type StreamInput = Parameters<typeof streamText>[0]
-
-  export type TBD = {
-    model: {
-      modelID: string
-      providerID: string
-    }
-  }
-
   export function create(input: {
   export function create(input: {
     assistantMessage: MessageV2.Assistant
     assistantMessage: MessageV2.Assistant
     sessionID: string
     sessionID: string
@@ -48,14 +39,14 @@ export namespace SessionProcessor {
       partFromToolCall(toolCallID: string) {
       partFromToolCall(toolCallID: string) {
         return toolcalls[toolCallID]
         return toolcalls[toolCallID]
       },
       },
-      async process(streamInput: StreamInput) {
+      async process(streamInput: LLM.StreamInput) {
         log.info("process")
         log.info("process")
         const shouldBreak = (await Config.get()).experimental?.continue_loop_on_deny !== true
         const shouldBreak = (await Config.get()).experimental?.continue_loop_on_deny !== true
         while (true) {
         while (true) {
           try {
           try {
             let currentText: MessageV2.TextPart | undefined
             let currentText: MessageV2.TextPart | undefined
             let reasoningMap: Record<string, MessageV2.ReasoningPart> = {}
             let reasoningMap: Record<string, MessageV2.ReasoningPart> = {}
-            const stream = streamText(streamInput)
+            const stream = await LLM.stream(streamInput)
 
 
             for await (const value of stream.fullStream) {
             for await (const value of stream.fullStream) {
               input.abort.throwIfAborted()
               input.abort.throwIfAborted()

+ 72 - 269
packages/opencode/src/session/prompt.ts

@@ -5,32 +5,22 @@ import z from "zod"
 import { Identifier } from "../id/id"
 import { Identifier } from "../id/id"
 import { MessageV2 } from "./message-v2"
 import { MessageV2 } from "./message-v2"
 import { Log } from "../util/log"
 import { Log } from "../util/log"
-import { Flag } from "../flag/flag"
 import { SessionRevert } from "./revert"
 import { SessionRevert } from "./revert"
 import { Session } from "."
 import { Session } from "."
 import { Agent } from "../agent/agent"
 import { Agent } from "../agent/agent"
 import { Provider } from "../provider/provider"
 import { Provider } from "../provider/provider"
-import {
-  generateText,
-  type ModelMessage,
-  type Tool as AITool,
-  tool,
-  wrapLanguageModel,
-  stepCountIs,
-  jsonSchema,
-} from "ai"
+import { type Tool as AITool, tool, jsonSchema } from "ai"
 import { SessionCompaction } from "./compaction"
 import { SessionCompaction } from "./compaction"
 import { Instance } from "../project/instance"
 import { Instance } from "../project/instance"
 import { Bus } from "../bus"
 import { Bus } from "../bus"
 import { ProviderTransform } from "../provider/transform"
 import { ProviderTransform } from "../provider/transform"
 import { SystemPrompt } from "./system"
 import { SystemPrompt } from "./system"
 import { Plugin } from "../plugin"
 import { Plugin } from "../plugin"
-
 import PROMPT_PLAN from "../session/prompt/plan.txt"
 import PROMPT_PLAN from "../session/prompt/plan.txt"
 import BUILD_SWITCH from "../session/prompt/build-switch.txt"
 import BUILD_SWITCH from "../session/prompt/build-switch.txt"
 import MAX_STEPS from "../session/prompt/max-steps.txt"
 import MAX_STEPS from "../session/prompt/max-steps.txt"
 import { defer } from "../util/defer"
 import { defer } from "../util/defer"
-import { clone, mergeDeep, pipe } from "remeda"
+import { mergeDeep, pipe } from "remeda"
 import { ToolRegistry } from "../tool/registry"
 import { ToolRegistry } from "../tool/registry"
 import { Wildcard } from "../util/wildcard"
 import { Wildcard } from "../util/wildcard"
 import { MCP } from "../mcp"
 import { MCP } from "../mcp"
@@ -44,12 +34,13 @@ import { Command } from "../command"
 import { $, fileURLToPath } from "bun"
 import { $, fileURLToPath } from "bun"
 import { ConfigMarkdown } from "../config/markdown"
 import { ConfigMarkdown } from "../config/markdown"
 import { SessionSummary } from "./summary"
 import { SessionSummary } from "./summary"
-import { Config } from "../config/config"
 import { NamedError } from "@opencode-ai/util/error"
 import { NamedError } from "@opencode-ai/util/error"
 import { fn } from "@/util/fn"
 import { fn } from "@/util/fn"
 import { SessionProcessor } from "./processor"
 import { SessionProcessor } from "./processor"
 import { TaskTool } from "@/tool/task"
 import { TaskTool } from "@/tool/task"
 import { SessionStatus } from "./status"
 import { SessionStatus } from "./status"
+import { LLM } from "./llm"
+import { iife } from "@/util/iife"
 import { Shell } from "@/shell/shell"
 import { Shell } from "@/shell/shell"
 
 
 // @ts-ignore
 // @ts-ignore
@@ -96,8 +87,8 @@ export namespace SessionPrompt {
       .optional(),
       .optional(),
     agent: z.string().optional(),
     agent: z.string().optional(),
     noReply: z.boolean().optional(),
     noReply: z.boolean().optional(),
-    system: z.string().optional(),
     tools: z.record(z.string(), z.boolean()).optional(),
     tools: z.record(z.string(), z.boolean()).optional(),
+    system: z.string().optional(),
     parts: z.array(
     parts: z.array(
       z.discriminatedUnion("type", [
       z.discriminatedUnion("type", [
         MessageV2.TextPart.omit({
         MessageV2.TextPart.omit({
@@ -145,6 +136,20 @@ export namespace SessionPrompt {
   })
   })
   export type PromptInput = z.infer<typeof PromptInput>
   export type PromptInput = z.infer<typeof PromptInput>
 
 
+  export const prompt = fn(PromptInput, async (input) => {
+    const session = await Session.get(input.sessionID)
+    await SessionRevert.cleanup(session)
+
+    const message = await createUserMessage(input)
+    await Session.touch(input.sessionID)
+
+    if (input.noReply === true) {
+      return message
+    }
+
+    return loop(input.sessionID)
+  })
+
   export async function resolvePromptParts(template: string): Promise<PromptInput["parts"]> {
   export async function resolvePromptParts(template: string): Promise<PromptInput["parts"]> {
     const parts: PromptInput["parts"] = [
     const parts: PromptInput["parts"] = [
       {
       {
@@ -196,20 +201,6 @@ export namespace SessionPrompt {
     return parts
     return parts
   }
   }
 
 
-  export const prompt = fn(PromptInput, async (input) => {
-    const session = await Session.get(input.sessionID)
-    await SessionRevert.cleanup(session)
-
-    const message = await createUserMessage(input)
-    await Session.touch(input.sessionID)
-
-    if (input.noReply === true) {
-      return message
-    }
-
-    return loop(input.sessionID)
-  })
-
   function start(sessionID: string) {
   function start(sessionID: string) {
     const s = state()
     const s = state()
     if (s[sessionID]) return
     if (s[sessionID]) return
@@ -291,7 +282,6 @@ export namespace SessionPrompt {
         })
         })
 
 
       const model = await Provider.getModel(lastUser.model.providerID, lastUser.model.modelID)
       const model = await Provider.getModel(lastUser.model.providerID, lastUser.model.modelID)
-      const language = await Provider.getLanguage(model)
       const task = tasks.pop()
       const task = tasks.pop()
 
 
       // pending subtask
       // pending subtask
@@ -304,6 +294,7 @@ export namespace SessionPrompt {
           parentID: lastUser.id,
           parentID: lastUser.id,
           sessionID,
           sessionID,
           mode: task.agent,
           mode: task.agent,
+          agent: task.agent,
           path: {
           path: {
             cwd: Instance.directory,
             cwd: Instance.directory,
             root: Instance.worktree,
             root: Instance.worktree,
@@ -414,11 +405,6 @@ export namespace SessionPrompt {
           messages: msgs,
           messages: msgs,
           parentID: lastUser.id,
           parentID: lastUser.id,
           abort,
           abort,
-          agent: lastUser.agent,
-          model: {
-            providerID: model.providerID,
-            modelID: model.id,
-          },
           sessionID,
           sessionID,
           auto: task.auto,
           auto: task.auto,
         })
         })
@@ -442,7 +428,6 @@ export namespace SessionPrompt {
       }
       }
 
 
       // normal processing
       // normal processing
-      const cfg = await Config.get()
       const agent = await Agent.get(lastUser.agent)
       const agent = await Agent.get(lastUser.agent)
       const maxSteps = agent.maxSteps ?? Infinity
       const maxSteps = agent.maxSteps ?? Infinity
       const isLastStep = step >= maxSteps
       const isLastStep = step >= maxSteps
@@ -450,12 +435,14 @@ export namespace SessionPrompt {
         messages: msgs,
         messages: msgs,
         agent,
         agent,
       })
       })
+
       const processor = SessionProcessor.create({
       const processor = SessionProcessor.create({
         assistantMessage: (await Session.updateMessage({
         assistantMessage: (await Session.updateMessage({
           id: Identifier.ascending("message"),
           id: Identifier.ascending("message"),
           parentID: lastUser.id,
           parentID: lastUser.id,
           role: "assistant",
           role: "assistant",
           mode: agent.name,
           mode: agent.name,
+          agent: agent.name,
           path: {
           path: {
             cwd: Instance.directory,
             cwd: Instance.directory,
             root: Instance.worktree,
             root: Instance.worktree,
@@ -478,12 +465,6 @@ export namespace SessionPrompt {
         model,
         model,
         abort,
         abort,
       })
       })
-      const system = await resolveSystemPrompt({
-        model,
-        agent,
-        system: lastUser.system,
-        isLastStep,
-      })
       const tools = await resolveTools({
       const tools = await resolveTools({
         agent,
         agent,
         sessionID,
         sessionID,
@@ -491,30 +472,6 @@ export namespace SessionPrompt {
         tools: lastUser.tools,
         tools: lastUser.tools,
         processor,
         processor,
       })
       })
-      const provider = await Provider.getProvider(model.providerID)
-      const params = await Plugin.trigger(
-        "chat.params",
-        {
-          sessionID: sessionID,
-          agent: lastUser.agent,
-          model: model,
-          provider,
-          message: lastUser,
-        },
-        {
-          temperature: model.capabilities.temperature
-            ? (agent.temperature ?? ProviderTransform.temperature(model))
-            : undefined,
-          topP: agent.topP ?? ProviderTransform.topP(model),
-          topK: ProviderTransform.topK(model),
-          options: pipe(
-            {},
-            mergeDeep(ProviderTransform.options(model, sessionID, provider?.options)),
-            mergeDeep(model.options),
-            mergeDeep(agent.options),
-          ),
-        },
-      )
 
 
       if (step === 1) {
       if (step === 1) {
         SessionSummary.summarize({
         SessionSummary.summarize({
@@ -523,135 +480,25 @@ export namespace SessionPrompt {
         })
         })
       }
       }
 
 
-      // Deep copy message history so that modifications made by plugins do not
-      // affect the original messages
-      const sessionMessages = clone(
-        msgs.filter((m) => {
-          if (m.info.role !== "assistant" || m.info.error === undefined) {
-            return true
-          }
-          if (
-            MessageV2.AbortedError.isInstance(m.info.error) &&
-            m.parts.some((part) => part.type !== "step-start" && part.type !== "reasoning")
-          ) {
-            return true
-          }
-          return false
-        }),
-      )
-
-      await Plugin.trigger("experimental.chat.messages.transform", {}, { messages: sessionMessages })
-
-      const messages: ModelMessage[] = [
-        ...system.map(
-          (x): ModelMessage => ({
-            role: "system",
-            content: x,
-          }),
-        ),
-        ...MessageV2.toModelMessage(sessionMessages),
-        ...(isLastStep
-          ? [
-              {
-                role: "assistant" as const,
-                content: MAX_STEPS,
-              },
-            ]
-          : []),
-      ]
-
       const result = await processor.process({
       const result = await processor.process({
-        onError(error) {
-          log.error("stream error", {
-            error,
-          })
-        },
-        async experimental_repairToolCall(input) {
-          const lower = input.toolCall.toolName.toLowerCase()
-          if (lower !== input.toolCall.toolName && tools[lower]) {
-            log.info("repairing tool call", {
-              tool: input.toolCall.toolName,
-              repaired: lower,
-            })
-            return {
-              ...input.toolCall,
-              toolName: lower,
-            }
-          }
-          return {
-            ...input.toolCall,
-            input: JSON.stringify({
-              tool: input.toolCall.toolName,
-              error: input.error.message,
-            }),
-            toolName: "invalid",
-          }
-        },
-        headers: {
-          ...(model.providerID.startsWith("opencode")
-            ? {
-                "x-opencode-project": Instance.project.id,
-                "x-opencode-session": sessionID,
-                "x-opencode-request": lastUser.id,
-                "x-opencode-client": Flag.OPENCODE_CLIENT,
-              }
-            : undefined),
-          ...model.headers,
-        },
-        // set to 0, we handle loop
-        maxRetries: 0,
-        activeTools: Object.keys(tools).filter((x) => x !== "invalid"),
-        maxOutputTokens: ProviderTransform.maxOutputTokens(
-          model.api.npm,
-          params.options,
-          model.limit.output,
-          OUTPUT_TOKEN_MAX,
-        ),
-        abortSignal: abort,
-        providerOptions: ProviderTransform.providerOptions(model, params.options),
-        stopWhen: stepCountIs(1),
-        temperature: params.temperature,
-        topP: params.topP,
-        topK: params.topK,
-        toolChoice: isLastStep ? "none" : undefined,
-        messages,
-        tools: model.capabilities.toolcall === false ? undefined : tools,
-        model: wrapLanguageModel({
-          model: language,
-          middleware: [
-            {
-              async transformParams(args) {
-                if (args.type === "stream") {
-                  // @ts-expect-error - prompt types are compatible at runtime
-                  args.params.prompt = ProviderTransform.message(args.params.prompt, model)
-                }
-                // Transform tool schemas for provider compatibility
-                if (args.params.tools && Array.isArray(args.params.tools)) {
-                  args.params.tools = args.params.tools.map((tool: any) => {
-                    // Tools at middleware level have inputSchema, not parameters
-                    if (tool.inputSchema && typeof tool.inputSchema === "object") {
-                      // Transform the inputSchema for provider compatibility
-                      return {
-                        ...tool,
-                        inputSchema: ProviderTransform.schema(model, tool.inputSchema),
-                      }
-                    }
-                    // If no inputSchema, return tool unchanged
-                    return tool
-                  })
-                }
-                return args.params
-              },
-            },
-          ],
-        }),
-        experimental_telemetry: {
-          isEnabled: cfg.experimental?.openTelemetry,
-          metadata: {
-            userId: cfg.username ?? "unknown",
-            sessionId: sessionID,
-          },
-        },
+        user: lastUser,
+        agent,
+        abort,
+        sessionID,
+        system: [...(await SystemPrompt.environment()), ...(await SystemPrompt.custom())],
+        messages: [
+          ...MessageV2.toModelMessage(msgs),
+          ...(isLastStep
+            ? [
+                {
+                  role: "assistant" as const,
+                  content: MAX_STEPS,
+                },
+              ]
+            : []),
+        ],
+        tools,
+        model,
       })
       })
       if (result === "stop") break
       if (result === "stop") break
       continue
       continue
@@ -675,33 +522,6 @@ export namespace SessionPrompt {
     return Provider.defaultModel()
     return Provider.defaultModel()
   }
   }
 
 
-  async function resolveSystemPrompt(input: {
-    system?: string
-    agent: Agent.Info
-    model: Provider.Model
-    isLastStep?: boolean
-  }) {
-    let system = SystemPrompt.header(input.model.providerID)
-    system.push(
-      ...(() => {
-        if (input.system) return [input.system]
-        if (input.agent.prompt) return [input.agent.prompt]
-        return SystemPrompt.provider(input.model)
-      })(),
-    )
-    system.push(...(await SystemPrompt.environment()))
-    system.push(...(await SystemPrompt.custom()))
-
-    if (input.isLastStep) {
-      system.push(MAX_STEPS)
-    }
-
-    // max 2 system prompt messages for caching purposes
-    const [first, ...rest] = system
-    system = [first, rest.join("\n")]
-    return system
-  }
-
   async function resolveTools(input: {
   async function resolveTools(input: {
     agent: Agent.Info
     agent: Agent.Info
     model: Provider.Model
     model: Provider.Model
@@ -709,6 +529,7 @@ export namespace SessionPrompt {
     tools?: Record<string, boolean>
     tools?: Record<string, boolean>
     processor: SessionProcessor.Info
     processor: SessionProcessor.Info
   }) {
   }) {
+    using _ = log.time("resolveTools")
     const tools: Record<string, AITool> = {}
     const tools: Record<string, AITool> = {}
     const enabledTools = pipe(
     const enabledTools = pipe(
       input.agent.tools,
       input.agent.tools,
@@ -778,7 +599,6 @@ export namespace SessionPrompt {
         },
         },
       })
       })
     }
     }
-
     for (const [key, item] of Object.entries(await MCP.tools())) {
     for (const [key, item] of Object.entries(await MCP.tools())) {
       if (Wildcard.all(key, enabledTools) === false) continue
       if (Wildcard.all(key, enabledTools) === false) continue
       const execute = item.execute
       const execute = item.execute
@@ -857,7 +677,6 @@ export namespace SessionPrompt {
         created: Date.now(),
         created: Date.now(),
       },
       },
       tools: input.tools,
       tools: input.tools,
-      system: input.system,
       agent: agent.name,
       agent: agent.name,
       model: input.model ?? agent.model ?? (await lastModel(input.sessionID)),
       model: input.model ?? agent.model ?? (await lastModel(input.sessionID)),
     }
     }
@@ -1148,7 +967,7 @@ export namespace SessionPrompt {
         synthetic: true,
         synthetic: true,
       })
       })
     }
     }
-    const wasPlan = input.messages.some((msg) => msg.info.role === "assistant" && msg.info.mode === "plan")
+    const wasPlan = input.messages.some((msg) => msg.info.role === "assistant" && msg.info.agent === "plan")
     if (wasPlan && input.agent.name === "build") {
     if (wasPlan && input.agent.name === "build") {
       userMessage.parts.push({
       userMessage.parts.push({
         id: Identifier.ascending("part"),
         id: Identifier.ascending("part"),
@@ -1216,6 +1035,7 @@ export namespace SessionPrompt {
       sessionID: input.sessionID,
       sessionID: input.sessionID,
       parentID: userMsg.id,
       parentID: userMsg.id,
       mode: input.agent,
       mode: input.agent,
+      agent: input.agent,
       cost: 0,
       cost: 0,
       path: {
       path: {
         cwd: Instance.directory,
         cwd: Instance.directory,
@@ -1510,28 +1330,24 @@ export namespace SessionPrompt {
       input.history.filter((m) => m.info.role === "user" && !m.parts.every((p) => "synthetic" in p && p.synthetic))
       input.history.filter((m) => m.info.role === "user" && !m.parts.every((p) => "synthetic" in p && p.synthetic))
         .length === 1
         .length === 1
     if (!isFirst) return
     if (!isFirst) return
-    const cfg = await Config.get()
-    const small =
-      (await Provider.getSmallModel(input.providerID)) ?? (await Provider.getModel(input.providerID, input.modelID))
-    const language = await Provider.getLanguage(small)
-    const provider = await Provider.getProvider(small.providerID)
-    const options = pipe(
-      {},
-      mergeDeep(ProviderTransform.options(small, input.session.id, provider?.options)),
-      mergeDeep(ProviderTransform.smallOptions(small)),
-      mergeDeep(small.options),
-    )
-    await generateText({
-      // use higher # for reasoning models since reasoning tokens eat up a lot of the budget
-      maxOutputTokens: small.capabilities.reasoning ? 3000 : 20,
-      providerOptions: ProviderTransform.providerOptions(small, options),
+    const agent = await Agent.get("title")
+    if (!agent) return
+    const result = await LLM.stream({
+      agent,
+      user: input.message.info as MessageV2.User,
+      system: [],
+      small: true,
+      tools: {},
+      model: await iife(async () => {
+        if (agent.model) return await Provider.getModel(agent.model.providerID, agent.model.modelID)
+        return (
+          (await Provider.getSmallModel(input.providerID)) ?? (await Provider.getModel(input.providerID, input.modelID))
+        )
+      }),
+      abort: new AbortController().signal,
+      sessionID: input.session.id,
+      retries: 2,
       messages: [
       messages: [
-        ...SystemPrompt.title(small.providerID).map(
-          (x): ModelMessage => ({
-            role: "system",
-            content: x,
-          }),
-        ),
         {
         {
           role: "user",
           role: "user",
           content: "Generate a title for this conversation:\n",
           content: "Generate a title for this conversation:\n",
@@ -1555,32 +1371,19 @@ export namespace SessionPrompt {
           },
           },
         ]),
         ]),
       ],
       ],
-      headers: small.headers,
-      model: language,
-      experimental_telemetry: {
-        isEnabled: cfg.experimental?.openTelemetry,
-        metadata: {
-          userId: cfg.username ?? "unknown",
-          sessionId: input.session.id,
-        },
-      },
     })
     })
-      .then((result) => {
-        if (result.text)
-          return Session.update(input.session.id, (draft) => {
-            const cleaned = result.text
-              .replace(/<think>[\s\S]*?<\/think>\s*/g, "")
-              .split("\n")
-              .map((line) => line.trim())
-              .find((line) => line.length > 0)
-            if (!cleaned) return
-
-            const title = cleaned.length > 100 ? cleaned.substring(0, 97) + "..." : cleaned
-            draft.title = title
-          })
-      })
-      .catch((error) => {
-        log.error("failed to generate title", { error, model: small.id })
+    const text = await result.text.catch((err) => log.error("failed to generate title", { error: err }))
+    if (text)
+      return Session.update(input.session.id, (draft) => {
+        const cleaned = text
+          .replace(/<think>[\s\S]*?<\/think>\s*/g, "")
+          .split("\n")
+          .map((line) => line.trim())
+          .find((line) => line.length > 0)
+        if (!cleaned) return
+
+        const title = cleaned.length > 100 ? cleaned.substring(0, 97) + "..." : cleaned
+        draft.title = title
       })
       })
   }
   }
 }
 }

+ 37 - 55
packages/opencode/src/session/summary.ts

@@ -1,20 +1,21 @@
 import { Provider } from "@/provider/provider"
 import { Provider } from "@/provider/provider"
-import { Config } from "@/config/config"
+
 import { fn } from "@/util/fn"
 import { fn } from "@/util/fn"
 import z from "zod"
 import z from "zod"
 import { Session } from "."
 import { Session } from "."
-import { generateText, type ModelMessage } from "ai"
+
 import { MessageV2 } from "./message-v2"
 import { MessageV2 } from "./message-v2"
 import { Identifier } from "@/id/id"
 import { Identifier } from "@/id/id"
 import { Snapshot } from "@/snapshot"
 import { Snapshot } from "@/snapshot"
-import { ProviderTransform } from "@/provider/transform"
-import { SystemPrompt } from "./system"
+
 import { Log } from "@/util/log"
 import { Log } from "@/util/log"
 import path from "path"
 import path from "path"
 import { Instance } from "@/project/instance"
 import { Instance } from "@/project/instance"
 import { Storage } from "@/storage/storage"
 import { Storage } from "@/storage/storage"
 import { Bus } from "@/bus"
 import { Bus } from "@/bus"
-import { mergeDeep, pipe } from "remeda"
+
+import { LLM } from "./llm"
+import { Agent } from "@/agent/agent"
 
 
 export namespace SessionSummary {
 export namespace SessionSummary {
   const log = Log.create({ service: "session.summary" })
   const log = Log.create({ service: "session.summary" })
@@ -61,7 +62,6 @@ export namespace SessionSummary {
   }
   }
 
 
   async function summarizeMessage(input: { messageID: string; messages: MessageV2.WithParts[] }) {
   async function summarizeMessage(input: { messageID: string; messages: MessageV2.WithParts[] }) {
-    const cfg = await Config.get()
     const messages = input.messages.filter(
     const messages = input.messages.filter(
       (m) => m.info.id === input.messageID || (m.info.role === "assistant" && m.info.parentID === input.messageID),
       (m) => m.info.id === input.messageID || (m.info.role === "assistant" && m.info.parentID === input.messageID),
     )
     )
@@ -78,27 +78,17 @@ export namespace SessionSummary {
     const small =
     const small =
       (await Provider.getSmallModel(assistantMsg.providerID)) ??
       (await Provider.getSmallModel(assistantMsg.providerID)) ??
       (await Provider.getModel(assistantMsg.providerID, assistantMsg.modelID))
       (await Provider.getModel(assistantMsg.providerID, assistantMsg.modelID))
-    const language = await Provider.getLanguage(small)
-
-    const options = pipe(
-      {},
-      mergeDeep(ProviderTransform.options(small, assistantMsg.sessionID)),
-      mergeDeep(ProviderTransform.smallOptions(small)),
-      mergeDeep(small.options),
-    )
 
 
     const textPart = msgWithParts.parts.find((p) => p.type === "text" && !p.synthetic) as MessageV2.TextPart
     const textPart = msgWithParts.parts.find((p) => p.type === "text" && !p.synthetic) as MessageV2.TextPart
     if (textPart && !userMsg.summary?.title) {
     if (textPart && !userMsg.summary?.title) {
-      const result = await generateText({
-        maxOutputTokens: small.capabilities.reasoning ? 1500 : 20,
-        providerOptions: ProviderTransform.providerOptions(small, options),
+      const agent = await Agent.get("title")
+      const stream = await LLM.stream({
+        agent,
+        user: userMsg,
+        tools: {},
+        model: agent.model ? await Provider.getModel(agent.model.providerID, agent.model.modelID) : small,
+        small: true,
         messages: [
         messages: [
-          ...SystemPrompt.title(small.providerID).map(
-            (x): ModelMessage => ({
-              role: "system",
-              content: x,
-            }),
-          ),
           {
           {
             role: "user" as const,
             role: "user" as const,
             content: `
             content: `
@@ -109,18 +99,14 @@ export namespace SessionSummary {
             `,
             `,
           },
           },
         ],
         ],
-        headers: small.headers,
-        model: language,
-        experimental_telemetry: {
-          isEnabled: cfg.experimental?.openTelemetry,
-          metadata: {
-            userId: cfg.username ?? "unknown",
-            sessionId: assistantMsg.sessionID,
-          },
-        },
+        abort: new AbortController().signal,
+        sessionID: userMsg.sessionID,
+        system: [],
+        retries: 3,
       })
       })
-      log.info("title", { title: result.text })
-      userMsg.summary.title = result.text
+      const result = await stream.text
+      log.info("title", { title: result })
+      userMsg.summary.title = result
       await Session.updateMessage(userMsg)
       await Session.updateMessage(userMsg)
     }
     }
 
 
@@ -138,34 +124,30 @@ export namespace SessionSummary {
             }
             }
           }
           }
         }
         }
-        const result = await generateText({
-          model: language,
-          maxOutputTokens: 100,
-          providerOptions: ProviderTransform.providerOptions(small, options),
+        const summaryAgent = await Agent.get("summary")
+        const stream = await LLM.stream({
+          agent: summaryAgent,
+          user: userMsg,
+          tools: {},
+          model: summaryAgent.model
+            ? await Provider.getModel(summaryAgent.model.providerID, summaryAgent.model.modelID)
+            : small,
+          small: true,
           messages: [
           messages: [
-            ...SystemPrompt.summarize(small.providerID).map(
-              (x): ModelMessage => ({
-                role: "system",
-                content: x,
-              }),
-            ),
             ...MessageV2.toModelMessage(messages),
             ...MessageV2.toModelMessage(messages),
             {
             {
-              role: "user",
+              role: "user" as const,
               content: `Summarize the above conversation according to your system prompts.`,
               content: `Summarize the above conversation according to your system prompts.`,
             },
             },
           ],
           ],
-          headers: small.headers,
-          experimental_telemetry: {
-            isEnabled: cfg.experimental?.openTelemetry,
-            metadata: {
-              userId: cfg.username ?? "unknown",
-              sessionId: assistantMsg.sessionID,
-            },
-          },
-        }).catch(() => {})
+          abort: new AbortController().signal,
+          sessionID: userMsg.sessionID,
+          system: [],
+          retries: 3,
+        })
+        const result = await stream.text
         if (result) {
         if (result) {
-          userMsg.summary.body = result.text
+          userMsg.summary.body = result
         }
         }
       }
       }
       await Session.updateMessage(userMsg)
       await Session.updateMessage(userMsg)

+ 1 - 29
packages/opencode/src/session/system.ts

@@ -14,8 +14,7 @@ import PROMPT_BEAST from "./prompt/beast.txt"
 import PROMPT_GEMINI from "./prompt/gemini.txt"
 import PROMPT_GEMINI from "./prompt/gemini.txt"
 import PROMPT_ANTHROPIC_SPOOF from "./prompt/anthropic_spoof.txt"
 import PROMPT_ANTHROPIC_SPOOF from "./prompt/anthropic_spoof.txt"
 import PROMPT_COMPACTION from "./prompt/compaction.txt"
 import PROMPT_COMPACTION from "./prompt/compaction.txt"
-import PROMPT_SUMMARIZE from "./prompt/summarize.txt"
-import PROMPT_TITLE from "./prompt/title.txt"
+
 import PROMPT_CODEX from "./prompt/codex.txt"
 import PROMPT_CODEX from "./prompt/codex.txt"
 import type { Provider } from "@/provider/provider"
 import type { Provider } from "@/provider/provider"
 
 
@@ -118,31 +117,4 @@ export namespace SystemPrompt {
     )
     )
     return Promise.all(found).then((result) => result.filter(Boolean))
     return Promise.all(found).then((result) => result.filter(Boolean))
   }
   }
-
-  export function compaction(providerID: string) {
-    switch (providerID) {
-      case "anthropic":
-        return [PROMPT_ANTHROPIC_SPOOF.trim(), PROMPT_COMPACTION]
-      default:
-        return [PROMPT_COMPACTION]
-    }
-  }
-
-  export function summarize(providerID: string) {
-    switch (providerID) {
-      case "anthropic":
-        return [PROMPT_ANTHROPIC_SPOOF.trim(), PROMPT_SUMMARIZE]
-      default:
-        return [PROMPT_SUMMARIZE]
-    }
-  }
-
-  export function title(providerID: string) {
-    switch (providerID) {
-      case "anthropic":
-        return [PROMPT_ANTHROPIC_SPOOF.trim(), PROMPT_TITLE]
-      default:
-        return [PROMPT_TITLE]
-    }
-  }
 }
 }

+ 0 - 1
packages/opencode/src/tool/bash.ts

@@ -50,7 +50,6 @@ const parser = lazy(async () => {
 })
 })
 
 
 // TODO: we may wanna rename this tool so it works better on other shells
 // TODO: we may wanna rename this tool so it works better on other shells
-
 export const BashTool = Tool.define("bash", async () => {
 export const BashTool = Tool.define("bash", async () => {
   const shell = Shell.acceptable()
   const shell = Shell.acceptable()
   log.info("bash tool using shell", { shell })
   log.info("bash tool using shell", { shell })

+ 10 - 4
packages/opencode/src/tool/registry.ts

@@ -21,8 +21,11 @@ import { Plugin } from "../plugin"
 import { WebSearchTool } from "./websearch"
 import { WebSearchTool } from "./websearch"
 import { CodeSearchTool } from "./codesearch"
 import { CodeSearchTool } from "./codesearch"
 import { Flag } from "@/flag/flag"
 import { Flag } from "@/flag/flag"
+import { Log } from "@/util/log"
 
 
 export namespace ToolRegistry {
 export namespace ToolRegistry {
+  const log = Log.create({ service: "tool.registry" })
+
   export const state = Instance.state(async () => {
   export const state = Instance.state(async () => {
     const custom = [] as Tool.Info[]
     const custom = [] as Tool.Info[]
     const glob = new Bun.Glob("tool/*.{js,ts}")
     const glob = new Bun.Glob("tool/*.{js,ts}")
@@ -119,10 +122,13 @@ export namespace ToolRegistry {
           }
           }
           return true
           return true
         })
         })
-        .map(async (t) => ({
-          id: t.id,
-          ...(await t.init()),
-        })),
+        .map(async (t) => {
+          using _ = log.time(t.id)
+          return {
+            id: t.id,
+            ...(await t.init()),
+          }
+        }),
     )
     )
     return result
     return result
   }
   }

+ 4 - 4
packages/sdk/js/src/v2/gen/sdk.gen.ts

@@ -1203,10 +1203,10 @@ export class Session extends HeyApiClient {
       }
       }
       agent?: string
       agent?: string
       noReply?: boolean
       noReply?: boolean
-      system?: string
       tools?: {
       tools?: {
         [key: string]: boolean
         [key: string]: boolean
       }
       }
+      system?: string
       parts?: Array<TextPartInput | FilePartInput | AgentPartInput | SubtaskPartInput>
       parts?: Array<TextPartInput | FilePartInput | AgentPartInput | SubtaskPartInput>
     },
     },
     options?: Options<never, ThrowOnError>,
     options?: Options<never, ThrowOnError>,
@@ -1222,8 +1222,8 @@ export class Session extends HeyApiClient {
             { in: "body", key: "model" },
             { in: "body", key: "model" },
             { in: "body", key: "agent" },
             { in: "body", key: "agent" },
             { in: "body", key: "noReply" },
             { in: "body", key: "noReply" },
-            { in: "body", key: "system" },
             { in: "body", key: "tools" },
             { in: "body", key: "tools" },
+            { in: "body", key: "system" },
             { in: "body", key: "parts" },
             { in: "body", key: "parts" },
           ],
           ],
         },
         },
@@ -1289,10 +1289,10 @@ export class Session extends HeyApiClient {
       }
       }
       agent?: string
       agent?: string
       noReply?: boolean
       noReply?: boolean
-      system?: string
       tools?: {
       tools?: {
         [key: string]: boolean
         [key: string]: boolean
       }
       }
+      system?: string
       parts?: Array<TextPartInput | FilePartInput | AgentPartInput | SubtaskPartInput>
       parts?: Array<TextPartInput | FilePartInput | AgentPartInput | SubtaskPartInput>
     },
     },
     options?: Options<never, ThrowOnError>,
     options?: Options<never, ThrowOnError>,
@@ -1308,8 +1308,8 @@ export class Session extends HeyApiClient {
             { in: "body", key: "model" },
             { in: "body", key: "model" },
             { in: "body", key: "agent" },
             { in: "body", key: "agent" },
             { in: "body", key: "noReply" },
             { in: "body", key: "noReply" },
-            { in: "body", key: "system" },
             { in: "body", key: "tools" },
             { in: "body", key: "tools" },
+            { in: "body", key: "system" },
             { in: "body", key: "parts" },
             { in: "body", key: "parts" },
           ],
           ],
         },
         },

+ 41 - 39
packages/sdk/js/src/v2/gen/types.gen.ts

@@ -147,6 +147,7 @@ export type AssistantMessage = {
   modelID: string
   modelID: string
   providerID: string
   providerID: string
   mode: string
   mode: string
+  agent: string
   path: {
   path: {
     cwd: string
     cwd: string
     root: string
     root: string
@@ -475,6 +476,40 @@ export type EventPermissionReplied = {
   }
   }
 }
 }
 
 
+export type EventFileEdited = {
+  type: "file.edited"
+  properties: {
+    file: string
+  }
+}
+
+export type Todo = {
+  /**
+   * Brief description of the task
+   */
+  content: string
+  /**
+   * Current status of the task: pending, in_progress, completed, cancelled
+   */
+  status: string
+  /**
+   * Priority level of the task: high, medium, low
+   */
+  priority: string
+  /**
+   * Unique identifier for the todo item
+   */
+  id: string
+}
+
+export type EventTodoUpdated = {
+  type: "todo.updated"
+  properties: {
+    sessionID: string
+    todos: Array<Todo>
+  }
+}
+
 export type SessionStatus =
 export type SessionStatus =
   | {
   | {
       type: "idle"
       type: "idle"
@@ -511,40 +546,6 @@ export type EventSessionCompacted = {
   }
   }
 }
 }
 
 
-export type EventFileEdited = {
-  type: "file.edited"
-  properties: {
-    file: string
-  }
-}
-
-export type Todo = {
-  /**
-   * Brief description of the task
-   */
-  content: string
-  /**
-   * Current status of the task: pending, in_progress, completed, cancelled
-   */
-  status: string
-  /**
-   * Priority level of the task: high, medium, low
-   */
-  priority: string
-  /**
-   * Unique identifier for the todo item
-   */
-  id: string
-}
-
-export type EventTodoUpdated = {
-  type: "todo.updated"
-  properties: {
-    sessionID: string
-    todos: Array<Todo>
-  }
-}
-
 export type EventCommandExecuted = {
 export type EventCommandExecuted = {
   type: "command.executed"
   type: "command.executed"
   properties: {
   properties: {
@@ -745,11 +746,11 @@ export type Event =
   | EventMessagePartRemoved
   | EventMessagePartRemoved
   | EventPermissionUpdated
   | EventPermissionUpdated
   | EventPermissionReplied
   | EventPermissionReplied
+  | EventFileEdited
+  | EventTodoUpdated
   | EventSessionStatus
   | EventSessionStatus
   | EventSessionIdle
   | EventSessionIdle
   | EventSessionCompacted
   | EventSessionCompacted
-  | EventFileEdited
-  | EventTodoUpdated
   | EventCommandExecuted
   | EventCommandExecuted
   | EventSessionCreated
   | EventSessionCreated
   | EventSessionUpdated
   | EventSessionUpdated
@@ -1738,7 +1739,8 @@ export type Agent = {
   name: string
   name: string
   description?: string
   description?: string
   mode: "subagent" | "primary" | "all"
   mode: "subagent" | "primary" | "all"
-  builtIn: boolean
+  native?: boolean
+  hidden?: boolean
   topP?: number
   topP?: number
   temperature?: number
   temperature?: number
   color?: string
   color?: string
@@ -2801,10 +2803,10 @@ export type SessionPromptData = {
     }
     }
     agent?: string
     agent?: string
     noReply?: boolean
     noReply?: boolean
-    system?: string
     tools?: {
     tools?: {
       [key: string]: boolean
       [key: string]: boolean
     }
     }
+    system?: string
     parts: Array<TextPartInput | FilePartInput | AgentPartInput | SubtaskPartInput>
     parts: Array<TextPartInput | FilePartInput | AgentPartInput | SubtaskPartInput>
   }
   }
   path: {
   path: {
@@ -2896,10 +2898,10 @@ export type SessionPromptAsyncData = {
     }
     }
     agent?: string
     agent?: string
     noReply?: boolean
     noReply?: boolean
-    system?: string
     tools?: {
     tools?: {
       [key: string]: boolean
       [key: string]: boolean
     }
     }
+    system?: string
     parts: Array<TextPartInput | FilePartInput | AgentPartInput | SubtaskPartInput>
     parts: Array<TextPartInput | FilePartInput | AgentPartInput | SubtaskPartInput>
   }
   }
   path: {
   path: {

+ 86 - 79
packages/sdk/openapi.json

@@ -1997,9 +1997,6 @@
                   "noReply": {
                   "noReply": {
                     "type": "boolean"
                     "type": "boolean"
                   },
                   },
-                  "system": {
-                    "type": "string"
-                  },
                   "tools": {
                   "tools": {
                     "type": "object",
                     "type": "object",
                     "propertyNames": {
                     "propertyNames": {
@@ -2009,6 +2006,9 @@
                       "type": "boolean"
                       "type": "boolean"
                     }
                     }
                   },
                   },
+                  "system": {
+                    "type": "string"
+                  },
                   "parts": {
                   "parts": {
                     "type": "array",
                     "type": "array",
                     "items": {
                     "items": {
@@ -2202,9 +2202,6 @@
                   "noReply": {
                   "noReply": {
                     "type": "boolean"
                     "type": "boolean"
                   },
                   },
-                  "system": {
-                    "type": "string"
-                  },
                   "tools": {
                   "tools": {
                     "type": "object",
                     "type": "object",
                     "propertyNames": {
                     "propertyNames": {
@@ -2214,6 +2211,9 @@
                       "type": "boolean"
                       "type": "boolean"
                     }
                     }
                   },
                   },
+                  "system": {
+                    "type": "string"
+                  },
                   "parts": {
                   "parts": {
                     "type": "array",
                     "type": "array",
                     "items": {
                     "items": {
@@ -5193,6 +5193,9 @@
           "mode": {
           "mode": {
             "type": "string"
             "type": "string"
           },
           },
+          "agent": {
+            "type": "string"
+          },
           "path": {
           "path": {
             "type": "object",
             "type": "object",
             "properties": {
             "properties": {
@@ -5251,6 +5254,7 @@
           "modelID",
           "modelID",
           "providerID",
           "providerID",
           "mode",
           "mode",
+          "agent",
           "path",
           "path",
           "cost",
           "cost",
           "tokens"
           "tokens"
@@ -6152,6 +6156,72 @@
         },
         },
         "required": ["type", "properties"]
         "required": ["type", "properties"]
       },
       },
+      "Event.file.edited": {
+        "type": "object",
+        "properties": {
+          "type": {
+            "type": "string",
+            "const": "file.edited"
+          },
+          "properties": {
+            "type": "object",
+            "properties": {
+              "file": {
+                "type": "string"
+              }
+            },
+            "required": ["file"]
+          }
+        },
+        "required": ["type", "properties"]
+      },
+      "Todo": {
+        "type": "object",
+        "properties": {
+          "content": {
+            "description": "Brief description of the task",
+            "type": "string"
+          },
+          "status": {
+            "description": "Current status of the task: pending, in_progress, completed, cancelled",
+            "type": "string"
+          },
+          "priority": {
+            "description": "Priority level of the task: high, medium, low",
+            "type": "string"
+          },
+          "id": {
+            "description": "Unique identifier for the todo item",
+            "type": "string"
+          }
+        },
+        "required": ["content", "status", "priority", "id"]
+      },
+      "Event.todo.updated": {
+        "type": "object",
+        "properties": {
+          "type": {
+            "type": "string",
+            "const": "todo.updated"
+          },
+          "properties": {
+            "type": "object",
+            "properties": {
+              "sessionID": {
+                "type": "string"
+              },
+              "todos": {
+                "type": "array",
+                "items": {
+                  "$ref": "#/components/schemas/Todo"
+                }
+              }
+            },
+            "required": ["sessionID", "todos"]
+          }
+        },
+        "required": ["type", "properties"]
+      },
       "SessionStatus": {
       "SessionStatus": {
         "anyOf": [
         "anyOf": [
           {
           {
@@ -6255,72 +6325,6 @@
         },
         },
         "required": ["type", "properties"]
         "required": ["type", "properties"]
       },
       },
-      "Event.file.edited": {
-        "type": "object",
-        "properties": {
-          "type": {
-            "type": "string",
-            "const": "file.edited"
-          },
-          "properties": {
-            "type": "object",
-            "properties": {
-              "file": {
-                "type": "string"
-              }
-            },
-            "required": ["file"]
-          }
-        },
-        "required": ["type", "properties"]
-      },
-      "Todo": {
-        "type": "object",
-        "properties": {
-          "content": {
-            "description": "Brief description of the task",
-            "type": "string"
-          },
-          "status": {
-            "description": "Current status of the task: pending, in_progress, completed, cancelled",
-            "type": "string"
-          },
-          "priority": {
-            "description": "Priority level of the task: high, medium, low",
-            "type": "string"
-          },
-          "id": {
-            "description": "Unique identifier for the todo item",
-            "type": "string"
-          }
-        },
-        "required": ["content", "status", "priority", "id"]
-      },
-      "Event.todo.updated": {
-        "type": "object",
-        "properties": {
-          "type": {
-            "type": "string",
-            "const": "todo.updated"
-          },
-          "properties": {
-            "type": "object",
-            "properties": {
-              "sessionID": {
-                "type": "string"
-              },
-              "todos": {
-                "type": "array",
-                "items": {
-                  "$ref": "#/components/schemas/Todo"
-                }
-              }
-            },
-            "required": ["sessionID", "todos"]
-          }
-        },
-        "required": ["type", "properties"]
-      },
       "Event.command.executed": {
       "Event.command.executed": {
         "type": "object",
         "type": "object",
         "properties": {
         "properties": {
@@ -6887,19 +6891,19 @@
             "$ref": "#/components/schemas/Event.permission.replied"
             "$ref": "#/components/schemas/Event.permission.replied"
           },
           },
           {
           {
-            "$ref": "#/components/schemas/Event.session.status"
+            "$ref": "#/components/schemas/Event.file.edited"
           },
           },
           {
           {
-            "$ref": "#/components/schemas/Event.session.idle"
+            "$ref": "#/components/schemas/Event.todo.updated"
           },
           },
           {
           {
-            "$ref": "#/components/schemas/Event.session.compacted"
+            "$ref": "#/components/schemas/Event.session.status"
           },
           },
           {
           {
-            "$ref": "#/components/schemas/Event.file.edited"
+            "$ref": "#/components/schemas/Event.session.idle"
           },
           },
           {
           {
-            "$ref": "#/components/schemas/Event.todo.updated"
+            "$ref": "#/components/schemas/Event.session.compacted"
           },
           },
           {
           {
             "$ref": "#/components/schemas/Event.command.executed"
             "$ref": "#/components/schemas/Event.command.executed"
@@ -8920,7 +8924,10 @@
             "type": "string",
             "type": "string",
             "enum": ["subagent", "primary", "all"]
             "enum": ["subagent", "primary", "all"]
           },
           },
-          "builtIn": {
+          "native": {
+            "type": "boolean"
+          },
+          "hidden": {
             "type": "boolean"
             "type": "boolean"
           },
           },
           "topP": {
           "topP": {
@@ -9001,7 +9008,7 @@
             "maximum": 9007199254740991
             "maximum": 9007199254740991
           }
           }
         },
         },
-        "required": ["name", "mode", "builtIn", "permission", "tools", "options"]
+        "required": ["name", "mode", "permission", "tools", "options"]
       },
       },
       "MCPStatusConnected": {
       "MCPStatusConnected": {
         "type": "object",
         "type": "object",