Просмотр исходного кода

better interleaved thinking support (#5298)

Aiden Cline 4 месяцев назад
Родитель
Commit
df64612d54

+ 1 - 3
.opencode/opencode.jsonc

@@ -7,9 +7,7 @@
   "instructions": ["STYLE_GUIDE.md"],
   "instructions": ["STYLE_GUIDE.md"],
   "provider": {
   "provider": {
     "opencode": {
     "opencode": {
-      "options": {
-        // "baseURL": "http://localhost:8080",
-      },
+      "options": {},
     },
     },
   },
   },
   "mcp": {
   "mcp": {

+ 0 - 1
packages/opencode/src/acp/agent.ts

@@ -25,7 +25,6 @@ import { Provider } from "../provider/provider"
 import { Installation } from "@/installation"
 import { Installation } from "@/installation"
 import { MessageV2 } from "@/session/message-v2"
 import { MessageV2 } from "@/session/message-v2"
 import { Config } from "@/config/config"
 import { Config } from "@/config/config"
-import { MCP } from "@/mcp"
 import { Todo } from "@/session/todo"
 import { Todo } from "@/session/todo"
 import { z } from "zod"
 import { z } from "zod"
 import { LoadAPIKeyError } from "ai"
 import { LoadAPIKeyError } from "ai"

+ 9 - 1
packages/opencode/src/lsp/server.ts

@@ -211,7 +211,15 @@ export namespace LSPServer {
 
 
   export const Biome: Info = {
   export const Biome: Info = {
     id: "biome",
     id: "biome",
-    root: NearestRoot(["biome.json", "biome.jsonc", "package-lock.json", "bun.lockb", "bun.lock", "pnpm-lock.yaml", "yarn.lock"]),
+    root: NearestRoot([
+      "biome.json",
+      "biome.jsonc",
+      "package-lock.json",
+      "bun.lockb",
+      "bun.lock",
+      "pnpm-lock.yaml",
+      "yarn.lock",
+    ]),
     extensions: [
     extensions: [
       ".ts",
       ".ts",
       ".tsx",
       ".tsx",

+ 10 - 0
packages/opencode/src/provider/models.ts

@@ -17,6 +17,16 @@ export namespace ModelsDev {
     reasoning: z.boolean(),
     reasoning: z.boolean(),
     temperature: z.boolean(),
     temperature: z.boolean(),
     tool_call: z.boolean(),
     tool_call: z.boolean(),
+    interleaved: z
+      .union([
+        z.literal(true),
+        z
+          .object({
+            field: z.enum(["reasoning_content", "reasoning_details"]),
+          })
+          .strict(),
+      ])
+      .optional(),
     cost: z
     cost: z
       .object({
       .object({
         input: z.number(),
         input: z.number(),

+ 8 - 0
packages/opencode/src/provider/provider.ts

@@ -349,6 +349,12 @@ export namespace Provider {
           video: z.boolean(),
           video: z.boolean(),
           pdf: z.boolean(),
           pdf: z.boolean(),
         }),
         }),
+        interleaved: z.union([
+          z.boolean(),
+          z.object({
+            field: z.enum(["reasoning_content", "reasoning_details"]),
+          }),
+        ]),
       }),
       }),
       cost: z.object({
       cost: z.object({
         input: z.number(),
         input: z.number(),
@@ -450,6 +456,7 @@ export namespace Provider {
           video: model.modalities?.output?.includes("video") ?? false,
           video: model.modalities?.output?.includes("video") ?? false,
           pdf: model.modalities?.output?.includes("pdf") ?? false,
           pdf: model.modalities?.output?.includes("pdf") ?? false,
         },
         },
+        interleaved: model.interleaved ?? false,
       },
       },
     }
     }
   }
   }
@@ -567,6 +574,7 @@ export namespace Provider {
               video: model.modalities?.output?.includes("video") ?? existingModel?.capabilities.output.video ?? false,
               video: model.modalities?.output?.includes("video") ?? existingModel?.capabilities.output.video ?? false,
               pdf: model.modalities?.output?.includes("pdf") ?? existingModel?.capabilities.output.pdf ?? false,
               pdf: model.modalities?.output?.includes("pdf") ?? existingModel?.capabilities.output.pdf ?? false,
             },
             },
+            interleaved: model.interleaved ?? false,
           },
           },
           cost: {
           cost: {
             input: model?.cost?.input ?? existingModel?.cost?.input ?? 0,
             input: model?.cost?.input ?? existingModel?.cost?.input ?? 0,

+ 17 - 1
packages/opencode/src/provider/transform.ts

@@ -273,7 +273,23 @@ export namespace ProviderTransform {
     return options
     return options
   }
   }
 
 
-  export function providerOptions(model: Provider.Model, options: { [x: string]: any }) {
+  export function providerOptions(model: Provider.Model, options: { [x: string]: any }, messages: ModelMessage[]) {
+    if (model.capabilities.interleaved && typeof model.capabilities.interleaved === "object") {
+      const cot = []
+      const assistantMessages = messages.filter((msg) => msg.role === "assistant")
+      for (const msg of assistantMessages) {
+        for (const part of msg.content) {
+          if (typeof part === "string") {
+            continue
+          }
+          if (part.type === "reasoning") {
+            cot.push(part)
+          }
+        }
+      }
+      options[model.capabilities.interleaved.field] = cot
+    }
+
     switch (model.api.npm) {
     switch (model.api.npm) {
       case "@ai-sdk/openai":
       case "@ai-sdk/openai":
       case "@ai-sdk/azure":
       case "@ai-sdk/azure":

+ 1 - 0
packages/opencode/src/session/compaction.ts

@@ -143,6 +143,7 @@ export namespace SessionCompaction {
       providerOptions: ProviderTransform.providerOptions(
       providerOptions: ProviderTransform.providerOptions(
         model,
         model,
         pipe({}, mergeDeep(ProviderTransform.options(model, input.sessionID)), mergeDeep(model.options)),
         pipe({}, mergeDeep(ProviderTransform.options(model, input.sessionID)), mergeDeep(model.options)),
+        [],
       ),
       ),
       headers: model.headers,
       headers: model.headers,
       abortSignal: input.abort,
       abortSignal: input.abort,

+ 34 - 33
packages/opencode/src/session/prompt.ts

@@ -515,6 +515,37 @@ export namespace SessionPrompt {
         })
         })
       }
       }
 
 
+      const messages = [
+        ...system.map(
+          (x): ModelMessage => ({
+            role: "system",
+            content: x,
+          }),
+        ),
+        ...MessageV2.toModelMessage(
+          msgs.filter((m) => {
+            if (m.info.role !== "assistant" || m.info.error === undefined) {
+              return true
+            }
+            if (
+              MessageV2.AbortedError.isInstance(m.info.error) &&
+              m.parts.some((part) => part.type !== "step-start" && part.type !== "reasoning")
+            ) {
+              return true
+            }
+
+            return false
+          }),
+        ),
+        ...(isLastStep
+          ? [
+              {
+                role: "assistant" as const,
+                content: MAX_STEPS,
+              },
+            ]
+          : []),
+      ]
       const result = await processor.process({
       const result = await processor.process({
         onError(error) {
         onError(error) {
           log.error("stream error", {
           log.error("stream error", {
@@ -562,42 +593,12 @@ export namespace SessionPrompt {
           OUTPUT_TOKEN_MAX,
           OUTPUT_TOKEN_MAX,
         ),
         ),
         abortSignal: abort,
         abortSignal: abort,
-        providerOptions: ProviderTransform.providerOptions(model, params.options),
+        providerOptions: ProviderTransform.providerOptions(model, params.options, messages),
         stopWhen: stepCountIs(1),
         stopWhen: stepCountIs(1),
         temperature: params.temperature,
         temperature: params.temperature,
         topP: params.topP,
         topP: params.topP,
         toolChoice: isLastStep ? "none" : undefined,
         toolChoice: isLastStep ? "none" : undefined,
-        messages: [
-          ...system.map(
-            (x): ModelMessage => ({
-              role: "system",
-              content: x,
-            }),
-          ),
-          ...MessageV2.toModelMessage(
-            msgs.filter((m) => {
-              if (m.info.role !== "assistant" || m.info.error === undefined) {
-                return true
-              }
-              if (
-                MessageV2.AbortedError.isInstance(m.info.error) &&
-                m.parts.some((part) => part.type !== "step-start" && part.type !== "reasoning")
-              ) {
-                return true
-              }
-
-              return false
-            }),
-          ),
-          ...(isLastStep
-            ? [
-                {
-                  role: "assistant" as const,
-                  content: MAX_STEPS,
-                },
-              ]
-            : []),
-        ],
+        messages,
         tools: model.capabilities.toolcall === false ? undefined : tools,
         tools: model.capabilities.toolcall === false ? undefined : tools,
         model: wrapLanguageModel({
         model: wrapLanguageModel({
           model: language,
           model: language,
@@ -1464,7 +1465,7 @@ export namespace SessionPrompt {
     await generateText({
     await generateText({
       // use higher # for reasoning models since reasoning tokens eat up a lot of the budget
       // use higher # for reasoning models since reasoning tokens eat up a lot of the budget
       maxOutputTokens: small.capabilities.reasoning ? 3000 : 20,
       maxOutputTokens: small.capabilities.reasoning ? 3000 : 20,
-      providerOptions: ProviderTransform.providerOptions(small, options),
+      providerOptions: ProviderTransform.providerOptions(small, options, []),
       messages: [
       messages: [
         ...SystemPrompt.title(small.providerID).map(
         ...SystemPrompt.title(small.providerID).map(
           (x): ModelMessage => ({
           (x): ModelMessage => ({

+ 2 - 2
packages/opencode/src/session/summary.ts

@@ -91,7 +91,7 @@ export namespace SessionSummary {
     if (textPart && !userMsg.summary?.title) {
     if (textPart && !userMsg.summary?.title) {
       const result = await generateText({
       const result = await generateText({
         maxOutputTokens: small.capabilities.reasoning ? 1500 : 20,
         maxOutputTokens: small.capabilities.reasoning ? 1500 : 20,
-        providerOptions: ProviderTransform.providerOptions(small, options),
+        providerOptions: ProviderTransform.providerOptions(small, options, []),
         messages: [
         messages: [
           ...SystemPrompt.title(small.providerID).map(
           ...SystemPrompt.title(small.providerID).map(
             (x): ModelMessage => ({
             (x): ModelMessage => ({
@@ -144,7 +144,7 @@ export namespace SessionSummary {
         const result = await generateText({
         const result = await generateText({
           model: language,
           model: language,
           maxOutputTokens: 100,
           maxOutputTokens: 100,
-          providerOptions: ProviderTransform.providerOptions(small, options),
+          providerOptions: ProviderTransform.providerOptions(small, options, []),
           messages: [
           messages: [
             ...SystemPrompt.summarize(small.providerID).map(
             ...SystemPrompt.summarize(small.providerID).map(
               (x): ModelMessage => ({
               (x): ModelMessage => ({

+ 4 - 0
packages/opencode/test/provider/transform.test.ts

@@ -130,6 +130,7 @@ describe("ProviderTransform.message - DeepSeek reasoning content", () => {
         toolcall: true,
         toolcall: true,
         input: { text: true, audio: false, image: false, video: false, pdf: false },
         input: { text: true, audio: false, image: false, video: false, pdf: false },
         output: { text: true, audio: false, image: false, video: false, pdf: false },
         output: { text: true, audio: false, image: false, video: false, pdf: false },
+        interleaved: false,
       },
       },
       cost: {
       cost: {
         input: 0.001,
         input: 0.001,
@@ -184,6 +185,7 @@ describe("ProviderTransform.message - DeepSeek reasoning content", () => {
         toolcall: true,
         toolcall: true,
         input: { text: true, audio: false, image: false, video: false, pdf: false },
         input: { text: true, audio: false, image: false, video: false, pdf: false },
         output: { text: true, audio: false, image: false, video: false, pdf: false },
         output: { text: true, audio: false, image: false, video: false, pdf: false },
+        interleaved: false,
       },
       },
       cost: {
       cost: {
         input: 0.001,
         input: 0.001,
@@ -236,6 +238,7 @@ describe("ProviderTransform.message - DeepSeek reasoning content", () => {
         toolcall: true,
         toolcall: true,
         input: { text: true, audio: false, image: false, video: false, pdf: false },
         input: { text: true, audio: false, image: false, video: false, pdf: false },
         output: { text: true, audio: false, image: false, video: false, pdf: false },
         output: { text: true, audio: false, image: false, video: false, pdf: false },
+        interleaved: false,
       },
       },
       cost: {
       cost: {
         input: 0.001,
         input: 0.001,
@@ -281,6 +284,7 @@ describe("ProviderTransform.message - DeepSeek reasoning content", () => {
         toolcall: true,
         toolcall: true,
         input: { text: true, audio: false, image: true, video: false, pdf: false },
         input: { text: true, audio: false, image: true, video: false, pdf: false },
         output: { text: true, audio: false, image: false, video: false, pdf: false },
         output: { text: true, audio: false, image: false, video: false, pdf: false },
+        interleaved: false,
       },
       },
       cost: {
       cost: {
         input: 0.03,
         input: 0.03,

Разница между файлами не показана из-за большого размера файла
+ 106 - 361
packages/sdk/js/openapi.json


+ 15 - 0
packages/sdk/js/src/v2/gen/types.gen.ts

@@ -1044,6 +1044,11 @@ export type ProviderConfig = {
       reasoning?: boolean
       reasoning?: boolean
       temperature?: boolean
       temperature?: boolean
       tool_call?: boolean
       tool_call?: boolean
+      interleaved?:
+        | true
+        | {
+            field: "reasoning_content" | "reasoning_details"
+          }
       cost?: {
       cost?: {
         input: number
         input: number
         output: number
         output: number
@@ -1479,6 +1484,11 @@ export type Model = {
       video: boolean
       video: boolean
       pdf: boolean
       pdf: boolean
     }
     }
+    interleaved:
+      | boolean
+      | {
+          field: "reasoning_content" | "reasoning_details"
+        }
   }
   }
   cost: {
   cost: {
     input: number
     input: number
@@ -3026,6 +3036,11 @@ export type ProviderListResponses = {
           reasoning: boolean
           reasoning: boolean
           temperature: boolean
           temperature: boolean
           tool_call: boolean
           tool_call: boolean
+          interleaved?:
+            | true
+            | {
+                field: "reasoning_content" | "reasoning_details"
+              }
           cost?: {
           cost?: {
             input: number
             input: number
             output: number
             output: number

Некоторые файлы не были показаны из-за большого количества измененных файлов