
enable prompt caching for anthropic

Dax Raad 8 months ago
parent
commit
1a553e525f
2 changed files with 34 additions and 5 deletions
  1. packages/opencode/src/provider/provider.ts (+0 -3)
  2. packages/opencode/src/session/index.ts (+34 -2)

+ 0 - 3
packages/opencode/src/provider/provider.ts

@@ -268,7 +268,6 @@ export namespace Provider {
   }
 
   const TOOLS = [
-    BashTool,
     EditTool,
     WebFetchTool,
     GlobTool,
@@ -281,9 +280,7 @@ export namespace Provider {
     EditTool,
     // MultiEditTool,
     WriteTool,
-    TodoWriteTool,
     TaskTool,
-    TodoReadTool,
   ]
   const TOOL_MAPPING: Record<string, Tool.Info[]> = {
     anthropic: TOOLS.filter((t) => t.id !== "opencode.patch"),

+ 34 - 2
packages/opencode/src/session/index.ts

@@ -13,6 +13,7 @@ import {
   type LanguageModelUsage,
   type CoreMessage,
   type UIMessage,
+  type LanguageModelV1Middleware,
 } from "ai"
 import { z, ZodSchema } from "zod"
 import { Decimal } from "decimal.js"
@@ -234,6 +235,13 @@ export namespace Session {
             (x): CoreMessage => ({
               role: "system",
               content: x,
+              providerOptions: {
+                ...(input.providerID === "anthropic"
+                  ? {
+                      cacheControl: { type: "ephemeral" },
+                    }
+                  : {}),
+              },
             }),
           ),
           ...convertToCoreMessages([
@@ -270,7 +278,7 @@ export namespace Session {
     msgs.push(msg)
 
     const system = input.system ?? SystemPrompt.provider(input.providerID)
-    system.push(...(await SystemPrompt.environment(input.sessionID)))
+    system.push(...(await SystemPrompt.environment()))
     system.push(...(await SystemPrompt.custom()))
 
     const next: Message.Info = {
@@ -379,6 +387,24 @@ export namespace Session {
     }
 
     let text: Message.TextPart | undefined
+    await Bun.write(
+      "/tmp/message.json",
+      JSON.stringify(
+        [
+          ...system.map(
+            (x): CoreMessage => ({
+              role: "system",
+              content: x,
+            }),
+          ),
+          ...convertToCoreMessages(
+            msgs.map(toUIMessage).filter((x) => x.parts.length > 0),
+          ),
+        ],
+        null,
+        2,
+      ),
+    )
     const result = streamText({
       onStepFinish: async (step) => {
         log.info("step finish", {
@@ -447,6 +473,13 @@ export namespace Session {
           (x): CoreMessage => ({
             role: "system",
             content: x,
+            providerOptions: {
+              ...(input.providerID === "anthropic"
+                ? {
+                    cacheControl: { type: "ephemeral" },
+                  }
+                : {}),
+            },
           }),
         ),
         ...convertToCoreMessages(
@@ -455,7 +488,6 @@ export namespace Session {
       ],
       temperature: model.info.id === "codex-mini-latest" ? undefined : 0,
       tools: {
-        ...(await MCP.tools()),
         ...tools,
       },
       model: model.language,
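
Pulled out of the diff for clarity, the caching change amounts to attaching an ephemeral cacheControl entry to each system CoreMessage when the target provider is Anthropic, and leaving providerOptions empty otherwise. A minimal sketch of that pattern follows; the toSystemMessages helper is illustrative and not part of the commit, and the providerOptions shape is copied from the diff as-is (how the installed AI SDK version interprets that key is assumed, not shown here).

import { type CoreMessage } from "ai"

// Illustrative helper (not in the commit): wrap raw system prompt strings into
// CoreMessages and, for Anthropic only, mark them as cacheable so the provider
// can reuse the long, stable system prompt across requests.
function toSystemMessages(system: string[], providerID: string): CoreMessage[] {
  return system.map(
    (content): CoreMessage => ({
      role: "system",
      content,
      providerOptions: {
        ...(providerID === "anthropic"
          ? { cacheControl: { type: "ephemeral" } }
          : {}),
      },
    }),
  )
}

The same conditional spread is added at both places in session/index.ts where system messages are assembled, so the change is a no-op for every provider other than Anthropic.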