Dax Raad 8 месяцев назад
Родитель
Commit
f072ab3276

+ 6 - 1
packages/opencode/src/config/config.ts

@@ -14,7 +14,12 @@ export namespace Config {
 
 
   export const Info = z
   export const Info = z
     .object({
     .object({
-      providers: Provider.Info.array().optional(),
+      provider: Provider.Info.array().optional(),
+      tool: z
+        .object({
+          provider: z.record(z.string(), z.string().array()).optional(),
+        })
+        .optional(),
     })
     })
     .strict()
     .strict()
 
 

+ 10 - 6
packages/opencode/src/index.ts

@@ -10,6 +10,7 @@ import { Share } from "./share/share"
 import { LLM } from "./llm/llm"
 import { LLM } from "./llm/llm"
 import { Message } from "./session/message"
 import { Message } from "./session/message"
 import { Global } from "./global"
 import { Global } from "./global"
+import { Provider } from "./provider/provider"
 
 
 const cli = cac("opencode")
 const cli = cac("opencode")
 
 
@@ -79,14 +80,17 @@ cli
         unsub()
         unsub()
       })
       })
 
 
-      const providers = await LLM.providers()
-      const providerID = Object.keys(providers)[0]
-      const modelID = providers[providerID].info.models[0].id
-      console.log("using", providerID, modelID)
+      const [provider] = await Provider.active().then((val) =>
+        val.values().toArray(),
+      )
+      if (!provider) throw new Error("no providers found")
+      const model = provider.models[0]
+      if (!model) throw new Error("no models found")
+      console.log("using", provider.id, model.id)
       const result = await Session.chat({
       const result = await Session.chat({
         sessionID: session.id,
         sessionID: session.id,
-        providerID,
-        modelID,
+        providerID: provider.id,
+        modelID: model.id,
         parts: [
         parts: [
           {
           {
             type: "text",
             type: "text",

+ 0 - 172
packages/opencode/src/llm/llm.ts

@@ -1,172 +0,0 @@
-import { App } from "../app/app"
-import { Log } from "../util/log"
-import { concat } from "remeda"
-import path from "path"
-import { Provider } from "../provider/provider"
-
-import type { LanguageModel, Provider as ProviderInstance } from "ai"
-import { NoSuchModelError } from "ai"
-import { Config } from "../config/config"
-import { BunProc } from "../bun"
-import { Global } from "../global"
-
// Legacy LLM namespace (deleted in this commit, superseded by Provider in
// provider/provider.ts): discovers active providers, lazily installs and
// loads their @ai-sdk packages, and resolves/caches language models.
export namespace LLM {
  const log = Log.create({ service: "llm" })

  // Raised when a provider or model lookup fails; carries the requested model id.
  // NOTE(review): super() is called with no message, so err.message is empty.
  export class ModelNotFoundError extends Error {
    constructor(public readonly model: string) {
      super()
    }
  }

  // Built-in provider/model catalog. Cost fields are stored per token,
  // written as <price per 1M tokens> / 1_000_000 — presumably USD; confirm
  // against each provider's pricing page.
  const NATIVE_PROVIDERS: Provider.Info[] = [
    {
      id: "anthropic",
      name: "Anthropic",
      models: [
        {
          id: "claude-sonnet-4-20250514",
          name: "Claude Sonnet 4",
          cost: {
            input: 3.0 / 1_000_000,
            output: 15.0 / 1_000_000,
            inputCached: 3.75 / 1_000_000,
            outputCached: 0.3 / 1_000_000,
          },
          contextWindow: 200_000,
          maxOutputTokens: 50_000,
          attachment: true,
        },
      ],
    },
    {
      id: "openai",
      name: "OpenAI",
      models: [
        {
          id: "codex-mini-latest",
          name: "Codex Mini",
          cost: {
            input: 1.5 / 1_000_000,
            inputCached: 0.375 / 1_000_000,
            output: 6.0 / 1_000_000,
            outputCached: 0.0 / 1_000_000,
          },
          contextWindow: 200_000,
          maxOutputTokens: 100_000,
          attachment: true,
          reasoning: true,
        },
      ],
    },
    {
      id: "google",
      name: "Google",
      models: [
        {
          id: "gemini-2.5-pro-preview-03-25",
          name: "Gemini 2.5 Pro",
          cost: {
            input: 1.25 / 1_000_000,
            inputCached: 0 / 1_000_000,
            output: 10 / 1_000_000,
            outputCached: 0 / 1_000_000,
          },
          contextWindow: 1_000_000,
          maxOutputTokens: 50_000,
          attachment: true,
        },
      ],
    },
  ]

  // Environment variables whose presence auto-enables the matching native provider.
  const AUTODETECT: Record<string, string[]> = {
    anthropic: ["ANTHROPIC_API_KEY"],
    openai: ["OPENAI_API_KEY"],
    google: ["GOOGLE_GENERATIVE_AI_API_KEY", "GEMINI_API_KEY"],
  }

  // Lazily-built per-app state: an @ai-sdk client instance for every active
  // provider, plus a (initially empty) model cache filled by findModel().
  const state = App.state("llm", async () => {
    const config = await Config.get()
    const providers: Record<
      string,
      {
        info: Provider.Info
        instance: ProviderInstance
      }
    > = {}
    // Keyed by "providerID/modelID"; populated lazily in findModel().
    const models = new Map<
      string,
      { info: Provider.Model; instance: LanguageModel }
    >()

    const list = concat(NATIVE_PROVIDERS, config.providers ?? [])

    for (const provider of list) {
      // A provider is active when it is explicitly configured OR one of its
      // autodetect env vars is set; everything else is skipped.
      if (
        !config.providers?.find((p) => p.id === provider.id) &&
        !AUTODETECT[provider.id]?.some((env) => process.env[env])
      )
        continue
      const dir = path.join(
        Global.cache(),
        `node_modules`,
        `@ai-sdk`,
        provider.id,
      )
      // Install the provider's SDK package into the global cache on first use.
      // NOTE(review): BunProc.run is not awaited here — if it is asynchronous,
      // the import below may race the install; confirm BunProc.run semantics.
      if (!(await Bun.file(path.join(dir, "package.json")).exists())) {
        BunProc.run(["add", `@ai-sdk/${provider.id}@alpha`], {
          cwd: Global.cache(),
        })
      }
      const mod = await import(
        path.join(Global.cache(), `node_modules`, `@ai-sdk`, provider.id)
      )
      // @ai-sdk packages export a single create<Name>() factory; find it by prefix.
      const fn = mod[Object.keys(mod).find((key) => key.startsWith("create"))!]
      const loaded = fn(provider.options)
      log.info("loaded", { provider: provider.id })
      providers[provider.id] = {
        info: provider,
        instance: loaded,
      }
    }

    return {
      models,
      providers,
    }
  })

  // All active providers, keyed by provider id.
  export async function providers() {
    return state().then((state) => state.providers)
  }

  // Resolve (and memoize) a language model for providerID/modelID.
  // Throws ModelNotFoundError when the provider or model is unknown, or when
  // the SDK reports NoSuchModelError.
  export async function findModel(providerID: string, modelID: string) {
    const key = `${providerID}/${modelID}`
    const s = await state()
    if (s.models.has(key)) return s.models.get(key)!
    const provider = s.providers[providerID]
    if (!provider) throw new ModelNotFoundError(modelID)
    log.info("loading", {
      providerID,
      modelID,
    })
    const info = provider.info.models.find((m) => m.id === modelID)
    if (!info) throw new ModelNotFoundError(modelID)
    try {
      const match = provider.instance.languageModel(modelID)
      log.info("found", { providerID, modelID })
      s.models.set(key, {
        info,
        instance: match,
      })
      return {
        info,
        instance: match,
      }
    } catch (e) {
      if (e instanceof NoSuchModelError) throw new ModelNotFoundError(modelID)
      throw e
    }
  }
}

+ 62 - 0
packages/opencode/src/provider/database.ts

@@ -0,0 +1,62 @@
+import type { Provider } from "./provider"
+
// Built-in catalog of known providers and their models (moved out of llm.ts).
// Cost fields are stored per token, written as <price per 1M tokens> / 1_000_000
// — presumably USD; confirm against each provider's pricing page.
export const PROVIDER_DATABASE: Provider.Info[] = [
  {
    id: "anthropic",
    name: "Anthropic",
    models: [
      {
        id: "claude-sonnet-4-20250514",
        name: "Claude Sonnet 4",
        cost: {
          input: 3.0 / 1_000_000,
          output: 15.0 / 1_000_000,
          inputCached: 3.75 / 1_000_000,
          outputCached: 0.3 / 1_000_000,
        },
        contextWindow: 200_000,
        maxOutputTokens: 50_000,
        attachment: true, // supports file/image attachments
      },
    ],
  },
  {
    id: "openai",
    name: "OpenAI",
    models: [
      {
        id: "codex-mini-latest",
        name: "Codex Mini",
        cost: {
          input: 1.5 / 1_000_000,
          inputCached: 0.375 / 1_000_000,
          output: 6.0 / 1_000_000,
          outputCached: 0.0 / 1_000_000,
        },
        contextWindow: 200_000,
        maxOutputTokens: 100_000,
        attachment: true,
        reasoning: true, // flagged as a reasoning model
      },
    ],
  },
  {
    id: "google",
    name: "Google",
    models: [
      {
        id: "gemini-2.5-pro-preview-03-25",
        name: "Gemini 2.5 Pro",
        cost: {
          input: 1.25 / 1_000_000,
          inputCached: 0 / 1_000_000,
          output: 10 / 1_000_000,
          outputCached: 0 / 1_000_000,
        },
        contextWindow: 1_000_000,
        maxOutputTokens: 50_000,
        attachment: true,
      },
    ],
  },
]

+ 103 - 0
packages/opencode/src/provider/provider.ts

@@ -1,6 +1,16 @@
 import z from "zod"
 import z from "zod"
+import { App } from "../app/app"
+import { Config } from "../config/config"
+import { PROVIDER_DATABASE } from "./database"
+import { NoSuchModelError, type LanguageModel, type Provider as SDK } from "ai"
+import { Log } from "../util/log"
+import path from "path"
+import { Global } from "../global"
+import { BunProc } from "../bun"
 
 
 export namespace Provider {
 export namespace Provider {
+  const log = Log.create({ service: "provider" })
+
   export const Model = z
   export const Model = z
     .object({
     .object({
       id: z.string(),
       id: z.string(),
@@ -32,4 +42,97 @@ export namespace Provider {
       ref: "Provider.Info",
       ref: "Provider.Info",
     })
     })
   export type Info = z.output<typeof Info>
   export type Info = z.output<typeof Info>
+
+  const AUTODETECT: Record<string, string[]> = {
+    anthropic: ["ANTHROPIC_API_KEY"],
+    openai: ["OPENAI_API_KEY"],
+    google: ["GOOGLE_GENERATIVE_AI_API_KEY", "GEMINI_API_KEY"],
+  }
+
+  const state = App.state("provider", async () => {
+    const config = await Config.get()
+    const providers = new Map<string, Info>()
+    const models = new Map<string, { info: Model; language: LanguageModel }>()
+    const sdk = new Map<string, SDK>()
+
+    for (const item of PROVIDER_DATABASE) {
+      if (!AUTODETECT[item.id].some((env) => process.env[env])) continue
+      providers.set(item.id, item)
+    }
+
+    for (const item of config.provider ?? []) {
+      providers.set(item.id, item)
+    }
+
+    return {
+      models,
+      providers,
+      sdk,
+    }
+  })
+
+  export async function active() {
+    return state().then((state) => state.providers)
+  }
+
  // Load (and memoize) the @ai-sdk client instance for a provider, installing
  // its package into the global cache on first use.
  async function getSDK(providerID: string) {
    const s = await state()
    if (s.sdk.has(providerID)) return s.sdk.get(providerID)!

    const dir = path.join(Global.cache(), `node_modules`, `@ai-sdk`, providerID)
    if (!(await Bun.file(path.join(dir, "package.json")).exists())) {
      log.info("installing", {
        providerID,
      })
      // NOTE(review): BunProc.run is not awaited — if it is asynchronous, the
      // dynamic import below may race the install; confirm BunProc.run semantics.
      BunProc.run(["add", `@ai-sdk/${providerID}@alpha`], {
        cwd: Global.cache(),
      })
    }
    // NOTE(review): path.join(dir) with a single argument is a no-op; plain
    // `import(dir)` would be equivalent.
    const mod = await import(path.join(dir))
    // @ai-sdk packages export a single create<Name>() factory; find it by prefix.
    const fn = mod[Object.keys(mod).find((key) => key.startsWith("create"))!]
    const loaded = fn(s.providers.get(providerID)?.options)
    s.sdk.set(providerID, loaded)
    return loaded as SDK
  }
+
+  export async function getModel(providerID: string, modelID: string) {
+    const key = `${providerID}/${modelID}`
+    const s = await state()
+    if (s.models.has(key)) return s.models.get(key)!
+
+    log.info("loading", {
+      providerID,
+      modelID,
+    })
+
+    const provider = s.providers.get(providerID)
+    if (!provider) throw new ModelNotFoundError(modelID)
+    const info = provider.models.find((m) => m.id === modelID)
+    if (!info) throw new ModelNotFoundError(modelID)
+
+    const sdk = await getSDK(providerID)
+    if (!sdk) throw new ModelNotFoundError(modelID)
+
+    try {
+      const language = sdk.languageModel(modelID)
+      log.info("found", { providerID, modelID })
+      s.models.set(key, {
+        info,
+        language,
+      })
+      return {
+        info,
+        language,
+      }
+    } catch (e) {
+      if (e instanceof NoSuchModelError) throw new ModelNotFoundError(modelID)
+      throw e
+    }
+  }
+
+  class ModelNotFoundError extends Error {
+    constructor(public readonly model: string) {
+      super()
+    }
+  }
 }
 }

+ 2 - 6
packages/opencode/src/server/server.ts

@@ -268,12 +268,8 @@ export namespace Server {
           },
           },
         }),
         }),
         async (c) => {
         async (c) => {
-          const providers = await LLM.providers()
-          const result = [] as (Provider.Info & { key: string })[]
-          for (const [key, provider] of Object.entries(providers)) {
-            result.push({ ...provider.info, key })
-          }
-          return c.json(result)
+          const providers = await Provider.active()
+          return c.json(providers.values().toArray())
         },
         },
       )
       )
 
 

+ 6 - 7
packages/opencode/src/session/session.ts

@@ -1,7 +1,6 @@
 import path from "path"
 import path from "path"
 import { App } from "../app/app"
 import { App } from "../app/app"
 import { Identifier } from "../id/id"
 import { Identifier } from "../id/id"
-import { LLM } from "../llm/llm"
 import { Storage } from "../storage/storage"
 import { Storage } from "../storage/storage"
 import { Log } from "../util/log"
 import { Log } from "../util/log"
 import {
 import {
@@ -22,7 +21,7 @@ import PROMPT_SUMMARIZE from "./prompt/summarize.txt"
 import { Share } from "../share/share"
 import { Share } from "../share/share"
 import { Message } from "./message"
 import { Message } from "./message"
 import { Bus } from "../bus"
 import { Bus } from "../bus"
-import type { Provider } from "../provider/provider"
+import { Provider } from "../provider/provider"
 
 
 export namespace Session {
 export namespace Session {
   const log = Log.create({ service: "session" })
   const log = Log.create({ service: "session" })
@@ -171,7 +170,7 @@ export namespace Session {
   }) {
   }) {
     const l = log.clone().tag("session", input.sessionID)
     const l = log.clone().tag("session", input.sessionID)
     l.info("chatting")
     l.info("chatting")
-    const model = await LLM.findModel(input.providerID, input.modelID)
+    const model = await Provider.getModel(input.providerID, input.modelID)
     let msgs = await messages(input.sessionID)
     let msgs = await messages(input.sessionID)
     const previous = msgs.at(-1)
     const previous = msgs.at(-1)
     if (previous?.metadata.assistant) {
     if (previous?.metadata.assistant) {
@@ -245,7 +244,7 @@ export namespace Session {
             parts: input.parts,
             parts: input.parts,
           },
           },
         ]),
         ]),
-        model: model.instance,
+        model: model.language,
       }).then((result) => {
       }).then((result) => {
         return Session.update(input.sessionID, (draft) => {
         return Session.update(input.sessionID, (draft) => {
           draft.title = result.text
           draft.title = result.text
@@ -305,7 +304,7 @@ export namespace Session {
       messages: convertToModelMessages(msgs),
       messages: convertToModelMessages(msgs),
       temperature: 0,
       temperature: 0,
       tools,
       tools,
-      model: model.instance,
+      model: model.language,
     })
     })
     let text: Message.TextPart | undefined
     let text: Message.TextPart | undefined
     const reader = result.toUIMessageStream().getReader()
     const reader = result.toUIMessageStream().getReader()
@@ -402,7 +401,7 @@ export namespace Session {
     const filtered = msgs.filter(
     const filtered = msgs.filter(
       (msg) => msg.role !== "system" && (!lastSummary || msg.id >= lastSummary),
       (msg) => msg.role !== "system" && (!lastSummary || msg.id >= lastSummary),
     )
     )
-    const model = await LLM.findModel(input.providerID, input.modelID)
+    const model = await Provider.getModel(input.providerID, input.modelID)
     const next: Message.Info = {
     const next: Message.Info = {
       id: Identifier.ascending("message"),
       id: Identifier.ascending("message"),
       role: "assistant",
       role: "assistant",
@@ -429,7 +428,7 @@ export namespace Session {
     await updateMessage(next)
     await updateMessage(next)
     const result = await generateText({
     const result = await generateText({
       abortSignal: abort.signal,
       abortSignal: abort.signal,
-      model: model.instance,
+      model: model.language,
       messages: convertToModelMessages([
       messages: convertToModelMessages([
         {
         {
           role: "system",
           role: "system",

+ 13 - 2
packages/opencode/src/tool/tool.ts

@@ -1,9 +1,10 @@
 import { tool, type Tool as AITool } from "ai"
 import { tool, type Tool as AITool } from "ai"
 import { Log } from "../util/log"
 import { Log } from "../util/log"
-
-const log = Log.create({ service: "tool" })
+import { Config } from "../config/config"
 
 
 export namespace Tool {
 export namespace Tool {
+  const log = Log.create({ service: "tool" })
+
   export interface Metadata<
   export interface Metadata<
     Properties extends Record<string, any> = Record<string, any>,
     Properties extends Record<string, any> = Record<string, any>,
   > {
   > {
@@ -13,6 +14,15 @@ export namespace Tool {
       end: number
       end: number
     }
     }
   }
   }
+
+  const TOOL_MAPPING: Record<string, string[]> = {
+    anthropic: [],
+  }
+  export async function forProvider(providerID: string) {
+    const config = await Config.get()
+    const match = config.tool?.provider?.[providerID] ?? []
+  }
+
   export function define<
   export function define<
     Params,
     Params,
     Output extends { metadata?: any; output: any },
     Output extends { metadata?: any; output: any },
@@ -51,6 +61,7 @@ export namespace Tool {
           return {
           return {
             metadata: {
             metadata: {
               error: true,
               error: true,
+              message: e.toString(),
             },
             },
             output: "An error occurred: " + e.toString(),
             output: "An error occurred: " + e.toString(),
           }
           }