2
0
View source

Pass through model.options properly without having to nest it under the provider name. You may need to update your configs — see https://opencode.ai/docs/models/#openrouter for an example.

Dax Raad 7 months ago
parent
commit
86d5b25d18

+ 14 - 0
opencode.json

@@ -1,5 +1,19 @@
 {
   "$schema": "https://opencode.ai/config.json",
+  "provider": {
+    "openrouter": {
+      "models": {
+        "moonshotai/kimi-k2": {
+          "options": {
+            "provider": {
+              "order": ["baseten"],
+              "allow_fallbacks": false
+            }
+          }
+        }
+      }
+    }
+  },
   "mcp": {
     "weather": {
       "type": "local",

+ 4 - 1
packages/opencode/src/provider/provider.ts

@@ -367,7 +367,10 @@ export namespace Provider {
       const pkg = provider.npm ?? provider.id
       const mod = await import(await BunProc.install(pkg, "beta"))
       const fn = mod[Object.keys(mod).find((key) => key.startsWith("create"))!]
-      const loaded = fn(s.providers[provider.id]?.options)
+      const loaded = fn({
+        name: provider.id,
+        ...s.providers[provider.id]?.options,
+      })
       s.sdk.set(provider.id, loaded)
       return loaded as SDK
     })().catch((e) => {

+ 6 - 2
packages/opencode/src/session/index.ts

@@ -540,7 +540,9 @@ export namespace Session {
       const small = (await Provider.getSmallModel(input.providerID)) ?? model
       generateText({
         maxOutputTokens: input.providerID === "google" ? 1024 : 20,
-        providerOptions: small.info.options,
+        providerOptions: {
+          [input.providerID]: small.info.options,
+        },
         messages: [
           ...SystemPrompt.title(input.providerID).map(
             (x): ModelMessage => ({
@@ -685,7 +687,9 @@ export namespace Session {
       maxOutputTokens: outputLimit,
       abortSignal: abort.signal,
       stopWhen: stepCountIs(1000),
-      providerOptions: model.info.options,
+      providerOptions: {
+        [input.providerID]: model.info.options,
+      },
       messages: [
         ...system.map(
           (x): ModelMessage => ({

+ 42 - 0
packages/web/src/content/docs/docs/models.mdx

@@ -61,6 +61,48 @@ You can customize the base URL for any provider by setting the `baseURL` option.
 
 ---
 
+### OpenRouter
+
+Many OpenRouter models are preloaded by default - you can customize these or add your own.
+
+Here's an example of specifying a provider:
+
+```json title="opencode.json"
+{
+  "$schema": "https://opencode.ai/config.json",
+  "provider": {
+    "openrouter": {
+      "models": {
+        "moonshotai/kimi-k2": {
+          "options": {
+            "provider": {
+              "order": ["baseten"],
+              "allow_fallbacks": false
+            }
+          }
+        }
+      }
+    }
+  }
+}
+```
+
+You can also add additional models:
+
+```json title="opencode.json"
+{
+  "$schema": "https://opencode.ai/config.json",
+  "provider": {
+    "openrouter": {
+      "models": {
+        "somecoolnewmodel": {}
+      }
+    }
+  }
+}
+```
+
+---
+
 ### Local
 
 You can configure local model like ones served through LM Studio or Ollama. To