
add small model for title generation

Dax Raad committed 7 months ago
commit b1ab641905

+ 1 - 3
package.json

@@ -39,7 +39,5 @@
     "protobufjs",
     "sharp"
   ],
-  "patchedDependencies": {
-    "[email protected]": "patches/[email protected]"
-  }
+  "patchedDependencies": {}
 }

+ 11 - 0
packages/opencode/src/provider/provider.ts

@@ -408,6 +408,17 @@ export namespace Provider {
     }
   }
 
+  export async function getSmallModel(providerID: string) {
+    const provider = await state().then((state) => state.providers[providerID])
+    if (!provider) return
+    const priority = ["3-5-haiku", "3.5-haiku", "gemini-2.5-flash"]
+    for (const item of priority) {
+      for (const model of Object.keys(provider.info.models)) {
+        if (model.includes(item)) return getModel(providerID, model)
+      }
+    }
+  }
+
   const priority = ["gemini-2.5-pro-preview", "codex-mini", "claude-sonnet-4"]
   export function sort(models: ModelsDev.Model[]) {
     return sortBy(
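
The new getSmallModel helper walks a priority list of cheap model names and substring-matches them against the provider's model IDs, resolving the first hit through getModel and returning undefined when the provider is unknown or has no match. A short sketch of a call site (the provider ID is illustrative, and model stands for whatever model the caller has already resolved):

    // Substring matching: any model ID containing a priority token qualifies,
    // e.g. a "claude-3-5-haiku-*" ID matches "3-5-haiku".
    const small = await Provider.getSmallModel("anthropic")
    const chosen = small ?? model // fall back to the already-resolved main model, as session/index.ts does below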

+ 3 - 1
packages/opencode/src/session/index.ts

@@ -504,6 +504,7 @@ export namespace Session {
       })
 
     if (msgs.length === 0 && !session.parentID) {
+      const small = (await Provider.getSmallModel(input.providerID)) ?? model
       generateText({
         maxOutputTokens: input.providerID === "google" ? 1024 : 20,
         providerOptions: model.info.options,
@@ -528,7 +529,7 @@ export namespace Session {
             },
           ]),
         ],
-        model: model.language,
+        model: small.language,
       })
         .then((result) => {
           if (result.text)
@@ -1012,6 +1013,7 @@ export namespace Session {
 
     const processor = createProcessor(next, model.info)
     const stream = streamText({
+      maxRetries: 10,
       abortSignal: abort.signal,
       model: model.language,
       messages: [
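
Together these hunks make title generation prefer the small model: it is resolved once for a new session and falls back to the session's main model when the provider has no haiku/flash variant, while the second hunk separately raises maxRetries to 10 on the main streamText call. A condensed sketch of the title path using the identifiers from this diff (the prompt messages and the code that stores the resulting title are elided in the hunk and appear here as placeholders):

    const small = (await Provider.getSmallModel(input.providerID)) ?? model
    generateText({
      maxOutputTokens: input.providerID === "google" ? 1024 : 20, // a title only needs a few tokens
      providerOptions: model.info.options,
      model: small.language, // cheap model when one exists, session model otherwise
      messages: titlePrompt, // placeholder for the system + user messages built above this hunk
    }).then((result) => {
      if (result.text) {
        // placeholder: persist result.text as the session title
      }
    })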

+ 0 - 4
packages/opencode/src/tool/read.ts

@@ -7,7 +7,6 @@ import { FileTime } from "../file/time"
 import DESCRIPTION from "./read.txt"
 import { App } from "../app/app"
 
-const MAX_READ_SIZE = 250 * 1024
 const DEFAULT_READ_LIMIT = 2000
 const MAX_LINE_LENGTH = 2000
 
@@ -45,10 +44,7 @@ export const ReadTool = Tool.define({
 
       throw new Error(`File not found: ${filePath}`)
     }
-    const stats = await file.stat()
 
-    if (stats.size > MAX_READ_SIZE)
-      throw new Error(`File is too large (${stats.size} bytes). Maximum size is ${MAX_READ_SIZE} bytes`)
     const limit = params.limit ?? DEFAULT_READ_LIMIT
     const offset = params.offset || 0
     const isImage = isImageFile(filePath)
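
With the byte-size ceiling removed, the read tool bounds output only by line count and line length: DEFAULT_READ_LIMIT lines starting at the requested offset, with MAX_LINE_LENGTH capping each line (both constants remain in the file). A minimal sketch of that slicing, assuming Node's fs/promises in place of the Bun file object the tool actually uses:

    import { readFile } from "node:fs/promises"

    const DEFAULT_READ_LIMIT = 2000
    const MAX_LINE_LENGTH = 2000

    // Return up to `limit` lines starting at `offset`, truncating overly long lines.
    async function readSlice(filePath: string, offset = 0, limit = DEFAULT_READ_LIMIT) {
      const text = await readFile(filePath, "utf8")
      return text
        .split("\n")
        .slice(offset, offset + limit)
        .map((line) => (line.length > MAX_LINE_LENGTH ? line.slice(0, MAX_LINE_LENGTH) + "..." : line))
        .join("\n")
    }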

+ 1 - 1
packages/tui/internal/tui/tui.go

@@ -492,7 +492,7 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
 	case app.SessionSelectedMsg:
 		messages, err := a.app.ListMessages(context.Background(), msg.ID)
 		if err != nil {
-			slog.Error("Failed to list messages", "error", err)
+			slog.Error("Failed to list messages", "error", err.Error())
 			return a, toast.NewErrorToast("Failed to open session")
 		}
 		a.app.Session = msg

+ 0 - 13
patches/[email protected]

@@ -1,13 +0,0 @@
-diff --git a/dist/index.mjs b/dist/index.mjs
-index 92a80377692488c4ba8801ce33e7736ad7055e43..add6281bbecaa1c03d3b48eb99aead4a7a7336b2 100644
---- a/dist/index.mjs
-+++ b/dist/index.mjs
-@@ -1593,7 +1593,7 @@ function prepareCallSettings({
-   return {
-     maxTokens,
-     // TODO v5 remove default 0 for temperature
--    temperature: temperature != null ? temperature : 0,
-+    temperature: temperature,
-     topP,
-     topK,
-     presencePenalty,
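
For context, the deleted patch modified the SDK's prepareCallSettings so an unset temperature was passed through as-is instead of defaulting to 0; dropping the patch (and the package.json entry above) restores the upstream behavior. A sketch of the difference in isolation (resolveTemperature is an illustrative name, not SDK code):

    // Illustration of the one-line change the patch made to [email protected].
    function resolveTemperature(temperature: number | undefined, patched: boolean) {
      if (patched) return temperature // patched: leave unset so the provider's own default applies
      return temperature != null ? temperature : 0 // upstream: default to 0
    }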