Преглед изворни кода

tweak: adjust chat.params hook to allow altering of the maxOutputTokens (#21220)

Aiden Cline пре 1 недеље
родитељ
комит
40e4cd27a1
2 измењених фајлова са 14 додато и 7 уклоњено
  1. 7 6
      packages/opencode/src/session/llm.ts
  2. 7 1
      packages/plugin/src/index.ts

+ 7 - 6
packages/opencode/src/session/llm.ts

@@ -160,6 +160,11 @@ export namespace LLM {
             ...input.messages,
           ]
 
+    const maxOutputTokens =
+      isOpenaiOauth || provider.id.includes("github-copilot")
+        ? undefined
+        : ProviderTransform.maxOutputTokens(input.model)
+
     const params = await Plugin.trigger(
       "chat.params",
       {
@@ -175,6 +180,7 @@ export namespace LLM {
           : undefined,
         topP: input.agent.topP ?? ProviderTransform.topP(input.model),
         topK: ProviderTransform.topK(input.model),
+        maxOutputTokens,
         options,
       },
     )
@@ -193,11 +199,6 @@ export namespace LLM {
       },
     )
 
-    const maxOutputTokens =
-      isOpenaiOauth || provider.id.includes("github-copilot")
-        ? undefined
-        : ProviderTransform.maxOutputTokens(input.model)
-
     const tools = await resolveTools(input)
 
     // LiteLLM and some Anthropic proxies require the tools parameter to be present
@@ -291,7 +292,7 @@ export namespace LLM {
       activeTools: Object.keys(tools).filter((x) => x !== "invalid"),
       tools,
       toolChoice: input.toolChoice,
-      maxOutputTokens,
+      maxOutputTokens: params.maxOutputTokens,
       abortSignal: input.abort,
       headers: {
         ...(input.model.providerID.startsWith("opencode")

+ 7 - 1
packages/plugin/src/index.ts

@@ -212,7 +212,13 @@ export interface Hooks {
    */
   "chat.params"?: (
     input: { sessionID: string; agent: string; model: Model; provider: ProviderContext; message: UserMessage },
-    output: { temperature: number; topP: number; topK: number; options: Record<string, any> },
+    output: {
+      temperature: number
+      topP: number
+      topK: number
+      maxOutputTokens: number | undefined
+      options: Record<string, any>
+    },
   ) => Promise<void>
   "chat.headers"?: (
     input: { sessionID: string; agent: string; model: Model; provider: ProviderContext; message: UserMessage },