Ver Fonte

fix: narrow several `from any` type assertions in opencode core (#22926)

Kit Langton há 4 dias
pai
commit
23ed876835

+ 1 - 1
packages/opencode/src/cli/cmd/tui/context/kv.tsx

@@ -12,7 +12,7 @@ export const { use: useKV, provider: KVProvider } = createSimpleContext({
     const [store, setStore] = createStore<Record<string, any>>()
     const filePath = path.join(Global.Path.state, "kv.json")
 
-    Filesystem.readJson(filePath)
+    Filesystem.readJson<Record<string, any>>(filePath)
       .then((x) => {
         setStore(x)
       })

+ 4 - 4
packages/opencode/src/cli/error.ts

@@ -28,10 +28,10 @@ export function FormatError(input: unknown) {
   // ProviderModelNotFoundError: { providerID: string, modelID: string, suggestions?: string[] }
   if (NamedError.hasName(input, "ProviderModelNotFoundError")) {
     const data = (input as ErrorLike).data
-    const suggestions = data?.suggestions as string[] | undefined
+    const suggestions: string[] = Array.isArray(data?.suggestions) ? data.suggestions : []
     return [
       `Model not found: ${data?.providerID}/${data?.modelID}`,
-      ...(Array.isArray(suggestions) && suggestions.length ? ["Did you mean: " + suggestions.join(", ")] : []),
+      ...(suggestions.length ? ["Did you mean: " + suggestions.join(", ")] : []),
       `Try: \`opencode models\` to list available models`,
       `Or check your config (opencode.json) provider/model names`,
     ].join("\n")
@@ -64,10 +64,10 @@ export function FormatError(input: unknown) {
     const data = (input as ErrorLike).data
     const path = data?.path
     const message = data?.message
-    const issues = data?.issues as Array<{ message: string; path: string[] }> | undefined
+    const issues: Array<{ message: string; path: string[] }> = Array.isArray(data?.issues) ? data.issues : []
     return [
       `Configuration is invalid${path && path !== "config" ? ` at ${path}` : ""}` + (message ? `: ${message}` : ""),
-      ...(issues?.map((issue) => "↳ " + issue.message + " " + issue.path.join(".")) ?? []),
+      ...issues.map((issue) => "↳ " + issue.message + " " + issue.path.join(".")),
     ].join("\n")
   }
 

+ 4 - 5
packages/opencode/src/lsp/lsp.ts

@@ -440,12 +440,11 @@ export const layer = Layer.effect(
     const workspaceSymbol = Effect.fn("LSP.workspaceSymbol")(function* (query: string) {
       const results = yield* runAll((client) =>
         client.connection
-          .sendRequest("workspace/symbol", { query })
-          .then((result: any) => result.filter((x: Symbol) => kinds.includes(x.kind)))
-          .then((result: any) => result.slice(0, 10))
-          .catch(() => []),
+          .sendRequest<Symbol[]>("workspace/symbol", { query })
+          .then((result) => result.filter((x) => kinds.includes(x.kind)).slice(0, 10))
+          .catch(() => [] as Symbol[]),
       )
-      return results.flat() as Symbol[]
+      return results.flat()
     })
 
     const prepareCallHierarchy = Effect.fn("LSP.prepareCallHierarchy")(function* (input: LocInput) {

+ 11 - 2
packages/opencode/src/npm/index.ts

@@ -124,8 +124,17 @@ export async function install(dir: string) {
     return
   }
 
-  const pkg = await Filesystem.readJson(path.join(dir, "package.json")).catch(() => ({}))
-  const lock = await Filesystem.readJson(path.join(dir, "package-lock.json")).catch(() => ({}))
+  type PackageDeps = Record<string, string>
+  type PackageJson = {
+    dependencies?: PackageDeps
+    devDependencies?: PackageDeps
+    peerDependencies?: PackageDeps
+    optionalDependencies?: PackageDeps
+  }
+  const pkg: PackageJson = await Filesystem.readJson<PackageJson>(path.join(dir, "package.json")).catch(() => ({}))
+  const lock: { packages?: Record<string, PackageJson> } = await Filesystem.readJson<{
+    packages?: Record<string, PackageJson>
+  }>(path.join(dir, "package-lock.json")).catch(() => ({}))
 
   const declared = new Set([
     ...Object.keys(pkg.dependencies || {}),

+ 4 - 2
packages/opencode/src/provider/provider.ts

@@ -547,12 +547,14 @@ function custom(dep: CustomDep): Record<string, CustomLoader> {
         },
         async getModel(sdk: any, modelID: string, options?: Record<string, any>) {
           if (modelID.startsWith("duo-workflow-")) {
-            const workflowRef = options?.workflowRef as string | undefined
+            const workflowRef = typeof options?.workflowRef === "string" ? options.workflowRef : undefined
             // Use the static mapping if it exists, otherwise use duo-workflow with selectedModelRef
             const sdkModelID = isWorkflowModel(modelID) ? modelID : "duo-workflow"
+            const workflowDefinition =
+              typeof options?.workflowDefinition === "string" ? options.workflowDefinition : undefined
             const model = sdk.workflowChat(sdkModelID, {
               featureFlags,
-              workflowDefinition: options?.workflowDefinition as string | undefined,
+              workflowDefinition,
             })
             if (workflowRef) {
               model.selectedModelRef = workflowRef

+ 12 - 10
packages/opencode/src/session/session.ts

@@ -272,16 +272,18 @@ export const getUsage = (input: { model: Provider.Model; usage: LanguageModelUsa
     input.usage.inputTokenDetails?.cacheReadTokens ?? input.usage.cachedInputTokens ?? 0,
   )
   const cacheWriteInputTokens = safe(
-    (input.usage.inputTokenDetails?.cacheWriteTokens ??
-      input.metadata?.["anthropic"]?.["cacheCreationInputTokens"] ??
-      // google-vertex-anthropic returns metadata under "vertex" key
-      // (AnthropicMessagesLanguageModel custom provider key from 'vertex.anthropic.messages')
-      input.metadata?.["vertex"]?.["cacheCreationInputTokens"] ??
-      // @ts-expect-error
-      input.metadata?.["bedrock"]?.["usage"]?.["cacheWriteInputTokens"] ??
-      // @ts-expect-error
-      input.metadata?.["venice"]?.["usage"]?.["cacheCreationInputTokens"] ??
-      0) as number,
+    Number(
+      input.usage.inputTokenDetails?.cacheWriteTokens ??
+        input.metadata?.["anthropic"]?.["cacheCreationInputTokens"] ??
+        // google-vertex-anthropic returns metadata under "vertex" key
+        // (AnthropicMessagesLanguageModel custom provider key from 'vertex.anthropic.messages')
+        input.metadata?.["vertex"]?.["cacheCreationInputTokens"] ??
+        // @ts-expect-error
+        input.metadata?.["bedrock"]?.["usage"]?.["cacheWriteInputTokens"] ??
+        // @ts-expect-error
+        input.metadata?.["venice"]?.["usage"]?.["cacheCreationInputTokens"] ??
+        0,
+    ),
   )
 
   // AI SDK v6 normalized inputTokens to include cached tokens across all providers

+ 1 - 1
packages/opencode/src/tool/tool.ts

@@ -19,7 +19,7 @@ export type Context<M extends Metadata = Metadata> = {
   agent: string
   abort: AbortSignal
   callID?: string
-  extra?: { [key: string]: any }
+  extra?: { [key: string]: unknown }
   messages: MessageV2.WithParts[]
   metadata(input: { title?: string; metadata?: M }): Effect.Effect<void>
   ask(input: Omit<Permission.Request, "id" | "sessionID" | "tool">): Effect.Effect<void>

+ 1 - 1
packages/opencode/src/util/filesystem.ts

@@ -39,7 +39,7 @@ export async function readText(p: string): Promise<string> {
   return readFile(p, "utf-8")
 }
 
-export async function readJson<T = any>(p: string): Promise<T> {
+export async function readJson<T = unknown>(p: string): Promise<T> {
   return JSON.parse(await readFile(p, "utf-8"))
 }
 

+ 1 - 1
packages/opencode/test/config/config.test.ts

@@ -757,7 +757,7 @@ test("updates config and writes to file", async () => {
       const newConfig = { model: "updated/model" }
       await save(newConfig as any)
 
-      const writtenConfig = await Filesystem.readJson(path.join(tmp.path, "config.json"))
+      const writtenConfig = await Filesystem.readJson<{ model: string }>(path.join(tmp.path, "config.json"))
       expect(writtenConfig.model).toBe("updated/model")
     },
   })