Răsfoiți Sursa

feat: nix support for the nix folks (#3924)

Co-authored-by: opencode <[email protected]>
Co-authored-by: opencode-agent[bot] <opencode-agent[bot]@users.noreply.github.com>
Albert O'Shea 2 luni în urmă
părinte
comite
5e13527416

+ 79 - 0
.github/workflows/update-nix-hashes.yml

@@ -0,0 +1,79 @@
+# Recomputes the fixed-output hash of the Nix node_modules derivation
+# whenever the JS dependency graph may have changed, and commits the
+# refreshed hash file back to the triggering branch.
+name: Update Nix Hashes
+
+permissions:
+  contents: write
+
+# Run manually, or on any change to the lockfile / package manifests
+# (the inputs that determine the node_modules hash).
+on:
+  workflow_dispatch:
+  push:
+    paths:
+      - 'bun.lock'
+      - 'package.json'
+      - 'packages/*/package.json'
+  pull_request:
+    paths:
+      - 'bun.lock'
+      - 'package.json'
+      - 'packages/*/package.json'
+
+jobs:
+  update:
+    runs-on: ubuntu-latest
+    env:
+      # System passed through to nix/scripts/update-hashes.sh.
+      SYSTEM: x86_64-linux
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+        with:
+          # NOTE(review): GITHUB_TOKEN cannot push to fork branches, so
+          # pull_request runs from forks will fail at the push step — confirm
+          # this is acceptable.
+          token: ${{ secrets.GITHUB_TOKEN }}
+          fetch-depth: 0
+
+      - name: Setup Nix
+        uses: DeterminateSystems/nix-installer-action@v20
+
+      - name: Configure git
+        run: |
+          git config --global user.email "[email protected]"
+          git config --global user.name "opencode"
+
+      - name: Update node_modules hash
+        run: |
+          set -euo pipefail
+          nix/scripts/update-hashes.sh
+
+      - name: Commit hash changes
+        env:
+          # For pull_request events push to the PR head branch; otherwise to
+          # the branch that triggered the run.
+          TARGET_BRANCH: ${{ github.head_ref || github.ref_name }}
+        run: |
+          set -euo pipefail
+
+          # Append a short status section to the job summary page.
+          summarize() {
+            local status="$1"
+            {
+              echo "### Nix Hash Update"
+              echo ""
+              echo "- ref: ${GITHUB_REF_NAME}"
+              echo "- status: ${status}"
+            } >> "$GITHUB_STEP_SUMMARY"
+            if [ -n "${GITHUB_SERVER_URL:-}" ] && [ -n "${GITHUB_REPOSITORY:-}" ] && [ -n "${GITHUB_RUN_ID:-}" ]; then
+              echo "- run: ${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" >> "$GITHUB_STEP_SUMMARY"
+            fi
+            echo "" >> "$GITHUB_STEP_SUMMARY"
+          }
+
+          # Only commit files the hash updater may legitimately touch.
+          FILES=(flake.nix nix/node-modules.nix nix/hashes.json)
+          STATUS="$(git status --short -- "${FILES[@]}" || true)"
+          if [ -z "$STATUS" ]; then
+            summarize "no changes"
+            echo "No changes to tracked Nix files. Hashes are already up to date."
+            exit 0
+          fi
+
+          git add "${FILES[@]}"
+          git commit -m "Update Nix hashes"
+
+          BRANCH="${TARGET_BRANCH:-${GITHUB_REF_NAME}}"
+          git push origin HEAD:"$BRANCH"
+
+          summarize "committed $(git rev-parse --short HEAD)"

+ 1 - 0
.gitignore

@@ -13,6 +13,7 @@ dist
 .turbo
 **/.serena
 .serena/
+/result
 refs
 Session.vim
 opencode.json

+ 2 - 1
README.md

@@ -28,9 +28,10 @@ curl -fsSL https://opencode.ai/install | bash
 npm i -g opencode-ai@latest        # or bun/pnpm/yarn
 scoop bucket add extras; scoop install extras/opencode  # Windows
 choco install opencode             # Windows
-brew install opencode      # macOS and Linux
+brew install opencode              # macOS and Linux
 paru -S opencode-bin               # Arch Linux
 mise use --pin -g ubi:sst/opencode # Any OS
+nix run nixpkgs#opencode           # or github:sst/opencode for latest dev branch
 ```
 
 > [!TIP]

+ 27 - 0
flake.lock

@@ -0,0 +1,27 @@
+{
+  "nodes": {
+    "nixpkgs": {
+      "locked": {
+        "lastModified": 1762156382,
+        "narHash": "sha256-Yg7Ag7ov5+36jEFC1DaZh/12SEXo6OO3/8rqADRxiqs=",
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "rev": "7241bcbb4f099a66aafca120d37c65e8dda32717",
+        "type": "github"
+      },
+      "original": {
+        "owner": "NixOS",
+        "ref": "nixpkgs-unstable",
+        "repo": "nixpkgs",
+        "type": "github"
+      }
+    },
+    "root": {
+      "inputs": {
+        "nixpkgs": "nixpkgs"
+      }
+    }
+  },
+  "root": "root",
+  "version": 7
+}

+ 107 - 0
flake.nix

@@ -0,0 +1,107 @@
+{
+  description = "OpenCode development flake";
+
+  inputs = {
+    nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";
+  };
+
+  outputs =
+    {
+      nixpkgs,
+      ...
+    }:
+    let
+      # Every system the package is built for.
+      systems = [
+        "aarch64-linux"
+        "x86_64-linux"
+        "aarch64-darwin"
+        "x86_64-darwin"
+      ];
+      lib = nixpkgs.lib;
+      forEachSystem = lib.genAttrs systems;
+      pkgsFor = system: nixpkgs.legacyPackages.${system};
+      packageJson = builtins.fromJSON (builtins.readFile ./packages/opencode/package.json);
+      # Nix system -> bun compile target.
+      bunTarget = {
+        "aarch64-linux" = "bun-linux-arm64";
+        "x86_64-linux" = "bun-linux-x64";
+        "aarch64-darwin" = "bun-darwin-arm64";
+        "x86_64-darwin" = "bun-darwin-x64";
+      };
+      # Placeholder until nix/scripts/update-hashes.sh writes the real
+      # fixed-output hash into nix/hashes.json.
+      defaultNodeModules = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
+      hashesFile = "${./nix}/hashes.json";
+      hashesData =
+        if builtins.pathExists hashesFile then builtins.fromJSON (builtins.readFile hashesFile) else { };
+      nodeModulesHash = hashesData.nodeModules or defaultNodeModules;
+      # Snapshot of the models.dev catalog, packaged in nixpkgs.
+      modelsDev = forEachSystem (
+        system:
+        let
+          pkgs = pkgsFor system;
+        in
+        pkgs."models-dev"
+      );
+    in
+    {
+      devShells = forEachSystem (
+        system:
+        let
+          pkgs = pkgsFor system;
+        in
+        {
+          default = pkgs.mkShell {
+            packages = with pkgs; [
+              bun
+              nodejs_20
+              pkg-config
+              openssl
+              git
+            ];
+          };
+        }
+      );
+
+      packages = forEachSystem (
+        system:
+        let
+          pkgs = pkgsFor system;
+          mkNodeModules = pkgs.callPackage ./nix/node-modules.nix {
+            hash = nodeModulesHash;
+          };
+          mkPackage = pkgs.callPackage ./nix/opencode.nix { };
+        in
+        {
+          default = mkPackage {
+            version = packageJson.version;
+            src = ./.;
+            scripts = ./nix/scripts;
+            target = bunTarget.${system};
+            modelsDev = "${modelsDev.${system}}/dist/_api.json";
+            mkNodeModules = mkNodeModules;
+          };
+        }
+      );
+
+      apps = forEachSystem (
+        system:
+        let
+          pkgs = pkgsFor system;
+        in
+        {
+          opencode-dev = {
+            type = "app";
+            meta = {
+              description = "Nix dev shell for OpenCode";
+            };
+            program = "${
+              pkgs.writeShellApplication {
+                name = "opencode-dev";
+                # runtimeInputs belongs on writeShellApplication (it prefixes
+                # the wrapper's PATH); declared under the app's `meta` it was
+                # silently ignored and bun could be missing at run time.
+                runtimeInputs = [ pkgs.bun ];
+                text = ''
+                  exec bun run dev "$@"
+                '';
+              }
+            }/bin/opencode-dev";
+          };
+        }
+      );
+    };
+}

+ 3 - 0
nix/hashes.json

@@ -0,0 +1,3 @@
+{
+  "nodeModules": "sha256-srbGIRjvpqUF+jWq4GAx7sGAasq02dRySnxTjijJJT8="
+}

+ 52 - 0
nix/node-modules.nix

@@ -0,0 +1,52 @@
+# Fixed-output derivation vendoring the bun node_modules tree.
+# `hash` is the expected SRI output hash (kept in nix/hashes.json and
+# refreshed by nix/scripts/update-hashes.sh); `args` supplies version, src
+# and the two determinism scripts.
+{ hash, lib, stdenvNoCC, bun, cacert, curl }:
+args:
+stdenvNoCC.mkDerivation {
+  pname = "opencode-node_modules";
+  version = args.version;
+  src = args.src;
+
+  # Fixed-output derivations may access the network; forward proxy config.
+  impureEnvVars =
+    lib.fetchers.proxyImpureEnvVars
+    ++ [
+      "GIT_PROXY_COMMAND"
+      "SOCKS_SERVER"
+    ];
+
+  nativeBuildInputs = [ bun cacert curl ];
+
+  dontConfigure = true;
+
+  buildPhase = ''
+    runHook preBuild
+    export HOME=$(mktemp -d)
+    export BUN_INSTALL_CACHE_DIR=$(mktemp -d)
+    # Install for every cpu/os so the output (and thus the FOD hash) does
+    # not depend on the build platform.
+    bun install \
+      --cpu="*" \
+      --os="*" \
+      --frozen-lockfile \
+      --ignore-scripts \
+      --no-progress \
+      --linker=isolated
+    # Rewrite bun's store layout and .bin links into a deterministic form.
+    bun --bun ${args.canonicalizeScript}
+    bun --bun ${args.normalizeBinsScript}
+    runHook postBuild
+  '';
+
+  installPhase = ''
+    runHook preInstall
+    mkdir -p $out
+    # Copy every node_modules tree (workspace packages included), keeping
+    # the relative layout under $out so it can be restored with one cp -R.
+    while IFS= read -r dir; do
+      rel="''${dir#./}"
+      dest="$out/$rel"
+      mkdir -p "$(dirname "$dest")"
+      cp -R "$dir" "$dest"
+    done < <(find . -type d -name node_modules -prune | sort)
+    runHook postInstall
+  '';
+
+  dontFixup = true;
+
+  outputHashAlgo = "sha256";
+  outputHashMode = "recursive";
+  outputHash = hash;
+}

+ 108 - 0
nix/opencode.nix

@@ -0,0 +1,108 @@
+# Builds the opencode binary with bun, reusing the pre-fetched
+# node_modules derivation from nix/node-modules.nix. `args` supplies
+# version, src, the scripts dir, the bun compile target, the models.dev
+# snapshot path and the mkNodeModules builder.
+{ lib, stdenv, stdenvNoCC, bun, fzf, ripgrep, makeBinaryWrapper }:
+args:
+let
+  scripts = args.scripts;
+  # Wire the determinism scripts into the node_modules builder.
+  mkModules =
+    attrs:
+    args.mkNodeModules (
+      attrs
+      // {
+        canonicalizeScript = scripts + "/canonicalize-node-modules.ts";
+        normalizeBinsScript = scripts + "/normalize-bun-binaries.ts";
+      }
+    );
+in
+stdenvNoCC.mkDerivation (finalAttrs: {
+  pname = "opencode";
+  version = args.version;
+
+  src = args.src;
+
+  # Vendored dependency tree; restored into the build dir in configurePhase.
+  node_modules = mkModules {
+    version = finalAttrs.version;
+    src = finalAttrs.src;
+  };
+
+  nativeBuildInputs = [
+    bun
+    makeBinaryWrapper
+  ];
+
+  configurePhase = ''
+    runHook preConfigure
+    cp -R ${finalAttrs.node_modules}/. .
+    runHook postConfigure
+  '';
+
+  # Consumed at bundle time (see models-macro.ts / bun-build.ts).
+  env.MODELS_DEV_API_JSON = args.modelsDev;
+  env.OPENCODE_VERSION = args.version;
+  env.OPENCODE_CHANNEL = "stable";
+
+  buildPhase = ''
+    runHook preBuild
+
+    cp ${scripts + "/bun-build.ts"} bun-build.ts
+
+    substituteInPlace bun-build.ts \
+      --replace '@VERSION@' "${finalAttrs.version}"
+
+    export BUN_COMPILE_TARGET=${args.target}
+    bun --bun bun-build.ts
+
+    runHook postBuild
+  '';
+
+  # The bun-compiled binary embeds assets; stripping would corrupt it.
+  dontStrip = true;
+
+  installPhase = ''
+    runHook preInstall
+
+    cd packages/opencode
+    if [ ! -f opencode ]; then
+      echo "ERROR: opencode binary not found in $(pwd)"
+      ls -la
+      exit 1
+    fi
+    if [ ! -f opencode-worker.js ]; then
+      echo "ERROR: opencode worker bundle not found in $(pwd)"
+      ls -la
+      exit 1
+    fi
+
+    install -Dm755 opencode $out/bin/opencode
+    install -Dm644 opencode-worker.js $out/bin/opencode-worker.js
+    # Install any extra assets bun-build.ts recorded next to the binary.
+    if [ -f opencode-assets.manifest ]; then
+      while IFS= read -r asset; do
+        [ -z "$asset" ] && continue
+        if [ ! -f "$asset" ]; then
+          echo "ERROR: referenced asset \"$asset\" missing"
+          exit 1
+        fi
+        install -Dm644 "$asset" "$out/bin/$(basename "$asset")"
+      done < opencode-assets.manifest
+    fi
+    runHook postInstall
+  '';
+
+  # Runtime CLI tools the agent shells out to.
+  postFixup = ''
+    wrapProgram "$out/bin/opencode" --prefix PATH : ${lib.makeBinPath [ fzf ripgrep ]}
+  '';
+
+  meta = {
+    description = "AI coding agent built for the terminal";
+    longDescription = ''
+      OpenCode is a terminal-based agent that can build anything.
+      It combines a TypeScript/JavaScript core with a Go-based TUI
+      to provide an interactive AI coding experience.
+    '';
+    homepage = "https://github.com/sst/opencode";
+    license = lib.licenses.mit;
+    platforms = [
+      "aarch64-linux"
+      "x86_64-linux"
+      "aarch64-darwin"
+      "x86_64-darwin"
+    ];
+    mainProgram = "opencode";
+  };
+})

+ 117 - 0
nix/scripts/bun-build.ts

@@ -0,0 +1,117 @@
+// Compiles the opencode CLI into a standalone bun binary plus a separate
+// worker bundle. Run from the repo root inside the Nix build; `@VERSION@`
+// is substituted with the real version by nix/opencode.nix beforehand.
+import solidPlugin from "./packages/opencode/node_modules/@opentui/solid/scripts/solid-plugin"
+import path from "path"
+import fs from "fs"
+
+const version = "@VERSION@"
+const pkg = path.join(process.cwd(), "packages/opencode")
+// realpath: the parser worker lives behind node_modules symlinks.
+const parser = fs.realpathSync(
+  path.join(pkg, "./node_modules/@opentui/core/parser.worker.js"),
+)
+const worker = "./src/cli/cmd/tui/worker.ts"
+const target = process.env["BUN_COMPILE_TARGET"]
+
+if (!target) {
+  throw new Error("BUN_COMPILE_TARGET not set")
+}
+
+process.chdir(pkg)
+
+// Manifest listing loose asset files produced by the build, so the Nix
+// installPhase knows what to install and reruns can clean up stale files.
+const manifestName = "opencode-assets.manifest"
+const manifestPath = path.join(pkg, manifestName)
+
+// Returns the asset names recorded by a previous run (empty if none).
+const readTrackedAssets = () => {
+  if (!fs.existsSync(manifestPath)) return []
+  return fs
+    .readFileSync(manifestPath, "utf8")
+    .split("\n")
+    .map((line) => line.trim())
+    .filter((line) => line.length > 0)
+}
+
+// Deletes assets left over from a previous run.
+const removeTrackedAssets = () => {
+  for (const file of readTrackedAssets()) {
+    const filePath = path.join(pkg, file)
+    if (fs.existsSync(filePath)) {
+      fs.rmSync(filePath, { force: true })
+    }
+  }
+}
+
+const assets = new Set<string>()
+
+// Copies a build-output asset next to the binary and records it.
+const addAsset = async (p: string) => {
+  const file = path.basename(p)
+  const dest = path.join(pkg, file)
+  await Bun.write(dest, Bun.file(p))
+  assets.add(file)
+}
+
+removeTrackedAssets()
+
+// Main compile: produces the `opencode` executable for $BUN_COMPILE_TARGET.
+const result = await Bun.build({
+  conditions: ["browser"],
+  tsconfig: "./tsconfig.json",
+  plugins: [solidPlugin],
+  sourcemap: "external",
+  entrypoints: ["./src/index.ts", parser, worker],
+  define: {
+    OPENCODE_VERSION: `'@VERSION@'`,
+    // Path of the parser worker inside the embedded bunfs of the binary.
+    OTUI_TREE_SITTER_WORKER_PATH: "/$bunfs/root/" + path.relative(pkg, parser).replace(/\\/g, "/"),
+    OPENCODE_CHANNEL: "'latest'",
+  },
+  compile: {
+    target,
+    outfile: "opencode",
+    execArgv: ["--user-agent=opencode/" + version, "--env-file=\"\"", "--"],
+    windows: {},
+  },
+})
+
+if (!result.success) {
+  console.error("Build failed!")
+  for (const log of result.logs) {
+    console.error(log)
+  }
+  throw new Error("Compilation failed")
+}
+
+// Collect loose assets emitted by the compile (e.g. wasm files).
+const assetOutputs = result.outputs?.filter((x) => x.kind === "asset") ?? []
+for (const x of assetOutputs) {
+  await addAsset(x.path)
+}
+
+// Second build: bundle the TUI worker as plain JS shipped beside the binary.
+const bundle = await Bun.build({
+  entrypoints: [worker],
+  tsconfig: "./tsconfig.json",
+  plugins: [solidPlugin],
+  target: "bun",
+  outdir: "./.opencode-worker",
+  sourcemap: "none",
+})
+
+if (!bundle.success) {
+  console.error("Worker build failed!")
+  for (const log of bundle.logs) {
+    console.error(log)
+  }
+  throw new Error("Worker compilation failed")
+}
+
+const workerAssets = bundle.outputs?.filter((x) => x.kind === "asset") ?? []
+for (const x of workerAssets) {
+  await addAsset(x.path)
+}
+
+const output = bundle.outputs.find((x) => x.kind === "entry-point")
+if (!output) {
+  throw new Error("Worker build produced no entry-point output")
+}
+
+// Move the worker bundle to its final name and drop the temp outdir.
+const dest = path.join(pkg, "opencode-worker.js")
+await Bun.write(dest, Bun.file(output.path))
+fs.rmSync(path.dirname(output.path), { recursive: true, force: true })
+
+// Persist the asset list for cleanup/install (empty file when no assets).
+const list = Array.from(assets)
+await Bun.write(manifestPath, list.length > 0 ? list.join("\n") + "\n" : "")
+
+console.log("Build successful!")

+ 94 - 0
nix/scripts/canonicalize-node-modules.ts

@@ -0,0 +1,94 @@
+// Rebuilds node_modules/.bun/node_modules as a deterministic set of
+// symlinks: for every package found under .bun (entries named like
+// "name@version", "@" in scopes encoded as "+"), pick the highest version
+// and link it, sorted, so the resulting tree hashes reproducibly.
+import { lstat, mkdir, readdir, rm, symlink } from "fs/promises"
+import { join, relative } from "path"
+
+// Minimal view of the semver package API we rely on.
+type SemverLike = {
+  valid: (value: string) => string | null
+  rcompare: (left: string, right: string) => number
+}
+
+type Entry = {
+  dir: string
+  version: string
+  label: string
+}
+
+const root = process.cwd()
+const bunRoot = join(root, "node_modules/.bun")
+const linkRoot = join(bunRoot, "node_modules")
+const directories = (await readdir(bunRoot)).sort()
+// slug (e.g. "@scope/pkg") -> all installed versions of that package.
+const versions = new Map<string, Entry[]>()
+
+for (const entry of directories) {
+  const full = join(bunRoot, entry)
+  const info = await lstat(full)
+  if (!info.isDirectory()) {
+    continue
+  }
+  // Split "name@version"; lastIndexOf so scoped names ("@scope+pkg@1.0")
+  // keep their leading "@".
+  const marker = entry.lastIndexOf("@")
+  if (marker <= 0) {
+    continue
+  }
+  const slug = entry.slice(0, marker).replace(/\+/g, "/")
+  const version = entry.slice(marker + 1)
+  const list = versions.get(slug) ?? []
+  list.push({ dir: full, version, label: entry })
+  versions.set(slug, list)
+}
+
+// Load semver out of the freshly installed tree (handles both CJS/ESM
+// default-export shapes).
+const semverModule = (await import(join(bunRoot, "node_modules/semver"))) as SemverLike | {
+  default: SemverLike
+}
+const semver = "default" in semverModule ? semverModule.default : semverModule
+const selections = new Map<string, Entry>()
+
+for (const [slug, list] of versions) {
+  // Sort newest-first: valid semver beats invalid, then rcompare, then a
+  // stable lexicographic fallback for non-semver tags.
+  list.sort((a, b) => {
+    const left = semver.valid(a.version)
+    const right = semver.valid(b.version)
+    if (left && right) {
+      const delta = semver.rcompare(left, right)
+      if (delta !== 0) {
+        return delta
+      }
+    }
+    if (left && !right) {
+      return -1
+    }
+    if (!left && right) {
+      return 1
+    }
+    return b.version.localeCompare(a.version)
+  })
+  selections.set(slug, list[0])
+}
+
+// Recreate the link root from scratch so stale links never survive.
+await rm(linkRoot, { recursive: true, force: true })
+await mkdir(linkRoot, { recursive: true })
+
+const rewrites: string[] = []
+
+// Deterministic order: iterate slugs sorted.
+for (const [slug, entry] of Array.from(selections.entries()).sort((a, b) => a[0].localeCompare(b[0]))) {
+  const parts = slug.split("/")
+  const leaf = parts.pop()
+  if (!leaf) {
+    continue
+  }
+  const parent = join(linkRoot, ...parts)
+  await mkdir(parent, { recursive: true })
+  const linkPath = join(parent, leaf)
+  const desired = join(entry.dir, "node_modules", slug)
+  // Relative symlinks keep the tree relocatable into the Nix store.
+  const relativeTarget = relative(parent, desired)
+  const resolved = relativeTarget.length === 0 ? "." : relativeTarget
+  await rm(linkPath, { recursive: true, force: true })
+  await symlink(resolved, linkPath)
+  rewrites.push(slug + " -> " + resolved)
+}
+
+rewrites.sort()
+console.log("[canonicalize-node-modules] rebuilt", rewrites.length, "links")
+for (const line of rewrites.slice(0, 20)) {
+  console.log("  ", line)
+}
+if (rewrites.length > 20) {
+  console.log("  ...")
+}

+ 138 - 0
nix/scripts/normalize-bun-binaries.ts

@@ -0,0 +1,138 @@
+// Rewrites every node_modules/.bin directory under node_modules/.bun as
+// deterministic relative symlinks derived from each package's "bin" field,
+// so the vendored tree hashes reproducibly.
+import { lstat, mkdir, readdir, rm, symlink } from "fs/promises"
+import { join, relative } from "path"
+
+// The subset of package.json we read.
+type PackageManifest = {
+  name?: string
+  bin?: string | Record<string, string>
+}
+
+const root = process.cwd()
+const bunRoot = join(root, "node_modules/.bun")
+const bunEntries = (await safeReadDir(bunRoot)).sort()
+let rewritten = 0
+
+for (const entry of bunEntries) {
+  const modulesRoot = join(bunRoot, entry, "node_modules")
+  if (!(await exists(modulesRoot))) {
+    continue
+  }
+  // Rebuild .bin from scratch so no stale or non-deterministic links remain.
+  const binRoot = join(modulesRoot, ".bin")
+  await rm(binRoot, { recursive: true, force: true })
+  await mkdir(binRoot, { recursive: true })
+
+  const packageDirs = await collectPackages(modulesRoot)
+  for (const packageDir of packageDirs) {
+    const manifest = await readManifest(packageDir)
+    if (!manifest) {
+      continue
+    }
+    const binField = manifest.bin
+    if (!binField) {
+      continue
+    }
+    // Tracks bin names already linked for this package (first wins).
+    const seen = new Set<string>()
+    if (typeof binField === "string") {
+      // String form: bin name defaults to the package name (or dir leaf).
+      const fallback = manifest.name ?? packageDir.split("/").pop()
+      if (fallback) {
+        await linkBinary(binRoot, fallback, packageDir, binField, seen)
+      }
+    } else {
+      // Object form: link each entry in sorted (deterministic) order.
+      const entries = Object.entries(binField).sort((a, b) => a[0].localeCompare(b[0]))
+      for (const [name, target] of entries) {
+        await linkBinary(binRoot, name, packageDir, target, seen)
+      }
+    }
+  }
+}
+
+console.log(`[normalize-bun-binaries] rewrote ${rewritten} links`)
+
+// Lists all package dirs directly under modulesRoot, descending one level
+// into @scope directories; skips the .bin/.bun bookkeeping dirs.
+async function collectPackages(modulesRoot: string) {
+  const found: string[] = []
+  const topLevel = (await safeReadDir(modulesRoot)).sort()
+  for (const name of topLevel) {
+    if (name === ".bin" || name === ".bun") {
+      continue
+    }
+    const full = join(modulesRoot, name)
+    if (!(await isDirectory(full))) {
+      continue
+    }
+    if (name.startsWith("@")) {
+      const scoped = (await safeReadDir(full)).sort()
+      for (const child of scoped) {
+        const scopedDir = join(full, child)
+        if (await isDirectory(scopedDir)) {
+          found.push(scopedDir)
+        }
+      }
+      continue
+    }
+    found.push(full)
+  }
+  return found.sort()
+}
+
+// Parses <dir>/package.json, or null when the file is absent.
+async function readManifest(dir: string) {
+  const file = Bun.file(join(dir, "package.json"))
+  if (!(await file.exists())) {
+    return null
+  }
+  const data = (await file.json()) as PackageManifest
+  return data
+}
+
+// Creates binRoot/<name> -> relative path to the package's bin script.
+// Skips silently when the target script is missing or the name was taken.
+async function linkBinary(binRoot: string, name: string, packageDir: string, target: string, seen: Set<string>) {
+  if (!name || !target) {
+    return
+  }
+  const normalizedName = normalizeBinName(name)
+  if (seen.has(normalizedName)) {
+    return
+  }
+  const resolved = join(packageDir, target)
+  const script = Bun.file(resolved)
+  if (!(await script.exists())) {
+    return
+  }
+  seen.add(normalizedName)
+  const destination = join(binRoot, normalizedName)
+  // Relative links keep the tree relocatable into the Nix store.
+  const relativeTarget = relative(binRoot, resolved) || "."
+  await rm(destination, { force: true })
+  await symlink(relativeTarget, destination)
+  rewritten++
+}
+
+// lstat-based existence check (does not follow symlinks).
+async function exists(path: string) {
+  try {
+    await lstat(path)
+    return true
+  } catch {
+    return false
+  }
+}
+
+async function isDirectory(path: string) {
+  try {
+    const info = await lstat(path)
+    return info.isDirectory()
+  } catch {
+    return false
+  }
+}
+
+// readdir that returns [] instead of throwing for missing dirs.
+async function safeReadDir(path: string) {
+  try {
+    return await readdir(path)
+  } catch {
+    return []
+  }
+}
+
+// "bin" names may contain paths; only the basename becomes the link name.
+function normalizeBinName(name: string) {
+  const slash = name.lastIndexOf("/")
+  if (slash >= 0) {
+    return name.slice(slash + 1)
+  }
+  return name
+}

+ 112 - 0
nix/scripts/update-hashes.sh

@@ -0,0 +1,112 @@
+#!/usr/bin/env bash
+# Discovers the correct fixed-output hash for the node_modules derivation:
+# build with a dummy hash, then harvest the real hash from the realized
+# output, the "got: sha256-..." mismatch message, or a kept failed build
+# directory, and write it into nix/hashes.json.
+
+set -euo pipefail
+
+DUMMY="sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="
+SYSTEM=${SYSTEM:-x86_64-linux}
+DEFAULT_HASH_FILE=${MODULES_HASH_FILE:-nix/hashes.json}
+HASH_FILE=${HASH_FILE:-$DEFAULT_HASH_FILE}
+
+# Seed the hash file with the dummy value when it does not exist yet.
+if [ ! -f "$HASH_FILE" ]; then
+  cat >"$HASH_FILE" <<EOF
+{
+  "nodeModules": "$DUMMY"
+}
+EOF
+fi
+
+# Flake evaluation only sees tracked files; ensure the file is at least
+# registered with git (intent-to-add).
+if git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
+  if ! git ls-files --error-unmatch "$HASH_FILE" >/dev/null 2>&1; then
+    git add -N "$HASH_FILE" >/dev/null 2>&1 || true
+  fi
+fi
+
+export DUMMY
+export NIX_KEEP_OUTPUTS=1
+export NIX_KEEP_DERIVATIONS=1
+
+cleanup() {
+  rm -f "${JSON_OUTPUT:-}" "${BUILD_LOG:-}" "${TMP_EXPR:-}"
+}
+
+trap cleanup EXIT
+
+# Atomically writes $1 into the .nodeModules key of the hash file.
+write_node_modules_hash() {
+  local value="$1"
+  local temp
+  temp=$(mktemp)
+  jq --arg value "$value" '.nodeModules = $value' "$HASH_FILE" >"$temp"
+  mv "$temp" "$HASH_FILE"
+}
+
+MODULES_ATTR=".#packages.${SYSTEM}.default.node_modules"
+CORRECT_HASH=""
+
+DRV_PATH="$(nix eval --raw "${MODULES_ATTR}.drvPath")"
+
+echo "Setting dummy node_modules outputHash for ${SYSTEM}..."
+write_node_modules_hash "$DUMMY"
+
+BUILD_LOG=$(mktemp)
+JSON_OUTPUT=$(mktemp)
+
+echo "Building node_modules for ${SYSTEM} to discover correct outputHash..."
+echo "Attempting to realize derivation: ${DRV_PATH}"
+REALISE_OUT=$(nix-store --realise "$DRV_PATH" --keep-failed 2>&1 | tee "$BUILD_LOG" || true)
+
+# Happy path: the build succeeded (hash already correct) and printed the
+# store path; hash it directly.
+BUILD_PATH=$(echo "$REALISE_OUT" | grep "^/nix/store/" | head -n1 || true)
+if [ -n "$BUILD_PATH" ] && [ -d "$BUILD_PATH" ]; then
+  echo "Realized node_modules output: $BUILD_PATH"
+  CORRECT_HASH=$(nix hash path --sri "$BUILD_PATH" 2>/dev/null || true)
+fi
+
+if [ -z "$CORRECT_HASH" ]; then
+  # Usual path: parse the "got: sha256-..." line of the mismatch error.
+  CORRECT_HASH="$(grep -E 'got:\s+sha256-[A-Za-z0-9+/=]+' "$BUILD_LOG" | awk '{print $2}' | head -n1 || true)"
+
+  if [ -z "$CORRECT_HASH" ]; then
+    CORRECT_HASH="$(grep -A2 'hash mismatch' "$BUILD_LOG" | grep 'got:' | awk '{print $2}' | sed 's/sha256:/sha256-/' || true)"
+  fi
+
+  if [ -z "$CORRECT_HASH" ]; then
+    echo "Searching for kept failed build directory..."
+    # '|| true' is required: under `set -e -o pipefail` a non-matching grep
+    # would abort the whole script here instead of reaching the fallbacks
+    # and the final error report below.
+    KEPT_DIR=$(grep -oE "build directory.*'[^']+'" "$BUILD_LOG" | grep -oE "'/[^']+'" | tr -d "'" | head -n1 || true)
+
+    if [ -z "$KEPT_DIR" ]; then
+      KEPT_DIR=$(grep -oE '/nix/var/nix/builds/[^ ]+' "$BUILD_LOG" | head -n1 || true)
+    fi
+
+    if [ -n "$KEPT_DIR" ] && [ -d "$KEPT_DIR" ]; then
+      echo "Found kept build directory: $KEPT_DIR"
+      if [ -d "$KEPT_DIR/build" ]; then
+        HASH_PATH="$KEPT_DIR/build"
+      else
+        HASH_PATH="$KEPT_DIR"
+      fi
+
+      echo "Attempting to hash: $HASH_PATH"
+      ls -la "$HASH_PATH" || true
+
+      if [ -d "$HASH_PATH/node_modules" ]; then
+        CORRECT_HASH=$(nix hash path --sri "$HASH_PATH" 2>/dev/null || true)
+        echo "Computed hash from kept build: $CORRECT_HASH"
+      fi
+    fi
+  fi
+fi
+
+if [ -z "$CORRECT_HASH" ]; then
+  echo "Failed to determine correct node_modules hash for ${SYSTEM}."
+  echo "Build log:"
+  cat "$BUILD_LOG"
+  exit 1
+fi
+
+write_node_modules_hash "$CORRECT_HASH"
+
+# Sanity check: the file now contains exactly the discovered hash.
+jq -e --arg hash "$CORRECT_HASH" '.nodeModules == $hash' "$HASH_FILE" >/dev/null
+
+echo "node_modules hash updated for ${SYSTEM}: $CORRECT_HASH"
+
+rm -f "$BUILD_LOG"
+unset BUILD_LOG

+ 10 - 5
packages/opencode/src/cli/cmd/tui/thread.ts

@@ -57,11 +57,16 @@ export const TuiThreadCommand = cmd({
     // Resolve relative paths against PWD to preserve behavior when using --cwd flag
     const baseCwd = process.env.PWD ?? process.cwd()
     const cwd = args.project ? path.resolve(baseCwd, args.project) : process.cwd()
-    let workerPath: string | URL = new URL("./worker.ts", import.meta.url)
-
-    if (typeof OPENCODE_WORKER_PATH !== "undefined") {
-      workerPath = OPENCODE_WORKER_PATH
-    }
+    const defaultWorker = new URL("./worker.ts", import.meta.url)
+    // Nix build creates a bundled worker next to the binary; prefer it when present.
+    const execDir = path.dirname(process.execPath)
+    const bundledWorker = path.join(execDir, "opencode-worker.js")
+    const hasBundledWorker = await Bun.file(bundledWorker).exists()
+    const workerPath = (() => {
+      if (typeof OPENCODE_WORKER_PATH !== "undefined") return OPENCODE_WORKER_PATH
+      if (hasBundledWorker) return bundledWorker
+      return defaultWorker
+    })()
     try {
       process.chdir(cwd)
     } catch (e) {

+ 7 - 0
packages/opencode/src/provider/models-macro.ts

@@ -1,4 +1,11 @@
 export async function data() {
+  const path = Bun.env.MODELS_DEV_API_JSON
+  if (path) {
+    const file = Bun.file(path)
+    if (await file.exists()) {
+      return await file.text()
+    }
+  }
   const json = await fetch("https://models.dev/api.json").then((x) => x.text())
   return json
 }

+ 12 - 2
packages/opencode/src/tool/bash.ts

@@ -11,6 +11,7 @@ import { $ } from "bun"
 import { Filesystem } from "@/util/filesystem"
 import { Wildcard } from "@/util/wildcard"
 import { Permission } from "@/permission"
+import { fileURLToPath } from "url"
 
 const MAX_OUTPUT_LENGTH = 30_000
 const DEFAULT_TIMEOUT = 1 * 60 * 1000
@@ -19,20 +20,29 @@ const SIGKILL_TIMEOUT_MS = 200
 
 export const log = Log.create({ service: "bash-tool" })
 
+const resolveWasm = (asset: string) => {
+  if (asset.startsWith("file://")) return fileURLToPath(asset)
+  if (asset.startsWith("/")) return asset
+  const url = new URL(asset, import.meta.url)
+  return fileURLToPath(url)
+}
+
 const parser = lazy(async () => {
   const { Parser } = await import("web-tree-sitter")
   const { default: treeWasm } = await import("web-tree-sitter/tree-sitter.wasm" as string, {
     with: { type: "wasm" },
   })
+  const treePath = resolveWasm(treeWasm)
   await Parser.init({
     locateFile() {
-      return treeWasm
+      return treePath
     },
   })
   const { default: bashWasm } = await import("tree-sitter-bash/tree-sitter-bash.wasm" as string, {
     with: { type: "wasm" },
   })
-  const bashLanguage = await Language.load(bashWasm)
+  const bashPath = resolveWasm(bashWasm)
+  const bashLanguage = await Language.load(bashPath)
   const p = new Parser()
   p.setLanguage(bashLanguage)
   return p