feat: nix support for the nix folks (#3924)
Co-authored-by: opencode <opencode@sst.dev>
Co-authored-by: opencode-agent[bot] <opencode-agent[bot]@users.noreply.github.com>
nix/scripts/bun-build.ts (new file, 117 lines)
@@ -0,0 +1,117 @@
import solidPlugin from "./packages/opencode/node_modules/@opentui/solid/scripts/solid-plugin"
import path from "path"
import fs from "fs"

const version = "@VERSION@"
const pkg = path.join(process.cwd(), "packages/opencode")
const parser = fs.realpathSync(
  path.join(pkg, "./node_modules/@opentui/core/parser.worker.js"),
)
const worker = "./src/cli/cmd/tui/worker.ts"
const target = process.env["BUN_COMPILE_TARGET"]

if (!target) {
  throw new Error("BUN_COMPILE_TARGET not set")
}

process.chdir(pkg)

const manifestName = "opencode-assets.manifest"
const manifestPath = path.join(pkg, manifestName)

const readTrackedAssets = () => {
  if (!fs.existsSync(manifestPath)) return []
  return fs
    .readFileSync(manifestPath, "utf8")
    .split("\n")
    .map((line) => line.trim())
    .filter((line) => line.length > 0)
}

const removeTrackedAssets = () => {
  for (const file of readTrackedAssets()) {
    const filePath = path.join(pkg, file)
    if (fs.existsSync(filePath)) {
      fs.rmSync(filePath, { force: true })
    }
  }
}

const assets = new Set<string>()

const addAsset = async (p: string) => {
  const file = path.basename(p)
  const dest = path.join(pkg, file)
  await Bun.write(dest, Bun.file(p))
  assets.add(file)
}

removeTrackedAssets()

const result = await Bun.build({
  conditions: ["browser"],
  tsconfig: "./tsconfig.json",
  plugins: [solidPlugin],
  sourcemap: "external",
  entrypoints: ["./src/index.ts", parser, worker],
  define: {
    OPENCODE_VERSION: `'@VERSION@'`,
    OTUI_TREE_SITTER_WORKER_PATH: "/$bunfs/root/" + path.relative(pkg, parser).replace(/\\/g, "/"),
    OPENCODE_CHANNEL: "'latest'",
  },
  compile: {
    target,
    outfile: "opencode",
    execArgv: ["--user-agent=opencode/" + version, "--env-file=\"\"", "--"],
    windows: {},
  },
})
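
// Bun exposes files embedded in a compiled binary under the virtual
// /$bunfs/root/ prefix, which is why OTUI_TREE_SITTER_WORKER_PATH above is
// defined relative to that root rather than to a real on-disk path.
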
if (!result.success) {
  console.error("Build failed!")
  for (const log of result.logs) {
    console.error(log)
  }
  throw new Error("Compilation failed")
}

const assetOutputs = result.outputs?.filter((x) => x.kind === "asset") ?? []
for (const x of assetOutputs) {
  await addAsset(x.path)
}

const bundle = await Bun.build({
  entrypoints: [worker],
  tsconfig: "./tsconfig.json",
  plugins: [solidPlugin],
  target: "bun",
  outdir: "./.opencode-worker",
  sourcemap: "none",
})

if (!bundle.success) {
  console.error("Worker build failed!")
  for (const log of bundle.logs) {
    console.error(log)
  }
  throw new Error("Worker compilation failed")
}

const workerAssets = bundle.outputs?.filter((x) => x.kind === "asset") ?? []
for (const x of workerAssets) {
  await addAsset(x.path)
}

const output = bundle.outputs.find((x) => x.kind === "entry-point")
if (!output) {
  throw new Error("Worker build produced no entry-point output")
}

const dest = path.join(pkg, "opencode-worker.js")
await Bun.write(dest, Bun.file(output.path))
fs.rmSync(path.dirname(output.path), { recursive: true, force: true })

const list = Array.from(assets)
await Bun.write(manifestPath, list.length > 0 ? list.join("\n") + "\n" : "")

console.log("Build successful!")
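
The script is driven entirely by its environment: the @VERSION@ placeholder is expected to be substituted before the script runs, and BUN_COMPILE_TARGET must name a Bun compile target. A minimal driver sketch (hypothetical, not part of this commit; the target triple is only an example):

// Hypothetical driver for nix/scripts/bun-build.ts; assumes bun is on PATH
// and the repository root is the working directory.
const proc = Bun.spawn(["bun", "nix/scripts/bun-build.ts"], {
  env: { ...process.env, BUN_COMPILE_TARGET: "bun-linux-x64" }, // example target
  stdout: "inherit",
  stderr: "inherit",
})
if ((await proc.exited) !== 0) throw new Error("bun-build.ts failed")
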
nix/scripts/canonicalize-node-modules.ts (new file, 94 lines)
@@ -0,0 +1,94 @@
import { lstat, mkdir, readdir, rm, symlink } from "fs/promises"
import { join, relative } from "path"

type SemverLike = {
  valid: (value: string) => string | null
  rcompare: (left: string, right: string) => number
}

type Entry = {
  dir: string
  version: string
  label: string
}

const root = process.cwd()
const bunRoot = join(root, "node_modules/.bun")
const linkRoot = join(bunRoot, "node_modules")
const directories = (await readdir(bunRoot)).sort()
const versions = new Map<string, Entry[]>()

for (const entry of directories) {
  const full = join(bunRoot, entry)
  const info = await lstat(full)
  if (!info.isDirectory()) {
    continue
  }
  const marker = entry.lastIndexOf("@")
  if (marker <= 0) {
    continue
  }
  const slug = entry.slice(0, marker).replace(/\+/g, "/")
  const version = entry.slice(marker + 1)
  const list = versions.get(slug) ?? []
  list.push({ dir: full, version, label: entry })
  versions.set(slug, list)
}
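
// Worked example of the directory-name convention parsed above (the entry
// is hypothetical, not taken from the lockfile): bun flattens "@opentui/core"
// at version "0.1.27" into the directory name "@opentui+core@0.1.27".
// lastIndexOf("@") finds the version separator without tripping over the
// scope's leading "@" (marker <= 0 skips those), the slice-and-replace
// recovers the slug "@opentui/core", and slice(marker + 1) yields "0.1.27".
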
const semverModule = (await import(join(bunRoot, "node_modules/semver"))) as SemverLike | {
  default: SemverLike
}
const semver = "default" in semverModule ? semverModule.default : semverModule
const selections = new Map<string, Entry>()

for (const [slug, list] of versions) {
  list.sort((a, b) => {
    const left = semver.valid(a.version)
    const right = semver.valid(b.version)
    if (left && right) {
      const delta = semver.rcompare(left, right)
      if (delta !== 0) {
        return delta
      }
    }
    if (left && !right) {
      return -1
    }
    if (!left && right) {
      return 1
    }
    return b.version.localeCompare(a.version)
  })
  selections.set(slug, list[0])
}

await rm(linkRoot, { recursive: true, force: true })
await mkdir(linkRoot, { recursive: true })

const rewrites: string[] = []

for (const [slug, entry] of Array.from(selections.entries()).sort((a, b) => a[0].localeCompare(b[0]))) {
  const parts = slug.split("/")
  const leaf = parts.pop()
  if (!leaf) {
    continue
  }
  const parent = join(linkRoot, ...parts)
  await mkdir(parent, { recursive: true })
  const linkPath = join(parent, leaf)
  const desired = join(entry.dir, "node_modules", slug)
  const relativeTarget = relative(parent, desired)
  const resolved = relativeTarget.length === 0 ? "." : relativeTarget
  await rm(linkPath, { recursive: true, force: true })
  await symlink(resolved, linkPath)
  rewrites.push(slug + " -> " + resolved)
}

rewrites.sort()
console.log("[canonicalize-node-modules] rebuilt", rewrites.length, "links")
for (const line of rewrites.slice(0, 20)) {
  console.log("  ", line)
}
if (rewrites.length > 20) {
  console.log("  ...")
}
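
After the rewrite, node_modules/.bun/node_modules contains exactly one relative symlink per package slug, pointing into the winning version's own node_modules tree. A quick inspection sketch (hypothetical package and version, shown only to illustrate the layout):

// Hypothetical check of a rebuilt link; the name and version are examples.
import { readlink } from "fs/promises"
const target = await readlink("node_modules/.bun/node_modules/@opentui/core")
console.log(target)
// e.g. "../../@opentui+core@0.1.27/node_modules/@opentui/core"
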
nix/scripts/normalize-bun-binaries.ts (new file, 138 lines)
@@ -0,0 +1,138 @@
import { lstat, mkdir, readdir, rm, symlink } from "fs/promises"
import { join, relative } from "path"

type PackageManifest = {
  name?: string
  bin?: string | Record<string, string>
}

const root = process.cwd()
const bunRoot = join(root, "node_modules/.bun")
const bunEntries = (await safeReadDir(bunRoot)).sort()
let rewritten = 0

for (const entry of bunEntries) {
  const modulesRoot = join(bunRoot, entry, "node_modules")
  if (!(await exists(modulesRoot))) {
    continue
  }
  const binRoot = join(modulesRoot, ".bin")
  await rm(binRoot, { recursive: true, force: true })
  await mkdir(binRoot, { recursive: true })

  const packageDirs = await collectPackages(modulesRoot)
  for (const packageDir of packageDirs) {
    const manifest = await readManifest(packageDir)
    if (!manifest) {
      continue
    }
    const binField = manifest.bin
    if (!binField) {
      continue
    }
    const seen = new Set<string>()
    if (typeof binField === "string") {
      const fallback = manifest.name ?? packageDir.split("/").pop()
      if (fallback) {
        await linkBinary(binRoot, fallback, packageDir, binField, seen)
      }
    } else {
      const entries = Object.entries(binField).sort((a, b) => a[0].localeCompare(b[0]))
      for (const [name, target] of entries) {
        await linkBinary(binRoot, name, packageDir, target, seen)
      }
    }
  }
}

console.log(`[normalize-bun-binaries] rewrote ${rewritten} links`)

async function collectPackages(modulesRoot: string) {
  const found: string[] = []
  const topLevel = (await safeReadDir(modulesRoot)).sort()
  for (const name of topLevel) {
    if (name === ".bin" || name === ".bun") {
      continue
    }
    const full = join(modulesRoot, name)
    if (!(await isDirectory(full))) {
      continue
    }
    if (name.startsWith("@")) {
      const scoped = (await safeReadDir(full)).sort()
      for (const child of scoped) {
        const scopedDir = join(full, child)
        if (await isDirectory(scopedDir)) {
          found.push(scopedDir)
        }
      }
      continue
    }
    found.push(full)
  }
  return found.sort()
}

async function readManifest(dir: string) {
  const file = Bun.file(join(dir, "package.json"))
  if (!(await file.exists())) {
    return null
  }
  const data = (await file.json()) as PackageManifest
  return data
}

async function linkBinary(binRoot: string, name: string, packageDir: string, target: string, seen: Set<string>) {
  if (!name || !target) {
    return
  }
  const normalizedName = normalizeBinName(name)
  if (seen.has(normalizedName)) {
    return
  }
  const resolved = join(packageDir, target)
  const script = Bun.file(resolved)
  if (!(await script.exists())) {
    return
  }
  seen.add(normalizedName)
  const destination = join(binRoot, normalizedName)
  const relativeTarget = relative(binRoot, resolved) || "."
  await rm(destination, { force: true })
  await symlink(relativeTarget, destination)
  rewritten++
}

async function exists(path: string) {
  try {
    await lstat(path)
    return true
  } catch {
    return false
  }
}

async function isDirectory(path: string) {
  try {
    const info = await lstat(path)
    return info.isDirectory()
  } catch {
    return false
  }
}

async function safeReadDir(path: string) {
  try {
    return await readdir(path)
  } catch {
    return []
  }
}

function normalizeBinName(name: string) {
  const slash = name.lastIndexOf("/")
  if (slash >= 0) {
    return name.slice(slash + 1)
  }
  return name
}
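
linkBinary handles both shapes of the package.json bin field: a bare string, where the command name falls back to the package name, and an object mapping command names to script paths. A small illustration with made-up manifests (no real packages implied):

// Hypothetical manifests covering the two `bin` shapes the loop above walks.
type Manifest = { name?: string; bin?: string | Record<string, string> }
const examples: Manifest[] = [
  { name: "tool-a", bin: "bin/cli.js" }, // -> .bin/tool-a
  { name: "tool-b", bin: { "tool-b": "dist/cli.js", "tool-b-server": "dist/server.js" } },
]
for (const m of examples) {
  const pairs = typeof m.bin === "string" ? [[m.name ?? "", m.bin]] : Object.entries(m.bin ?? {})
  for (const [cmd, target] of pairs) console.log(`.bin/${cmd} -> ${target}`)
}
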
nix/scripts/update-hashes.sh (new executable file, 112 lines)
@@ -0,0 +1,112 @@
#!/usr/bin/env bash

set -euo pipefail

DUMMY="sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="
SYSTEM=${SYSTEM:-x86_64-linux}
DEFAULT_HASH_FILE=${MODULES_HASH_FILE:-nix/hashes.json}
HASH_FILE=${HASH_FILE:-$DEFAULT_HASH_FILE}

if [ ! -f "$HASH_FILE" ]; then
  cat >"$HASH_FILE" <<EOF
{
  "nodeModules": "$DUMMY"
}
EOF
fi

if git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
  if ! git ls-files --error-unmatch "$HASH_FILE" >/dev/null 2>&1; then
    git add -N "$HASH_FILE" >/dev/null 2>&1 || true
  fi
fi

export DUMMY
export NIX_KEEP_OUTPUTS=1
export NIX_KEEP_DERIVATIONS=1

cleanup() {
  rm -f "${JSON_OUTPUT:-}" "${BUILD_LOG:-}" "${TMP_EXPR:-}"
}

trap cleanup EXIT

write_node_modules_hash() {
  local value="$1"
  local temp
  temp=$(mktemp)
  jq --arg value "$value" '.nodeModules = $value' "$HASH_FILE" >"$temp"
  mv "$temp" "$HASH_FILE"
}

TARGET="packages.${SYSTEM}.default"
MODULES_ATTR=".#packages.${SYSTEM}.default.node_modules"
CORRECT_HASH=""

DRV_PATH="$(nix eval --raw "${MODULES_ATTR}.drvPath")"

echo "Setting dummy node_modules outputHash for ${SYSTEM}..."
write_node_modules_hash "$DUMMY"
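
# The dummy hash deliberately mismatches: realizing the fixed-output
# derivation either leaves a usable output (hashed below with
# `nix hash path --sri`) or fails with a "hash mismatch ... got: sha256-..."
# line, and the fallbacks that follow scrape the real hash out of either.
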
BUILD_LOG=$(mktemp)
JSON_OUTPUT=$(mktemp)

echo "Building node_modules for ${SYSTEM} to discover correct outputHash..."
echo "Attempting to realize derivation: ${DRV_PATH}"
REALISE_OUT=$(nix-store --realise "$DRV_PATH" --keep-failed 2>&1 | tee "$BUILD_LOG" || true)

BUILD_PATH=$(echo "$REALISE_OUT" | grep "^/nix/store/" | head -n1 || true)
if [ -n "$BUILD_PATH" ] && [ -d "$BUILD_PATH" ]; then
  echo "Realized node_modules output: $BUILD_PATH"
  CORRECT_HASH=$(nix hash path --sri "$BUILD_PATH" 2>/dev/null || true)
fi

if [ -z "$CORRECT_HASH" ]; then
  CORRECT_HASH="$(grep -E 'got:\s+sha256-[A-Za-z0-9+/=]+' "$BUILD_LOG" | awk '{print $2}' | head -n1 || true)"

  if [ -z "$CORRECT_HASH" ]; then
    CORRECT_HASH="$(grep -A2 'hash mismatch' "$BUILD_LOG" | grep 'got:' | awk '{print $2}' | sed 's/sha256:/sha256-/' || true)"
  fi

  if [ -z "$CORRECT_HASH" ]; then
    echo "Searching for kept failed build directory..."
    KEPT_DIR=$(grep -oE "build directory.*'[^']+'" "$BUILD_LOG" | grep -oE "'/[^']+'" | tr -d "'" | head -n1)

    if [ -z "$KEPT_DIR" ]; then
      KEPT_DIR=$(grep -oE '/nix/var/nix/builds/[^ ]+' "$BUILD_LOG" | head -n1)
    fi

    if [ -n "$KEPT_DIR" ] && [ -d "$KEPT_DIR" ]; then
      echo "Found kept build directory: $KEPT_DIR"
      if [ -d "$KEPT_DIR/build" ]; then
        HASH_PATH="$KEPT_DIR/build"
      else
        HASH_PATH="$KEPT_DIR"
      fi

      echo "Attempting to hash: $HASH_PATH"
      ls -la "$HASH_PATH" || true

      if [ -d "$HASH_PATH/node_modules" ]; then
        CORRECT_HASH=$(nix hash path --sri "$HASH_PATH" 2>/dev/null || true)
        echo "Computed hash from kept build: $CORRECT_HASH"
      fi
    fi
  fi
fi

if [ -z "$CORRECT_HASH" ]; then
  echo "Failed to determine correct node_modules hash for ${SYSTEM}."
  echo "Build log:"
  cat "$BUILD_LOG"
  exit 1
fi

write_node_modules_hash "$CORRECT_HASH"

jq -e --arg hash "$CORRECT_HASH" '.nodeModules == $hash' "$HASH_FILE" >/dev/null

echo "node_modules hash updated for ${SYSTEM}: $CORRECT_HASH"

rm -f "$BUILD_LOG"
unset BUILD_LOG
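
The net effect is that nix/hashes.json converges on a single nodeModules key holding the SRI hash of the fixed-output node_modules derivation. A minimal consumer sketch (hypothetical, written in TypeScript to match the other scripts):

// Hypothetical reader for the file update-hashes.sh maintains.
import fs from "fs"
const hashes = JSON.parse(fs.readFileSync("nix/hashes.json", "utf8")) as {
  nodeModules: string // SRI hash, e.g. "sha256-..."
}
console.log(hashes.nodeModules)
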