mirror of https://github.com/aljazceru/opencode.git
synced 2025-12-25 03:34:22 +01:00

big format

This commit is contained in:
@@ -77,7 +77,8 @@ async function regenerateWindowsCmdWrappers() {

// npm_config_global is string | undefined
// if it exists, the value is true
const isGlobal = process.env.npm_config_global === "true" || pkgPath.includes(path.join("npm", "node_modules"))
const isGlobal =
process.env.npm_config_global === "true" || pkgPath.includes(path.join("npm", "node_modules"))

// The npm rebuild command does 2 things - Execute lifecycle scripts and rebuild bin links
// We want to skip lifecycle scripts to avoid infinite loops, so we use --ignore-scripts

@@ -93,7 +94,9 @@ async function regenerateWindowsCmdWrappers() {
console.log("Successfully rebuilt npm bin links")
} catch (error) {
console.error("Error rebuilding npm links:", error.message)
console.error("npm rebuild failed. You may need to manually run: npm rebuild opencode-ai --ignore-scripts")
console.error(
"npm rebuild failed. You may need to manually run: npm rebuild opencode-ai --ignore-scripts",
)
}
}

@@ -19,12 +19,23 @@ const result = z.toJSONSchema(Config.Info, {
const schema = ctx.jsonSchema

// Preserve strictness: set additionalProperties: false for objects
if (schema && typeof schema === "object" && schema.type === "object" && schema.additionalProperties === undefined) {
if (
schema &&
typeof schema === "object" &&
schema.type === "object" &&
schema.additionalProperties === undefined
) {
schema.additionalProperties = false
}

// Add examples and default descriptions for string fields with defaults
if (schema && typeof schema === "object" && "type" in schema && schema.type === "string" && schema?.default) {
if (
schema &&
typeof schema === "object" &&
"type" in schema &&
schema.type === "string" &&
schema?.default
) {
if (!schema.examples) {
schema.examples = [schema.default]
}
@@ -143,7 +143,18 @@ export namespace Agent {
tools: {},
builtIn: false,
}
const { name, model, prompt, tools, description, temperature, top_p, mode, permission, ...extra } = value
const {
name,
model,
prompt,
tools,
description,
temperature,
top_p,
mode,
permission,
...extra
} = value
item.options = {
...item.options,
...extra,

@@ -212,7 +223,10 @@ export namespace Agent {
}
}

function mergeAgentPermissions(basePermission: any, overridePermission: any): Agent.Info["permission"] {
function mergeAgentPermissions(
basePermission: any,
overridePermission: any,
): Agent.Info["permission"] {
if (typeof basePermission.bash === "string") {
basePermission.bash = {
"*": basePermission.bash,
@@ -19,7 +19,10 @@ export namespace Bus {

const registry = new Map<string, EventDefinition>()

export function event<Type extends string, Properties extends ZodType>(type: Type, properties: Properties) {
export function event<Type extends string, Properties extends ZodType>(
type: Type,
properties: Properties,
) {
const result = {
type,
properties,

@@ -70,7 +73,10 @@ export namespace Bus {

export function subscribe<Definition extends EventDefinition>(
def: Definition,
callback: (event: { type: Definition["type"]; properties: z.infer<Definition["properties"]> }) => void,
callback: (event: {
type: Definition["type"]
properties: z.infer<Definition["properties"]>
}) => void,
) {
return raw(def.type, callback)
}
@@ -14,7 +14,11 @@ export const AuthCommand = cmd({
command: "auth",
describe: "manage credentials",
builder: (yargs) =>
yargs.command(AuthLoginCommand).command(AuthLogoutCommand).command(AuthListCommand).demandCommand(),
yargs
.command(AuthLoginCommand)
.command(AuthLogoutCommand)
.command(AuthListCommand)
.demandCommand(),
async handler() {},
})

@@ -60,7 +64,9 @@ export const AuthListCommand = cmd({
prompts.log.info(`${provider} ${UI.Style.TEXT_DIM}${envVar}`)
}

prompts.outro(`${activeEnvVars.length} environment variable` + (activeEnvVars.length === 1 ? "" : "s"))
prompts.outro(
`${activeEnvVars.length} environment variable` + (activeEnvVars.length === 1 ? "" : "s"),
)
}
},
})

@@ -80,7 +86,9 @@ export const AuthLoginCommand = cmd({
UI.empty()
prompts.intro("Add credential")
if (args.url) {
const wellknown = await fetch(`${args.url}/.well-known/opencode`).then((x) => x.json() as any)
const wellknown = await fetch(`${args.url}/.well-known/opencode`).then(
(x) => x.json() as any,
)
prompts.log.info(`Running \`${wellknown.auth.command.join(" ")}\``)
const proc = Bun.spawn({
cmd: wellknown.auth.command,
@@ -102,223 +110,224 @@ export const AuthLoginCommand = cmd({
|
||||
prompts.outro("Done")
|
||||
return
|
||||
}
|
||||
await ModelsDev.refresh().catch(() => {})
|
||||
const providers = await ModelsDev.get()
|
||||
const priority: Record<string, number> = {
|
||||
opencode: 0,
|
||||
anthropic: 1,
|
||||
"github-copilot": 2,
|
||||
openai: 3,
|
||||
google: 4,
|
||||
openrouter: 5,
|
||||
vercel: 6,
|
||||
}
|
||||
let provider = await prompts.autocomplete({
|
||||
message: "Select provider",
|
||||
maxItems: 8,
|
||||
options: [
|
||||
...pipe(
|
||||
providers,
|
||||
values(),
|
||||
sortBy(
|
||||
(x) => priority[x.id] ?? 99,
|
||||
(x) => x.name ?? x.id,
|
||||
),
|
||||
map((x) => ({
|
||||
label: x.name,
|
||||
value: x.id,
|
||||
hint: priority[x.id] <= 1 ? "recommended" : undefined,
|
||||
})),
|
||||
),
|
||||
{
|
||||
value: "other",
|
||||
label: "Other",
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
if (prompts.isCancel(provider)) throw new UI.CancelledError()
|
||||
|
||||
const plugin = await Plugin.list().then((x) => x.find((x) => x.auth?.provider === provider))
|
||||
if (plugin && plugin.auth) {
|
||||
let index = 0
|
||||
if (plugin.auth.methods.length > 1) {
|
||||
const method = await prompts.select({
|
||||
message: "Login method",
|
||||
options: [
|
||||
...plugin.auth.methods.map((x, index) => ({
|
||||
label: x.label,
|
||||
value: index.toString(),
|
||||
await ModelsDev.refresh().catch(() => {})
|
||||
const providers = await ModelsDev.get()
|
||||
const priority: Record<string, number> = {
|
||||
opencode: 0,
|
||||
anthropic: 1,
|
||||
"github-copilot": 2,
|
||||
openai: 3,
|
||||
google: 4,
|
||||
openrouter: 5,
|
||||
vercel: 6,
|
||||
}
|
||||
let provider = await prompts.autocomplete({
|
||||
message: "Select provider",
|
||||
maxItems: 8,
|
||||
options: [
|
||||
...pipe(
|
||||
providers,
|
||||
values(),
|
||||
sortBy(
|
||||
(x) => priority[x.id] ?? 99,
|
||||
(x) => x.name ?? x.id,
|
||||
),
|
||||
map((x) => ({
|
||||
label: x.name,
|
||||
value: x.id,
|
||||
hint: priority[x.id] <= 1 ? "recommended" : undefined,
|
||||
})),
|
||||
],
|
||||
})
|
||||
if (prompts.isCancel(method)) throw new UI.CancelledError()
|
||||
index = parseInt(method)
|
||||
}
|
||||
const method = plugin.auth.methods[index]
|
||||
),
|
||||
{
|
||||
value: "other",
|
||||
label: "Other",
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
// Handle prompts for all auth types
|
||||
await new Promise((resolve) => setTimeout(resolve, 10))
|
||||
const inputs: Record<string, string> = {}
|
||||
if (method.prompts) {
|
||||
for (const prompt of method.prompts) {
|
||||
if (prompt.condition && !prompt.condition(inputs)) {
|
||||
continue
|
||||
}
|
||||
if (prompt.type === "select") {
|
||||
const value = await prompts.select({
|
||||
message: prompt.message,
|
||||
options: prompt.options,
|
||||
})
|
||||
if (prompts.isCancel(value)) throw new UI.CancelledError()
|
||||
inputs[prompt.key] = value
|
||||
} else {
|
||||
const value = await prompts.text({
|
||||
message: prompt.message,
|
||||
placeholder: prompt.placeholder,
|
||||
validate: prompt.validate ? (v) => prompt.validate!(v ?? "") : undefined,
|
||||
})
|
||||
if (prompts.isCancel(value)) throw new UI.CancelledError()
|
||||
inputs[prompt.key] = value
|
||||
}
|
||||
}
|
||||
}
|
||||
if (prompts.isCancel(provider)) throw new UI.CancelledError()
|
||||
|
||||
if (method.type === "oauth") {
|
||||
const authorize = await method.authorize(inputs)
|
||||
|
||||
if (authorize.url) {
|
||||
prompts.log.info("Go to: " + authorize.url)
|
||||
}
|
||||
|
||||
if (authorize.method === "auto") {
|
||||
if (authorize.instructions) {
|
||||
prompts.log.info(authorize.instructions)
|
||||
}
|
||||
const spinner = prompts.spinner()
|
||||
spinner.start("Waiting for authorization...")
|
||||
const result = await authorize.callback()
|
||||
if (result.type === "failed") {
|
||||
spinner.stop("Failed to authorize", 1)
|
||||
}
|
||||
if (result.type === "success") {
|
||||
const saveProvider = result.provider ?? provider
|
||||
if ("refresh" in result) {
|
||||
const { type: _, provider: __, refresh, access, expires, ...extraFields } = result
|
||||
await Auth.set(saveProvider, {
|
||||
type: "oauth",
|
||||
refresh,
|
||||
access,
|
||||
expires,
|
||||
...extraFields,
|
||||
})
|
||||
}
|
||||
if ("key" in result) {
|
||||
await Auth.set(saveProvider, {
|
||||
type: "api",
|
||||
key: result.key,
|
||||
})
|
||||
}
|
||||
spinner.stop("Login successful")
|
||||
}
|
||||
}
|
||||
|
||||
if (authorize.method === "code") {
|
||||
const code = await prompts.text({
|
||||
message: "Paste the authorization code here: ",
|
||||
validate: (x) => (x && x.length > 0 ? undefined : "Required"),
|
||||
const plugin = await Plugin.list().then((x) => x.find((x) => x.auth?.provider === provider))
|
||||
if (plugin && plugin.auth) {
|
||||
let index = 0
|
||||
if (plugin.auth.methods.length > 1) {
|
||||
const method = await prompts.select({
|
||||
message: "Login method",
|
||||
options: [
|
||||
...plugin.auth.methods.map((x, index) => ({
|
||||
label: x.label,
|
||||
value: index.toString(),
|
||||
})),
|
||||
],
|
||||
})
|
||||
if (prompts.isCancel(code)) throw new UI.CancelledError()
|
||||
const result = await authorize.callback(code)
|
||||
if (result.type === "failed") {
|
||||
prompts.log.error("Failed to authorize")
|
||||
}
|
||||
if (result.type === "success") {
|
||||
const saveProvider = result.provider ?? provider
|
||||
if ("refresh" in result) {
|
||||
const { type: _, provider: __, refresh, access, expires, ...extraFields } = result
|
||||
await Auth.set(saveProvider, {
|
||||
type: "oauth",
|
||||
refresh,
|
||||
access,
|
||||
expires,
|
||||
...extraFields,
|
||||
})
|
||||
if (prompts.isCancel(method)) throw new UI.CancelledError()
|
||||
index = parseInt(method)
|
||||
}
|
||||
const method = plugin.auth.methods[index]
|
||||
|
||||
// Handle prompts for all auth types
|
||||
await new Promise((resolve) => setTimeout(resolve, 10))
|
||||
const inputs: Record<string, string> = {}
|
||||
if (method.prompts) {
|
||||
for (const prompt of method.prompts) {
|
||||
if (prompt.condition && !prompt.condition(inputs)) {
|
||||
continue
|
||||
}
|
||||
if ("key" in result) {
|
||||
if (prompt.type === "select") {
|
||||
const value = await prompts.select({
|
||||
message: prompt.message,
|
||||
options: prompt.options,
|
||||
})
|
||||
if (prompts.isCancel(value)) throw new UI.CancelledError()
|
||||
inputs[prompt.key] = value
|
||||
} else {
|
||||
const value = await prompts.text({
|
||||
message: prompt.message,
|
||||
placeholder: prompt.placeholder,
|
||||
validate: prompt.validate ? (v) => prompt.validate!(v ?? "") : undefined,
|
||||
})
|
||||
if (prompts.isCancel(value)) throw new UI.CancelledError()
|
||||
inputs[prompt.key] = value
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (method.type === "oauth") {
|
||||
const authorize = await method.authorize(inputs)
|
||||
|
||||
if (authorize.url) {
|
||||
prompts.log.info("Go to: " + authorize.url)
|
||||
}
|
||||
|
||||
if (authorize.method === "auto") {
|
||||
if (authorize.instructions) {
|
||||
prompts.log.info(authorize.instructions)
|
||||
}
|
||||
const spinner = prompts.spinner()
|
||||
spinner.start("Waiting for authorization...")
|
||||
const result = await authorize.callback()
|
||||
if (result.type === "failed") {
|
||||
spinner.stop("Failed to authorize", 1)
|
||||
}
|
||||
if (result.type === "success") {
|
||||
const saveProvider = result.provider ?? provider
|
||||
if ("refresh" in result) {
|
||||
const { type: _, provider: __, refresh, access, expires, ...extraFields } = result
|
||||
await Auth.set(saveProvider, {
|
||||
type: "oauth",
|
||||
refresh,
|
||||
access,
|
||||
expires,
|
||||
...extraFields,
|
||||
})
|
||||
}
|
||||
if ("key" in result) {
|
||||
await Auth.set(saveProvider, {
|
||||
type: "api",
|
||||
key: result.key,
|
||||
})
|
||||
}
|
||||
spinner.stop("Login successful")
|
||||
}
|
||||
}
|
||||
|
||||
if (authorize.method === "code") {
|
||||
const code = await prompts.text({
|
||||
message: "Paste the authorization code here: ",
|
||||
validate: (x) => (x && x.length > 0 ? undefined : "Required"),
|
||||
})
|
||||
if (prompts.isCancel(code)) throw new UI.CancelledError()
|
||||
const result = await authorize.callback(code)
|
||||
if (result.type === "failed") {
|
||||
prompts.log.error("Failed to authorize")
|
||||
}
|
||||
if (result.type === "success") {
|
||||
const saveProvider = result.provider ?? provider
|
||||
if ("refresh" in result) {
|
||||
const { type: _, provider: __, refresh, access, expires, ...extraFields } = result
|
||||
await Auth.set(saveProvider, {
|
||||
type: "oauth",
|
||||
refresh,
|
||||
access,
|
||||
expires,
|
||||
...extraFields,
|
||||
})
|
||||
}
|
||||
if ("key" in result) {
|
||||
await Auth.set(saveProvider, {
|
||||
type: "api",
|
||||
key: result.key,
|
||||
})
|
||||
}
|
||||
prompts.log.success("Login successful")
|
||||
}
|
||||
}
|
||||
|
||||
prompts.outro("Done")
|
||||
return
|
||||
}
|
||||
|
||||
if (method.type === "api") {
|
||||
if (method.authorize) {
|
||||
const result = await method.authorize(inputs)
|
||||
if (result.type === "failed") {
|
||||
prompts.log.error("Failed to authorize")
|
||||
}
|
||||
if (result.type === "success") {
|
||||
const saveProvider = result.provider ?? provider
|
||||
await Auth.set(saveProvider, {
|
||||
type: "api",
|
||||
key: result.key,
|
||||
})
|
||||
prompts.log.success("Login successful")
|
||||
}
|
||||
prompts.log.success("Login successful")
|
||||
prompts.outro("Done")
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (provider === "other") {
|
||||
provider = await prompts.text({
|
||||
message: "Enter provider id",
|
||||
validate: (x) =>
|
||||
x && x.match(/^[0-9a-z-]+$/) ? undefined : "a-z, 0-9 and hyphens only",
|
||||
})
|
||||
if (prompts.isCancel(provider)) throw new UI.CancelledError()
|
||||
provider = provider.replace(/^@ai-sdk\//, "")
|
||||
if (prompts.isCancel(provider)) throw new UI.CancelledError()
|
||||
prompts.log.warn(
|
||||
`This only stores a credential for ${provider} - you will need configure it in opencode.json, check the docs for examples.`,
|
||||
)
|
||||
}
|
||||
|
||||
if (provider === "amazon-bedrock") {
|
||||
prompts.log.info(
|
||||
"Amazon bedrock can be configured with standard AWS environment variables like AWS_BEARER_TOKEN_BEDROCK, AWS_PROFILE or AWS_ACCESS_KEY_ID",
|
||||
)
|
||||
prompts.outro("Done")
|
||||
return
|
||||
}
|
||||
|
||||
if (method.type === "api") {
|
||||
if (method.authorize) {
|
||||
const result = await method.authorize(inputs)
|
||||
if (result.type === "failed") {
|
||||
prompts.log.error("Failed to authorize")
|
||||
}
|
||||
if (result.type === "success") {
|
||||
const saveProvider = result.provider ?? provider
|
||||
await Auth.set(saveProvider, {
|
||||
type: "api",
|
||||
key: result.key,
|
||||
})
|
||||
prompts.log.success("Login successful")
|
||||
}
|
||||
prompts.outro("Done")
|
||||
return
|
||||
}
|
||||
if (provider === "opencode") {
|
||||
prompts.log.info("Create an api key at https://opencode.ai/auth")
|
||||
}
|
||||
}
|
||||
|
||||
if (provider === "other") {
|
||||
provider = await prompts.text({
|
||||
message: "Enter provider id",
|
||||
validate: (x) => (x && x.match(/^[0-9a-z-]+$/) ? undefined : "a-z, 0-9 and hyphens only"),
|
||||
if (provider === "vercel") {
|
||||
prompts.log.info("You can create an api key at https://vercel.link/ai-gateway-token")
|
||||
}
|
||||
|
||||
const key = await prompts.password({
|
||||
message: "Enter your API key",
|
||||
validate: (x) => (x && x.length > 0 ? undefined : "Required"),
|
||||
})
|
||||
if (prompts.isCancel(key)) throw new UI.CancelledError()
|
||||
await Auth.set(provider, {
|
||||
type: "api",
|
||||
key,
|
||||
})
|
||||
if (prompts.isCancel(provider)) throw new UI.CancelledError()
|
||||
provider = provider.replace(/^@ai-sdk\//, "")
|
||||
if (prompts.isCancel(provider)) throw new UI.CancelledError()
|
||||
prompts.log.warn(
|
||||
`This only stores a credential for ${provider} - you will need configure it in opencode.json, check the docs for examples.`,
|
||||
)
|
||||
}
|
||||
|
||||
if (provider === "amazon-bedrock") {
|
||||
prompts.log.info(
|
||||
"Amazon bedrock can be configured with standard AWS environment variables like AWS_BEARER_TOKEN_BEDROCK, AWS_PROFILE or AWS_ACCESS_KEY_ID",
|
||||
)
|
||||
prompts.outro("Done")
|
||||
return
|
||||
}
|
||||
|
||||
if (provider === "opencode") {
|
||||
prompts.log.info("Create an api key at https://opencode.ai/auth")
|
||||
}
|
||||
|
||||
if (provider === "vercel") {
|
||||
prompts.log.info("You can create an api key at https://vercel.link/ai-gateway-token")
|
||||
}
|
||||
|
||||
const key = await prompts.password({
|
||||
message: "Enter your API key",
|
||||
validate: (x) => (x && x.length > 0 ? undefined : "Required"),
|
||||
})
|
||||
if (prompts.isCancel(key)) throw new UI.CancelledError()
|
||||
await Auth.set(provider, {
|
||||
type: "api",
|
||||
key,
|
||||
})
|
||||
|
||||
prompts.outro("Done")
|
||||
},
|
||||
})
|
||||
},
|
||||
|
||||
@@ -7,7 +7,11 @@ import { EOL } from "os"
export const LSPCommand = cmd({
command: "lsp",
builder: (yargs) =>
yargs.command(DiagnosticsCommand).command(SymbolsCommand).command(DocumentSymbolsCommand).demandCommand(),
yargs
.command(DiagnosticsCommand)
.command(SymbolsCommand)
.command(DocumentSymbolsCommand)
.demandCommand(),
async handler() {},
})

@@ -6,7 +6,8 @@ import { cmd } from "../cmd"

export const RipgrepCommand = cmd({
command: "rg",
builder: (yargs) => yargs.command(TreeCommand).command(FilesCommand).command(SearchCommand).demandCommand(),
builder: (yargs) =>
yargs.command(TreeCommand).command(FilesCommand).command(SearchCommand).demandCommand(),
async handler() {},
})

@@ -18,7 +19,9 @@ const TreeCommand = cmd({
}),
async handler(args) {
await bootstrap(process.cwd(), async () => {
process.stdout.write(await Ripgrep.tree({ cwd: Instance.directory, limit: args.limit }) + EOL)
process.stdout.write(
(await Ripgrep.tree({ cwd: Instance.directory, limit: args.limit })) + EOL,
)
})
},
})

@@ -4,7 +4,8 @@ import { cmd } from "../cmd"

export const SnapshotCommand = cmd({
command: "snapshot",
builder: (yargs) => yargs.command(TrackCommand).command(PatchCommand).command(DiffCommand).demandCommand(),
builder: (yargs) =>
yargs.command(TrackCommand).command(PatchCommand).command(DiffCommand).demandCommand(),
async handler() {},
})
@@ -6,7 +6,7 @@ export const GenerateCommand = {
handler: async () => {
const specs = await Server.openapi()
const json = JSON.stringify(specs, null, 2)

// Wait for stdout to finish writing before process.exit() is called
await new Promise<void>((resolve, reject) => {
process.stdout.write(json, (err) => {

@@ -189,7 +189,9 @@ export const GithubInstallCommand = cmd({
async function getAppInfo() {
const project = Instance.project
if (project.vcs !== "git") {
prompts.log.error(`Could not find git repository. Please run this command from a git repository.`)
prompts.log.error(
`Could not find git repository. Please run this command from a git repository.`,
)
throw new UI.CancelledError()
}
@@ -202,9 +204,13 @@ export const GithubInstallCommand = cmd({
// ie. git@github.com:sst/opencode
// ie. ssh://git@github.com/sst/opencode.git
// ie. ssh://git@github.com/sst/opencode
const parsed = info.match(/^(?:(?:https?|ssh):\/\/)?(?:git@)?github\.com[:/]([^/]+)\/([^/.]+?)(?:\.git)?$/)
const parsed = info.match(
/^(?:(?:https?|ssh):\/\/)?(?:git@)?github\.com[:/]([^/]+)\/([^/.]+?)(?:\.git)?$/,
)
if (!parsed) {
prompts.log.error(`Could not find git repository. Please run this command from a git repository.`)
prompts.log.error(
`Could not find git repository. Please run this command from a git repository.`,
)
throw new UI.CancelledError()
}
const [, owner, repo] = parsed

@@ -445,7 +451,9 @@ export const GithubRunCommand = cmd({
const summary = await summarize(response)
await pushToLocalBranch(summary)
}
const hasShared = prData.comments.nodes.some((c) => c.body.includes(`${shareBaseUrl}/s/${shareId}`))
const hasShared = prData.comments.nodes.some((c) =>
c.body.includes(`${shareBaseUrl}/s/${shareId}`),
)
await updateComment(`${response}${footer({ image: !hasShared })}`)
}
// Fork PR

@@ -457,7 +465,9 @@ export const GithubRunCommand = cmd({
const summary = await summarize(response)
await pushToForkBranch(summary, prData)
}
const hasShared = prData.comments.nodes.some((c) => c.body.includes(`${shareBaseUrl}/s/${shareId}`))
const hasShared = prData.comments.nodes.some((c) =>
c.body.includes(`${shareBaseUrl}/s/${shareId}`),
)
await updateComment(`${response}${footer({ image: !hasShared })}`)
}
}
@@ -547,8 +557,12 @@ export const GithubRunCommand = cmd({
// ie. <img alt="Image" src="https://github.com/user-attachments/assets/xxxx" />
// ie. [api.json](https://github.com/user-attachments/files/21433810/api.json)
// ie. 
const mdMatches = prompt.matchAll(/!?\[.*?\]\((https:\/\/github\.com\/user-attachments\/[^)]+)\)/gi)
const tagMatches = prompt.matchAll(/<img .*?src="(https:\/\/github\.com\/user-attachments\/[^"]+)" \/>/gi)
const mdMatches = prompt.matchAll(
/!?\[.*?\]\((https:\/\/github\.com\/user-attachments\/[^)]+)\)/gi,
)
const tagMatches = prompt.matchAll(
/<img .*?src="(https:\/\/github\.com\/user-attachments\/[^"]+)" \/>/gi,
)
const matches = [...mdMatches, ...tagMatches].sort((a, b) => a.index - b.index)
console.log("Images", JSON.stringify(matches, null, 2))

@@ -573,7 +587,10 @@ export const GithubRunCommand = cmd({

// Replace img tag with file path, ie. @image.png
const replacement = `@${filename}`
prompt = prompt.slice(0, start + offset) + replacement + prompt.slice(start + offset + tag.length)
prompt =
prompt.slice(0, start + offset) +
replacement +
prompt.slice(start + offset + tag.length)
offset += replacement.length - tag.length

const contentType = res.headers.get("content-type")
@@ -856,7 +873,8 @@ Co-authored-by: ${actor} <${actor}@users.noreply.github.com>"`
|
||||
throw new Error(`Failed to check permissions for user ${actor}: ${error}`)
|
||||
}
|
||||
|
||||
if (!["admin", "write"].includes(permission)) throw new Error(`User ${actor} does not have write permissions`)
|
||||
if (!["admin", "write"].includes(permission))
|
||||
throw new Error(`User ${actor} does not have write permissions`)
|
||||
}
|
||||
|
||||
async function createComment() {
|
||||
@@ -904,7 +922,9 @@ Co-authored-by: ${actor} <${actor}@users.noreply.github.com>"`
|
||||
|
||||
return `<a href="${shareBaseUrl}/s/${shareId}"><img width="200" alt="${titleAlt}" src="https://social-cards.sst.dev/opencode-share/${title64}.png?model=${providerID}/${modelID}&version=${session.version}&id=${shareId}" /></a>\n`
|
||||
})()
|
||||
const shareUrl = shareId ? `[opencode session](${shareBaseUrl}/s/${shareId}) | ` : ""
|
||||
const shareUrl = shareId
|
||||
? `[opencode session](${shareBaseUrl}/s/${shareId}) | `
|
||||
: ""
|
||||
return `\n\n${image}${shareUrl}[github run](${runUrl})`
|
||||
}
|
||||
|
||||
@@ -1080,9 +1100,13 @@ query($owner: String!, $repo: String!, $number: Int!) {
|
||||
})
|
||||
.map((c) => `- ${c.author.login} at ${c.createdAt}: ${c.body}`)
|
||||
|
||||
const files = (pr.files.nodes || []).map((f) => `- ${f.path} (${f.changeType}) +${f.additions}/-${f.deletions}`)
|
||||
const files = (pr.files.nodes || []).map(
|
||||
(f) => `- ${f.path} (${f.changeType}) +${f.additions}/-${f.deletions}`,
|
||||
)
|
||||
const reviewData = (pr.reviews.nodes || []).map((r) => {
|
||||
const comments = (r.comments.nodes || []).map((c) => ` - ${c.path}:${c.line ?? "?"}: ${c.body}`)
|
||||
const comments = (r.comments.nodes || []).map(
|
||||
(c) => ` - ${c.path}:${c.line ?? "?"}: ${c.body}`,
|
||||
)
|
||||
return [
|
||||
`- ${r.author.login} at ${r.submittedAt}:`,
|
||||
` - Review body: ${r.body}`,
|
||||
@@ -1104,9 +1128,15 @@ query($owner: String!, $repo: String!, $number: Int!) {
|
||||
`Deletions: ${pr.deletions}`,
|
||||
`Total Commits: ${pr.commits.totalCount}`,
|
||||
`Changed Files: ${pr.files.nodes.length} files`,
|
||||
...(comments.length > 0 ? ["<pull_request_comments>", ...comments, "</pull_request_comments>"] : []),
|
||||
...(files.length > 0 ? ["<pull_request_changed_files>", ...files, "</pull_request_changed_files>"] : []),
|
||||
...(reviewData.length > 0 ? ["<pull_request_reviews>", ...reviewData, "</pull_request_reviews>"] : []),
|
||||
...(comments.length > 0
|
||||
? ["<pull_request_comments>", ...comments, "</pull_request_comments>"]
|
||||
: []),
|
||||
...(files.length > 0
|
||||
? ["<pull_request_changed_files>", ...files, "</pull_request_changed_files>"]
|
||||
: []),
|
||||
...(reviewData.length > 0
|
||||
? ["<pull_request_reviews>", ...reviewData, "</pull_request_reviews>"]
|
||||
: []),
|
||||
"</pull_request>",
|
||||
].join("\n")
|
||||
}
|
||||
|
||||
@@ -137,7 +137,9 @@ export const RunCommand = cmd({
|
||||
|
||||
const outputJsonEvent = (type: string, data: any) => {
|
||||
if (args.format === "json") {
|
||||
process.stdout.write(JSON.stringify({ type, timestamp: Date.now(), sessionID, ...data }) + EOL)
|
||||
process.stdout.write(
|
||||
JSON.stringify({ type, timestamp: Date.now(), sessionID, ...data }) + EOL,
|
||||
)
|
||||
return true
|
||||
}
|
||||
return false
|
||||
@@ -157,7 +159,9 @@ export const RunCommand = cmd({
|
||||
const [tool, color] = TOOL[part.tool] ?? [part.tool, UI.Style.TEXT_INFO_BOLD]
|
||||
const title =
|
||||
part.state.title ||
|
||||
(Object.keys(part.state.input).length > 0 ? JSON.stringify(part.state.input) : "Unknown")
|
||||
(Object.keys(part.state.input).length > 0
|
||||
? JSON.stringify(part.state.input)
|
||||
: "Unknown")
|
||||
printEvent(color, tool, title)
|
||||
if (part.tool === "bash" && part.state.output?.trim()) {
|
||||
UI.println()
|
||||
@@ -280,7 +284,10 @@ export const RunCommand = cmd({
|
||||
}
|
||||
|
||||
const cfgResult = await sdk.config.get()
|
||||
if (cfgResult.data && (cfgResult.data.share === "auto" || Flag.OPENCODE_AUTO_SHARE || args.share)) {
|
||||
if (
|
||||
cfgResult.data &&
|
||||
(cfgResult.data.share === "auto" || Flag.OPENCODE_AUTO_SHARE || args.share)
|
||||
) {
|
||||
const shareResult = await sdk.session.share({ path: { id: sessionID } }).catch((error) => {
|
||||
if (error instanceof Error && error.message.includes("disabled")) {
|
||||
UI.println(UI.Style.TEXT_DANGER_BOLD + "! " + error.message)
|
||||
@@ -333,7 +340,10 @@ export const RunCommand = cmd({
|
||||
}
|
||||
|
||||
const cfgResult = await sdk.config.get()
|
||||
if (cfgResult.data && (cfgResult.data.share === "auto" || Flag.OPENCODE_AUTO_SHARE || args.share)) {
|
||||
if (
|
||||
cfgResult.data &&
|
||||
(cfgResult.data.share === "auto" || Flag.OPENCODE_AUTO_SHARE || args.share)
|
||||
) {
|
||||
const shareResult = await sdk.session.share({ path: { id: sessionID } }).catch((error) => {
|
||||
if (error instanceof Error && error.message.includes("disabled")) {
|
||||
UI.println(UI.Style.TEXT_DANGER_BOLD + "! " + error.message)
|
||||
|
||||
@@ -52,7 +52,11 @@ export function DialogModel() {
|
||||
description: provider.name,
|
||||
category: provider.name,
|
||||
})),
|
||||
filter((x) => Boolean(ref()?.filter) || !local.model.recent().find((y) => isDeepEqual(y, x.value))),
|
||||
filter(
|
||||
(x) =>
|
||||
Boolean(ref()?.filter) ||
|
||||
!local.model.recent().find((y) => isDeepEqual(y, x.value)),
|
||||
),
|
||||
),
|
||||
),
|
||||
),
|
||||
|
||||
@@ -20,8 +20,8 @@ export function DialogSessionList() {
|
||||
|
||||
const deleteKeybind = "ctrl+d"
|
||||
|
||||
const currentSessionID = createMemo(() =>
|
||||
route.data.type === "session" ? route.data.sessionID : undefined
|
||||
const currentSessionID = createMemo(() =>
|
||||
route.data.type === "session" ? route.data.sessionID : undefined,
|
||||
)
|
||||
|
||||
const options = createMemo(() => {
|
||||
|
||||
@@ -3,9 +3,19 @@ import { TextAttributes } from "@opentui/core"
|
||||
import { For } from "solid-js"
|
||||
import { useTheme } from "@tui/context/theme"
|
||||
|
||||
const LOGO_LEFT = [` `, `█▀▀█ █▀▀█ █▀▀█ █▀▀▄`, `█░░█ █░░█ █▀▀▀ █░░█`, `▀▀▀▀ █▀▀▀ ▀▀▀▀ ▀ ▀`]
|
||||
const LOGO_LEFT = [
|
||||
` `,
|
||||
`█▀▀█ █▀▀█ █▀▀█ █▀▀▄`,
|
||||
`█░░█ █░░█ █▀▀▀ █░░█`,
|
||||
`▀▀▀▀ █▀▀▀ ▀▀▀▀ ▀ ▀`,
|
||||
]
|
||||
|
||||
const LOGO_RIGHT = [` ▄ `, `█▀▀▀ █▀▀█ █▀▀█ █▀▀█`, `█░░░ █░░█ █░░█ █▀▀▀`, `▀▀▀▀ ▀▀▀▀ ▀▀▀▀ ▀▀▀▀`]
|
||||
const LOGO_RIGHT = [
|
||||
` ▄ `,
|
||||
`█▀▀▀ █▀▀█ █▀▀█ █▀▀█`,
|
||||
`█░░░ █░░█ █░░█ █▀▀▀`,
|
||||
`▀▀▀▀ ▀▀▀▀ ▀▀▀▀ ▀▀▀▀`,
|
||||
]
|
||||
|
||||
export function Logo() {
|
||||
const { theme } = useTheme()
|
||||
|
||||
@@ -17,13 +17,11 @@ export const { use: useRoute, provider: RouteProvider } = createSimpleContext({
|
||||
init: (props: { data?: Route }) => {
|
||||
const [store, setStore] = createStore<Route>(
|
||||
props.data ??
|
||||
(
|
||||
process.env["OPENCODE_ROUTE"]
|
||||
(process.env["OPENCODE_ROUTE"]
|
||||
? JSON.parse(process.env["OPENCODE_ROUTE"])
|
||||
: {
|
||||
type: "home",
|
||||
}
|
||||
),
|
||||
type: "home",
|
||||
}),
|
||||
)
|
||||
|
||||
return {
|
||||
|
||||
@@ -269,6 +269,7 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
|
||||
},
|
||||
async sync(sessionID: string) {
|
||||
const now = Date.now()
|
||||
console.log("syncing", sessionID)
|
||||
const [session, messages, todo, diff] = await Promise.all([
|
||||
sdk.client.session.get({ path: { id: sessionID }, throwOnError: true }),
|
||||
sdk.client.session.messages({ path: { id: sessionID } }),
|
||||
|
||||
@@ -218,4 +218,4 @@
|
||||
"light": "nightOwlFg"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,7 +7,9 @@ import { useRoute } from "@tui/context/route"
|
||||
export function DialogMessage(props: { messageID: string; sessionID: string }) {
|
||||
const sync = useSync()
|
||||
const sdk = useSDK()
|
||||
const message = createMemo(() => sync.data.message[props.sessionID]?.find((x) => x.id === props.messageID))
|
||||
const message = createMemo(() =>
|
||||
sync.data.message[props.sessionID]?.find((x) => x.id === props.messageID),
|
||||
)
|
||||
const route = useRoute()
|
||||
|
||||
return (
|
||||
|
||||
@@ -19,7 +19,9 @@ export function DialogTimeline(props: { sessionID: string; onMove: (messageID: s
|
||||
const result = [] as DialogSelectOption<string>[]
|
||||
for (const message of messages) {
|
||||
if (message.role !== "user") continue
|
||||
const part = (sync.data.part[message.id] ?? []).find((x) => x.type === "text" && !x.synthetic) as TextPart
|
||||
const part = (sync.data.part[message.id] ?? []).find(
|
||||
(x) => x.type === "text" && !x.synthetic,
|
||||
) as TextPart
|
||||
if (!part) continue
|
||||
result.push({
|
||||
title: part.text.replace(/\n/g, " "),
|
||||
@@ -33,5 +35,11 @@ export function DialogTimeline(props: { sessionID: string; onMove: (messageID: s
|
||||
return result
|
||||
})
|
||||
|
||||
return <DialogSelect onMove={(option) => props.onMove(option.value)} title="Timeline" options={options()} />
|
||||
return (
|
||||
<DialogSelect
|
||||
onMove={(option) => props.onMove(option.value)}
|
||||
title="Timeline"
|
||||
options={options()}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -105,14 +105,15 @@ export function Session() {
|
||||
const sidebarVisible = createMemo(() => sidebar() === "show" || (sidebar() === "auto" && wide()))
|
||||
const contentWidth = createMemo(() => dimensions().width - (sidebarVisible() ? 42 : 0) - 4)
|
||||
|
||||
createEffect(() => {
|
||||
sync.session.sync(route.sessionID).catch(() => {
|
||||
createEffect(async () => {
|
||||
await sync.session.sync(route.sessionID).catch(() => {
|
||||
toast.show({
|
||||
message: `Session not found: ${route.sessionID}`,
|
||||
variant: "error",
|
||||
})
|
||||
return navigate({ type: "home" })
|
||||
})
|
||||
scroll.scrollBy(100_000)
|
||||
})
|
||||
|
||||
const toast = useToast()
|
||||
|
||||
@@ -41,7 +41,12 @@ export const TuiSpawnCommand = cmd({
|
||||
)
|
||||
cwd = new URL("../../../../", import.meta.url).pathname
|
||||
} else cmd.push(process.execPath)
|
||||
cmd.push("attach", server.url.toString(), "--dir", args.project ? path.resolve(args.project) : process.cwd())
|
||||
cmd.push(
|
||||
"attach",
|
||||
server.url.toString(),
|
||||
"--dir",
|
||||
args.project ? path.resolve(args.project) : process.cwd(),
|
||||
)
|
||||
const proc = Bun.spawn({
|
||||
cmd,
|
||||
cwd,
|
||||
|
||||
@@ -53,7 +53,9 @@ export function DialogConfirm(props: DialogConfirmProps) {
|
||||
dialog.clear()
|
||||
}}
|
||||
>
|
||||
<text fg={key === store.active ? theme.background : theme.textMuted}>{Locale.titlecase(key)}</text>
|
||||
<text fg={key === store.active ? theme.background : theme.textMuted}>
|
||||
{Locale.titlecase(key)}
|
||||
</text>
|
||||
</box>
|
||||
)}
|
||||
</For>
|
||||
|
||||
@@ -5,7 +5,10 @@ import { join } from "node:path"
|
||||
import { CliRenderer } from "@opentui/core"
|
||||
|
||||
export namespace Editor {
|
||||
export async function open(opts: { value: string; renderer: CliRenderer }): Promise<string | undefined> {
|
||||
export async function open(opts: {
|
||||
value: string
|
||||
renderer: CliRenderer
|
||||
}): Promise<string | undefined> {
|
||||
const editor = process.env["EDITOR"]
|
||||
if (!editor) return
|
||||
|
||||
|
||||
@@ -27,7 +27,9 @@ export const UpgradeCommand = {
|
||||
const detectedMethod = await Installation.method()
|
||||
const method = (args.method as Installation.Method) ?? detectedMethod
|
||||
if (method === "unknown") {
|
||||
prompts.log.error(`opencode is installed to ${process.execPath} and may be managed by a package manager`)
|
||||
prompts.log.error(
|
||||
`opencode is installed to ${process.execPath} and may be managed by a package manager`,
|
||||
)
|
||||
const install = await prompts.select({
|
||||
message: "Install anyways?",
|
||||
options: [
|
||||
|
||||
@@ -574,7 +574,10 @@ export namespace Config {
|
||||
.object({
|
||||
apiKey: z.string().optional(),
|
||||
baseURL: z.string().optional(),
|
||||
enterpriseUrl: z.string().optional().describe("GitHub Enterprise URL for copilot authentication"),
|
||||
enterpriseUrl: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe("GitHub Enterprise URL for copilot authentication"),
|
||||
timeout: z
|
||||
.union([
|
||||
z
|
||||
|
||||
@@ -81,7 +81,9 @@ export namespace Fzf {
|
||||
})
|
||||
}
|
||||
if (config.extension === "zip") {
|
||||
const zipFileReader = new ZipReader(new BlobReader(new Blob([await Bun.file(archivePath).arrayBuffer()])))
|
||||
const zipFileReader = new ZipReader(
|
||||
new BlobReader(new Blob([await Bun.file(archivePath).arrayBuffer()])),
|
||||
)
|
||||
const entries = await zipFileReader.getEntries()
|
||||
let fzfEntry: any
|
||||
for (const entry of entries) {
|
||||
|
||||
@@ -161,7 +161,9 @@ export namespace Ripgrep {
|
||||
}
|
||||
if (config.extension === "zip") {
|
||||
if (config.extension === "zip") {
|
||||
const zipFileReader = new ZipReader(new BlobReader(new Blob([await Bun.file(archivePath).arrayBuffer()])))
|
||||
const zipFileReader = new ZipReader(
|
||||
new BlobReader(new Blob([await Bun.file(archivePath).arrayBuffer()])),
|
||||
)
|
||||
const entries = await zipFileReader.getEntries()
|
||||
let rgEntry: any
|
||||
for (const entry of entries) {
|
||||
@@ -354,7 +356,12 @@ export namespace Ripgrep {
|
||||
return lines.join("\n")
|
||||
}
|
||||
|
||||
export async function search(input: { cwd: string; pattern: string; glob?: string[]; limit?: number }) {
|
||||
export async function search(input: {
|
||||
cwd: string
|
||||
pattern: string
|
||||
glob?: string[]
|
||||
limit?: number
|
||||
}) {
|
||||
const args = [`${await filepath()}`, "--json", "--hidden", "--glob='!.git/*'"]
|
||||
|
||||
if (input.glob) {
|
||||
|
||||
@@ -27,7 +27,10 @@ export namespace FileTime {
|
||||
|
||||
export async function assert(sessionID: string, filepath: string) {
|
||||
const time = get(sessionID, filepath)
|
||||
if (!time) throw new Error(`You must read the file ${filepath} before overwriting it. Use the Read tool first`)
|
||||
if (!time)
|
||||
throw new Error(
|
||||
`You must read the file ${filepath} before overwriting it. Use the Read tool first`,
|
||||
)
|
||||
const stats = await Bun.file(filepath).stat()
|
||||
if (stats.mtime.getTime() > time.getTime()) {
|
||||
throw new Error(
|
||||
|
||||
@@ -51,8 +51,10 @@ export namespace FileWatcher {
|
||||
for (const evt of evts) {
|
||||
log.info("event", evt)
|
||||
if (evt.type === "create") Bus.publish(Event.Updated, { file: evt.path, event: "add" })
|
||||
if (evt.type === "update") Bus.publish(Event.Updated, { file: evt.path, event: "change" })
|
||||
if (evt.type === "delete") Bus.publish(Event.Updated, { file: evt.path, event: "unlink" })
|
||||
if (evt.type === "update")
|
||||
Bus.publish(Event.Updated, { file: evt.path, event: "change" })
|
||||
if (evt.type === "delete")
|
||||
Bus.publish(Event.Updated, { file: evt.path, event: "unlink" })
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -49,7 +49,11 @@ export namespace Identifier {
|
||||
return result
|
||||
}
|
||||
|
||||
export function create(prefix: keyof typeof prefixes, descending: boolean, timestamp?: number): string {
|
||||
export function create(
|
||||
prefix: keyof typeof prefixes,
|
||||
descending: boolean,
|
||||
timestamp?: number,
|
||||
): string {
|
||||
const currentTimestamp = timestamp ?? Date.now()
|
||||
|
||||
if (currentTimestamp !== lastTimestamp) {
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
import path from "path"
|
||||
import { createMessageConnection, StreamMessageReader, StreamMessageWriter } from "vscode-jsonrpc/node"
|
||||
import {
|
||||
createMessageConnection,
|
||||
StreamMessageReader,
|
||||
StreamMessageWriter,
|
||||
} from "vscode-jsonrpc/node"
|
||||
import type { Diagnostic as VSCodeDiagnostic } from "vscode-languageserver-types"
|
||||
import { Log } from "../util/log"
|
||||
import { LANGUAGE_EXTENSIONS } from "./language"
|
||||
@@ -34,7 +38,11 @@ export namespace LSPClient {
|
||||
),
|
||||
}
|
||||
|
||||
export async function create(input: { serverID: string; server: LSPServer.Handle; root: string }) {
|
||||
export async function create(input: {
|
||||
serverID: string
|
||||
server: LSPServer.Handle
|
||||
root: string
|
||||
}) {
|
||||
const l = log.clone().tag("serverID", input.serverID)
|
||||
l.info("starting client")
|
||||
|
||||
@@ -129,7 +137,9 @@ export namespace LSPClient {
|
||||
},
|
||||
notify: {
|
||||
async open(input: { path: string }) {
|
||||
input.path = path.isAbsolute(input.path) ? input.path : path.resolve(Instance.directory, input.path)
|
||||
input.path = path.isAbsolute(input.path)
|
||||
? input.path
|
||||
: path.resolve(Instance.directory, input.path)
|
||||
const file = Bun.file(input.path)
|
||||
const text = await file.text()
|
||||
const extension = path.extname(input.path)
|
||||
@@ -171,13 +181,18 @@ export namespace LSPClient {
|
||||
return diagnostics
|
||||
},
|
||||
async waitForDiagnostics(input: { path: string }) {
|
||||
input.path = path.isAbsolute(input.path) ? input.path : path.resolve(Instance.directory, input.path)
|
||||
input.path = path.isAbsolute(input.path)
|
||||
? input.path
|
||||
: path.resolve(Instance.directory, input.path)
|
||||
log.info("waiting for diagnostics", input)
|
||||
let unsub: () => void
|
||||
return await withTimeout(
|
||||
new Promise<void>((resolve) => {
|
||||
unsub = Bus.subscribe(Event.Diagnostics, (event) => {
|
||||
if (event.properties.path === input.path && event.properties.serverID === result.serverID) {
|
||||
if (
|
||||
event.properties.path === input.path &&
|
||||
event.properties.serverID === result.serverID
|
||||
) {
|
||||
log.info("got diagnostics", input)
|
||||
unsub?.()
|
||||
resolve()
|
||||
|
||||
@@ -20,7 +20,7 @@ export namespace Patch {
|
||||
workdir?: string
|
||||
}
|
||||
|
||||
export type Hunk =
|
||||
export type Hunk =
|
||||
| { type: "add"; path: string; contents: string }
|
||||
| { type: "delete"; path: string }
|
||||
| { type: "update"; path: string; move_path?: string; chunks: UpdateFileChunk[] }
|
||||
@@ -71,60 +71,66 @@ export namespace Patch {
|
||||
}
|
||||
|
||||
// Parser implementation
|
||||
function parsePatchHeader(lines: string[], startIdx: number): { filePath: string; movePath?: string; nextIdx: number } | null {
|
||||
function parsePatchHeader(
|
||||
lines: string[],
|
||||
startIdx: number,
|
||||
): { filePath: string; movePath?: string; nextIdx: number } | null {
|
||||
const line = lines[startIdx]
|
||||
|
||||
|
||||
if (line.startsWith("*** Add File:")) {
|
||||
const filePath = line.split(":", 2)[1]?.trim()
|
||||
return filePath ? { filePath, nextIdx: startIdx + 1 } : null
|
||||
}
|
||||
|
||||
|
||||
if (line.startsWith("*** Delete File:")) {
|
||||
const filePath = line.split(":", 2)[1]?.trim()
|
||||
return filePath ? { filePath, nextIdx: startIdx + 1 } : null
|
||||
}
|
||||
|
||||
|
||||
if (line.startsWith("*** Update File:")) {
|
||||
const filePath = line.split(":", 2)[1]?.trim()
|
||||
let movePath: string | undefined
|
||||
let nextIdx = startIdx + 1
|
||||
|
||||
|
||||
// Check for move directive
|
||||
if (nextIdx < lines.length && lines[nextIdx].startsWith("*** Move to:")) {
|
||||
movePath = lines[nextIdx].split(":", 2)[1]?.trim()
|
||||
nextIdx++
|
||||
}
|
||||
|
||||
|
||||
return filePath ? { filePath, movePath, nextIdx } : null
|
||||
}
|
||||
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
function parseUpdateFileChunks(lines: string[], startIdx: number): { chunks: UpdateFileChunk[]; nextIdx: number } {
|
||||
function parseUpdateFileChunks(
|
||||
lines: string[],
|
||||
startIdx: number,
|
||||
): { chunks: UpdateFileChunk[]; nextIdx: number } {
|
||||
const chunks: UpdateFileChunk[] = []
|
||||
let i = startIdx
|
||||
|
||||
|
||||
while (i < lines.length && !lines[i].startsWith("***")) {
|
||||
if (lines[i].startsWith("@@")) {
|
||||
// Parse context line
|
||||
const contextLine = lines[i].substring(2).trim()
|
||||
i++
|
||||
|
||||
|
||||
const oldLines: string[] = []
|
||||
const newLines: string[] = []
|
||||
let isEndOfFile = false
|
||||
|
||||
|
||||
// Parse change lines
|
||||
while (i < lines.length && !lines[i].startsWith("@@") && !lines[i].startsWith("***")) {
|
||||
const changeLine = lines[i]
|
||||
|
||||
|
||||
if (changeLine === "*** End of File") {
|
||||
isEndOfFile = true
|
||||
i++
|
||||
break
|
||||
}
|
||||
|
||||
|
||||
if (changeLine.startsWith(" ")) {
|
||||
// Keep line - appears in both old and new
|
||||
const content = changeLine.substring(1)
|
||||
@@ -137,10 +143,10 @@ export namespace Patch {
|
||||
// Add line - only in new
|
||||
newLines.push(changeLine.substring(1))
|
||||
}
|
||||
|
||||
|
||||
i++
|
||||
}
|
||||
|
||||
|
||||
chunks.push({
|
||||
old_lines: oldLines,
|
||||
new_lines: newLines,
|
||||
@@ -151,26 +157,29 @@ export namespace Patch {
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return { chunks, nextIdx: i }
|
||||
}
|
||||
|
||||
function parseAddFileContent(lines: string[], startIdx: number): { content: string; nextIdx: number } {
|
||||
function parseAddFileContent(
|
||||
lines: string[],
|
||||
startIdx: number,
|
||||
): { content: string; nextIdx: number } {
|
||||
let content = ""
|
||||
let i = startIdx
|
||||
|
||||
|
||||
while (i < lines.length && !lines[i].startsWith("***")) {
|
||||
if (lines[i].startsWith("+")) {
|
||||
content += lines[i].substring(1) + "\n"
|
||||
}
|
||||
i++
|
||||
}
|
||||
|
||||
|
||||
// Remove trailing newline
|
||||
if (content.endsWith("\n")) {
|
||||
content = content.slice(0, -1)
|
||||
}
|
||||
|
||||
|
||||
return { content, nextIdx: i }
|
||||
}
|
||||
|
||||
@@ -178,28 +187,28 @@ export namespace Patch {
|
||||
const lines = patchText.split("\n")
|
||||
const hunks: Hunk[] = []
|
||||
let i = 0
|
||||
|
||||
|
||||
// Look for Begin/End patch markers
|
||||
const beginMarker = "*** Begin Patch"
|
||||
const endMarker = "*** End Patch"
|
||||
|
||||
const beginIdx = lines.findIndex(line => line.trim() === beginMarker)
|
||||
const endIdx = lines.findIndex(line => line.trim() === endMarker)
|
||||
|
||||
|
||||
const beginIdx = lines.findIndex((line) => line.trim() === beginMarker)
|
||||
const endIdx = lines.findIndex((line) => line.trim() === endMarker)
|
||||
|
||||
if (beginIdx === -1 || endIdx === -1 || beginIdx >= endIdx) {
|
||||
throw new Error("Invalid patch format: missing Begin/End markers")
|
||||
}
|
||||
|
||||
|
||||
// Parse content between markers
|
||||
i = beginIdx + 1
|
||||
|
||||
|
||||
while (i < endIdx) {
|
||||
const header = parsePatchHeader(lines, i)
|
||||
if (!header) {
|
||||
i++
|
||||
continue
|
||||
}
|
||||
|
||||
|
||||
if (lines[i].startsWith("*** Add File:")) {
|
||||
const { content, nextIdx } = parseAddFileContent(lines, header.nextIdx)
|
||||
hunks.push({
|
||||
@@ -227,18 +236,19 @@ export namespace Patch {
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return { hunks }
|
||||
}
|
||||
|
||||
// Apply patch functionality
|
||||
export function maybeParseApplyPatch(argv: string[]):
|
||||
export function maybeParseApplyPatch(
|
||||
argv: string[],
|
||||
):
|
||||
| { type: MaybeApplyPatch.Body; args: ApplyPatchArgs }
|
||||
| { type: MaybeApplyPatch.PatchParseError; error: Error }
|
||||
| { type: MaybeApplyPatch.NotApplyPatch } {
|
||||
|
||||
const APPLY_PATCH_COMMANDS = ["apply_patch", "applypatch"]
|
||||
|
||||
|
||||
// Direct invocation: apply_patch <patch>
|
||||
if (argv.length === 2 && APPLY_PATCH_COMMANDS.includes(argv[0])) {
|
||||
try {
|
||||
@@ -257,13 +267,13 @@ export namespace Patch {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Bash heredoc form: bash -lc 'apply_patch <<"EOF" ...'
|
||||
if (argv.length === 3 && argv[0] === "bash" && argv[1] === "-lc") {
|
||||
// Simple extraction - in real implementation would need proper bash parsing
|
||||
const script = argv[2]
|
||||
const heredocMatch = script.match(/apply_patch\s*<<['"](\w+)['"]\s*\n([\s\S]*?)\n\1/)
|
||||
|
||||
|
||||
if (heredocMatch) {
|
||||
const patchContent = heredocMatch[2]
|
||||
try {
|
||||
@@ -283,7 +293,7 @@ export namespace Patch {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return { type: MaybeApplyPatch.NotApplyPatch }
|
||||
}
|
||||
|
||||
@@ -293,7 +303,10 @@ export namespace Patch {
|
||||
content: string
|
||||
}
|
||||
|
||||
export function deriveNewContentsFromChunks(filePath: string, chunks: UpdateFileChunk[]): ApplyPatchFileUpdate {
|
||||
export function deriveNewContentsFromChunks(
|
||||
filePath: string,
|
||||
chunks: UpdateFileChunk[],
|
||||
): ApplyPatchFileUpdate {
|
||||
// Read original file content
|
||||
let originalContent: string
|
||||
try {
|
||||
@@ -301,37 +314,41 @@ export namespace Patch {
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to read file ${filePath}: ${error}`)
|
||||
}
|
||||
|
||||
|
||||
let originalLines = originalContent.split("\n")
|
||||
|
||||
|
||||
// Drop trailing empty element for consistent line counting
|
||||
if (originalLines.length > 0 && originalLines[originalLines.length - 1] === "") {
|
||||
originalLines.pop()
|
||||
}
|
||||
|
||||
|
||||
const replacements = computeReplacements(originalLines, filePath, chunks)
|
||||
let newLines = applyReplacements(originalLines, replacements)
|
||||
|
||||
|
||||
// Ensure trailing newline
|
||||
if (newLines.length === 0 || newLines[newLines.length - 1] !== "") {
|
||||
newLines.push("")
|
||||
}
|
||||
|
||||
|
||||
const newContent = newLines.join("\n")
|
||||
|
||||
|
||||
// Generate unified diff
|
||||
const unifiedDiff = generateUnifiedDiff(originalContent, newContent)
|
||||
|
||||
|
||||
return {
|
||||
unified_diff: unifiedDiff,
|
||||
content: newContent,
|
||||
}
|
||||
}
|
||||
|
||||
function computeReplacements(originalLines: string[], filePath: string, chunks: UpdateFileChunk[]): Array<[number, number, string[]]> {
|
||||
function computeReplacements(
|
||||
originalLines: string[],
|
||||
filePath: string,
|
||||
chunks: UpdateFileChunk[],
|
||||
): Array<[number, number, string[]]> {
|
||||
const replacements: Array<[number, number, string[]]> = []
|
||||
let lineIndex = 0
|
||||
|
||||
|
||||
for (const chunk of chunks) {
|
||||
// Handle context-based seeking
|
||||
if (chunk.change_context) {
|
||||
@@ -341,21 +358,22 @@ export namespace Patch {
|
||||
}
|
||||
lineIndex = contextIdx + 1
|
||||
}
|
||||
|
||||
|
||||
// Handle pure addition (no old lines)
|
||||
if (chunk.old_lines.length === 0) {
|
||||
const insertionIdx = originalLines.length > 0 && originalLines[originalLines.length - 1] === ""
|
||||
? originalLines.length - 1
|
||||
: originalLines.length
|
||||
const insertionIdx =
|
||||
originalLines.length > 0 && originalLines[originalLines.length - 1] === ""
|
||||
? originalLines.length - 1
|
||||
: originalLines.length
|
||||
replacements.push([insertionIdx, 0, chunk.new_lines])
|
||||
continue
|
||||
}
|
||||
|
||||
|
||||
// Try to match old lines in the file
|
||||
let pattern = chunk.old_lines
|
||||
let newSlice = chunk.new_lines
|
||||
let found = seekSequence(originalLines, pattern, lineIndex)
|
||||
|
||||
|
||||
// Retry without trailing empty line if not found
|
||||
if (found === -1 && pattern.length > 0 && pattern[pattern.length - 1] === "") {
|
||||
pattern = pattern.slice(0, -1)
|
||||
@@ -364,79 +382,82 @@ export namespace Patch {
|
||||
}
|
||||
found = seekSequence(originalLines, pattern, lineIndex)
|
||||
}
|
||||
|
||||
|
||||
if (found !== -1) {
|
||||
replacements.push([found, pattern.length, newSlice])
|
||||
lineIndex = found + pattern.length
|
||||
} else {
|
||||
throw new Error(
|
||||
`Failed to find expected lines in ${filePath}:\n${chunk.old_lines.join("\n")}`
|
||||
`Failed to find expected lines in ${filePath}:\n${chunk.old_lines.join("\n")}`,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Sort replacements by index to apply in order
|
||||
replacements.sort((a, b) => a[0] - b[0])
|
||||
|
||||
|
||||
return replacements
|
||||
}
|
||||
|
||||
function applyReplacements(lines: string[], replacements: Array<[number, number, string[]]>): string[] {
|
||||
function applyReplacements(
|
||||
lines: string[],
|
||||
replacements: Array<[number, number, string[]]>,
|
||||
): string[] {
|
||||
// Apply replacements in reverse order to avoid index shifting
|
||||
const result = [...lines]
|
||||
|
||||
|
||||
for (let i = replacements.length - 1; i >= 0; i--) {
|
||||
const [startIdx, oldLen, newSegment] = replacements[i]
|
||||
|
||||
|
||||
// Remove old lines
|
||||
result.splice(startIdx, oldLen)
|
||||
|
||||
|
||||
// Insert new lines
|
||||
for (let j = 0; j < newSegment.length; j++) {
|
||||
result.splice(startIdx + j, 0, newSegment[j])
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
function seekSequence(lines: string[], pattern: string[], startIndex: number): number {
|
||||
if (pattern.length === 0) return -1
|
||||
|
||||
|
||||
// Simple substring search implementation
|
||||
for (let i = startIndex; i <= lines.length - pattern.length; i++) {
|
||||
let matches = true
|
||||
|
||||
|
||||
for (let j = 0; j < pattern.length; j++) {
|
||||
if (lines[i + j] !== pattern[j]) {
|
||||
matches = false
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (matches) {
|
||||
return i
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return -1
|
||||
}
|
||||
|
||||
function generateUnifiedDiff(oldContent: string, newContent: string): string {
|
||||
const oldLines = oldContent.split("\n")
|
||||
const newLines = newContent.split("\n")
|
||||
|
||||
|
||||
// Simple diff generation - in a real implementation you'd use a proper diff algorithm
|
||||
let diff = "@@ -1 +1 @@\n"
|
||||
|
||||
|
||||
// Find changes (simplified approach)
|
||||
const maxLen = Math.max(oldLines.length, newLines.length)
|
||||
let hasChanges = false
|
||||
|
||||
|
||||
for (let i = 0; i < maxLen; i++) {
|
||||
const oldLine = oldLines[i] || ""
|
||||
const newLine = newLines[i] || ""
|
||||
|
||||
|
||||
if (oldLine !== newLine) {
|
||||
if (oldLine) diff += `-${oldLine}\n`
|
||||
if (newLine) diff += `+${newLine}\n`
|
||||
@@ -445,7 +466,7 @@ export namespace Patch {
|
||||
diff += ` ${oldLine}\n`
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return hasChanges ? diff : ""
|
||||
}
|
||||
|
||||
@@ -454,11 +475,11 @@ export namespace Patch {
|
||||
if (hunks.length === 0) {
|
||||
throw new Error("No files were modified.")
|
||||
}
|
||||
|
||||
|
||||
const added: string[] = []
|
||||
const modified: string[] = []
|
||||
const deleted: string[] = []
|
||||
|
||||
|
||||
for (const hunk of hunks) {
|
||||
switch (hunk.type) {
|
||||
case "add":
|
||||
@@ -467,28 +488,28 @@ export namespace Patch {
|
||||
if (addDir !== "." && addDir !== "/") {
|
||||
await fs.mkdir(addDir, { recursive: true })
|
||||
}
|
||||
|
||||
|
||||
await fs.writeFile(hunk.path, hunk.contents, "utf-8")
|
||||
added.push(hunk.path)
|
||||
log.info(`Added file: ${hunk.path}`)
|
||||
break
|
||||
|
||||
|
||||
case "delete":
|
||||
await fs.unlink(hunk.path)
|
||||
deleted.push(hunk.path)
|
||||
log.info(`Deleted file: ${hunk.path}`)
|
||||
break
|
||||
|
||||
|
||||
case "update":
|
||||
const fileUpdate = deriveNewContentsFromChunks(hunk.path, hunk.chunks)
|
||||
|
||||
|
||||
if (hunk.move_path) {
|
||||
// Handle file move
|
||||
const moveDir = path.dirname(hunk.move_path)
|
||||
if (moveDir !== "." && moveDir !== "/") {
|
||||
await fs.mkdir(moveDir, { recursive: true })
|
||||
}
|
||||
|
||||
|
||||
await fs.writeFile(hunk.move_path, fileUpdate.content, "utf-8")
|
||||
await fs.unlink(hunk.path)
|
||||
modified.push(hunk.move_path)
|
||||
@@ -502,7 +523,7 @@ export namespace Patch {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return { added, modified, deleted }
|
||||
}
|
||||
|
||||
@@ -513,7 +534,10 @@ export namespace Patch {
}

// Async version of maybeParseApplyPatchVerified
export async function maybeParseApplyPatchVerified(argv: string[], cwd: string): Promise<
export async function maybeParseApplyPatchVerified(
argv: string[],
cwd: string,
): Promise<
| { type: MaybeApplyPatchVerified.Body; action: ApplyPatchAction }
| { type: MaybeApplyPatchVerified.CorrectnessError; error: Error }
| { type: MaybeApplyPatchVerified.NotApplyPatch }
@@ -530,18 +554,21 @@ export namespace Patch {
// Not a patch, continue
}
}

const result = maybeParseApplyPatch(argv)

switch (result.type) {
case MaybeApplyPatch.Body:
const { args } = result
const effectiveCwd = args.workdir ? path.resolve(cwd, args.workdir) : cwd
const changes = new Map<string, ApplyPatchFileChange>()

for (const hunk of args.hunks) {
const resolvedPath = path.resolve(effectiveCwd, hunk.type === "update" && hunk.move_path ? hunk.move_path : hunk.path)

const resolvedPath = path.resolve(
effectiveCwd,
hunk.type === "update" && hunk.move_path ? hunk.move_path : hunk.path,
)

switch (hunk.type) {
case "add":
changes.set(resolvedPath, {
@@ -549,7 +576,7 @@ export namespace Patch {
content: hunk.contents,
})
break

case "delete":
// For delete, we need to read the current content
const deletePath = path.resolve(effectiveCwd, hunk.path)
@@ -566,7 +593,7 @@ export namespace Patch {
}
}
break

case "update":
const updatePath = path.resolve(effectiveCwd, hunk.path)
try {
@@ -574,7 +601,9 @@ export namespace Patch {
changes.set(resolvedPath, {
type: "update",
unified_diff: fileUpdate.unified_diff,
move_path: hunk.move_path ? path.resolve(effectiveCwd, hunk.move_path) : undefined,
move_path: hunk.move_path
? path.resolve(effectiveCwd, hunk.move_path)
: undefined,
new_content: fileUpdate.content,
})
} catch (error) {
@@ -586,7 +615,7 @@ export namespace Patch {
break
}
}

return {
type: MaybeApplyPatchVerified.Body,
action: {
@@ -595,15 +624,15 @@ export namespace Patch {
cwd: effectiveCwd,
},
}

case MaybeApplyPatch.PatchParseError:
return {
type: MaybeApplyPatchVerified.CorrectnessError,
error: result.error,
}

case MaybeApplyPatch.NotApplyPatch:
return { type: MaybeApplyPatchVerified.NotApplyPatch }
}
}
}
}

@@ -40,7 +40,8 @@ export namespace ProviderTransform {
}

for (const msg of unique([...system, ...final])) {
const shouldUseContentOptions = providerID !== "anthropic" && Array.isArray(msg.content) && msg.content.length > 0
const shouldUseContentOptions =
providerID !== "anthropic" && Array.isArray(msg.content) && msg.content.length > 0

if (shouldUseContentOptions) {
const lastContent = msg.content[msg.content.length - 1]
@@ -84,7 +85,11 @@ export namespace ProviderTransform {
return undefined
}

export function options(providerID: string, modelID: string, sessionID: string): Record<string, any> | undefined {
export function options(
providerID: string,
modelID: string,
sessionID: string,
): Record<string, any> | undefined {
const result: Record<string, any> = {}

if (providerID === "openai") {
@@ -109,7 +114,11 @@ export namespace ProviderTransform {
return result
}

export function providerOptions(npm: string | undefined, providerID: string, options: { [x: string]: any }) {
export function providerOptions(
npm: string | undefined,
providerID: string,
options: { [x: string]: any },
) {
switch (npm) {
case "@ai-sdk/openai":
case "@ai-sdk/azure":
@@ -142,7 +151,8 @@ export namespace ProviderTransform {

if (providerID === "anthropic") {
const thinking = options?.["thinking"]
const budgetTokens = typeof thinking?.["budgetTokens"] === "number" ? thinking["budgetTokens"] : 0
const budgetTokens =
typeof thinking?.["budgetTokens"] === "number" ? thinking["budgetTokens"] : 0
const enabled = thinking?.["type"] === "enabled"
if (enabled && budgetTokens > 0) {
// Return text tokens so that text + thinking <= model cap, preferring 32k text when possible.

@@ -755,7 +755,7 @@ export namespace Server {
),
async (c) => {
const messages = await Session.messages(c.req.valid("param").id)
return c.json(messages)
return c.json(messages.slice(-100))
},
)
.get(

@@ -1,4 +1,4 @@
|
||||
import { streamText, type ModelMessage, LoadAPIKeyError, type StreamTextResult, type Tool as AITool } from "ai"
|
||||
import { streamText, type ModelMessage, type StreamTextResult, type Tool as AITool } from "ai"
|
||||
import { Session } from "."
|
||||
import { Identifier } from "../id/id"
|
||||
import { Instance } from "../project/instance"
|
||||
@@ -30,12 +30,17 @@ export namespace SessionCompaction {
|
||||
),
|
||||
}
|
||||
|
||||
export function isOverflow(input: { tokens: MessageV2.Assistant["tokens"]; model: ModelsDev.Model }) {
|
||||
export function isOverflow(input: {
|
||||
tokens: MessageV2.Assistant["tokens"]
|
||||
model: ModelsDev.Model
|
||||
}) {
|
||||
if (Flag.OPENCODE_DISABLE_AUTOCOMPACT) return false
|
||||
const context = input.model.limit.context
|
||||
if (context === 0) return false
|
||||
const count = input.tokens.input + input.tokens.cache.read + input.tokens.output
|
||||
const output = Math.min(input.model.limit.output, SessionPrompt.OUTPUT_TOKEN_MAX) || SessionPrompt.OUTPUT_TOKEN_MAX
|
||||
const output =
|
||||
Math.min(input.model.limit.output, SessionPrompt.OUTPUT_TOKEN_MAX) ||
|
||||
SessionPrompt.OUTPUT_TOKEN_MAX
|
||||
const usable = context - output
|
||||
return count > usable
|
||||
}
|
||||
@@ -87,9 +92,15 @@ export namespace SessionCompaction {
|
||||
}
|
||||
}
|
||||
|
||||
export async function run(input: { sessionID: string; providerID: string; modelID: string; signal?: AbortSignal }) {
|
||||
export async function run(input: {
|
||||
sessionID: string
|
||||
providerID: string
|
||||
modelID: string
|
||||
signal?: AbortSignal
|
||||
}) {
|
||||
if (!input.signal) SessionLock.assertUnlocked(input.sessionID)
|
||||
await using lock = input.signal === undefined ? SessionLock.acquire({ sessionID: input.sessionID }) : undefined
|
||||
await using lock =
|
||||
input.signal === undefined ? SessionLock.acquire({ sessionID: input.sessionID }) : undefined
|
||||
const signal = input.signal ?? lock!.signal
|
||||
|
||||
await Session.update(input.sessionID, (draft) => {
|
||||
@@ -113,7 +124,6 @@ export namespace SessionCompaction {
|
||||
role: "assistant",
|
||||
parentID: toSummarize.findLast((m) => m.info.role === "user")?.info.id!,
|
||||
sessionID: input.sessionID,
|
||||
system,
|
||||
mode: "build",
|
||||
path: {
|
||||
cwd: Instance.directory,
|
||||
@@ -150,7 +160,11 @@ export namespace SessionCompaction {
|
||||
// set to 0, we handle loop
|
||||
maxRetries: 0,
|
||||
model: model.language,
|
||||
providerOptions: ProviderTransform.providerOptions(model.npm, model.providerID, model.info.options),
|
||||
providerOptions: ProviderTransform.providerOptions(
|
||||
model.npm,
|
||||
model.providerID,
|
||||
model.info.options,
|
||||
),
|
||||
headers: model.info.headers,
|
||||
abortSignal: signal,
|
||||
onError(error) {
|
||||
@@ -230,7 +244,11 @@ export namespace SessionCompaction {
|
||||
error: e,
|
||||
})
|
||||
const error = MessageV2.fromError(e, { providerID: input.providerID })
|
||||
if (retries.count < retries.max && MessageV2.APIError.isInstance(error) && error.data.isRetryable) {
|
||||
if (
|
||||
retries.count < retries.max &&
|
||||
MessageV2.APIError.isInstance(error) &&
|
||||
error.data.isRetryable
|
||||
) {
|
||||
shouldRetry = true
|
||||
await Session.updatePart({
|
||||
id: Identifier.ascending("part"),
|
||||
|
||||
@@ -50,7 +50,10 @@ export namespace SessionLock {
|
||||
export function acquire(input: { sessionID: string }) {
|
||||
const lock = get(input.sessionID)
|
||||
if (lock) {
|
||||
throw new LockedError({ sessionID: input.sessionID, message: `Session ${input.sessionID} is locked` })
|
||||
throw new LockedError({
|
||||
sessionID: input.sessionID,
|
||||
message: `Session ${input.sessionID} is locked`,
|
||||
})
|
||||
}
|
||||
const controller = new AbortController()
|
||||
state().locks.set(input.sessionID, {
|
||||
|
||||
@@ -2,14 +2,23 @@ import z from "zod"
|
||||
import { Bus } from "../bus"
|
||||
import { NamedError } from "../util/error"
|
||||
import { Message } from "./message"
|
||||
import { APICallError, convertToModelMessages, LoadAPIKeyError, type ModelMessage, type UIMessage } from "ai"
|
||||
import {
|
||||
APICallError,
|
||||
convertToModelMessages,
|
||||
LoadAPIKeyError,
|
||||
type ModelMessage,
|
||||
type UIMessage,
|
||||
} from "ai"
|
||||
import { Identifier } from "../id/id"
|
||||
import { LSP } from "../lsp"
|
||||
import { Snapshot } from "@/snapshot"
|
||||
|
||||
export namespace MessageV2 {
|
||||
export const OutputLengthError = NamedError.create("MessageOutputLengthError", z.object({}))
|
||||
export const AbortedError = NamedError.create("MessageAbortedError", z.object({ message: z.string() }))
|
||||
export const AbortedError = NamedError.create(
|
||||
"MessageAbortedError",
|
||||
z.object({ message: z.string() }),
|
||||
)
|
||||
export const AuthError = NamedError.create(
|
||||
"ProviderAuthError",
|
||||
z.object({
|
||||
@@ -242,7 +251,12 @@ export namespace MessageV2 {
|
||||
export type ToolStateError = z.infer<typeof ToolStateError>
|
||||
|
||||
export const ToolState = z
|
||||
.discriminatedUnion("status", [ToolStatePending, ToolStateRunning, ToolStateCompleted, ToolStateError])
|
||||
.discriminatedUnion("status", [
|
||||
ToolStatePending,
|
||||
ToolStateRunning,
|
||||
ToolStateCompleted,
|
||||
ToolStateError,
|
||||
])
|
||||
.meta({
|
||||
ref: "ToolState",
|
||||
})
|
||||
@@ -313,7 +327,6 @@ export namespace MessageV2 {
|
||||
APIError.Schema,
|
||||
])
|
||||
.optional(),
|
||||
system: z.string().array(),
|
||||
parentID: z.string(),
|
||||
modelID: z.string(),
|
||||
providerID: z.string(),
|
||||
@@ -397,7 +410,6 @@ export namespace MessageV2 {
|
||||
tokens: v1.metadata.assistant!.tokens,
|
||||
modelID: v1.metadata.assistant!.modelID,
|
||||
providerID: v1.metadata.assistant!.providerID,
|
||||
system: v1.metadata.assistant!.system,
|
||||
mode: "build",
|
||||
error: v1.metadata.error,
|
||||
}
|
||||
@@ -440,7 +452,8 @@ export namespace MessageV2 {
|
||||
}
|
||||
}
|
||||
|
||||
const { title, time, ...metadata } = v1.metadata.tool[part.toolInvocation.toolCallId] ?? {}
|
||||
const { title, time, ...metadata } =
|
||||
v1.metadata.tool[part.toolInvocation.toolCallId] ?? {}
|
||||
if (part.toolInvocation.state === "call") {
|
||||
return {
|
||||
status: "running",
|
||||
@@ -541,7 +554,11 @@ export namespace MessageV2 {
|
||||
},
|
||||
]
|
||||
// text/plain and directory files are converted into text parts, ignore them
|
||||
if (part.type === "file" && part.mime !== "text/plain" && part.mime !== "application/x-directory")
|
||||
if (
|
||||
part.type === "file" &&
|
||||
part.mime !== "text/plain" &&
|
||||
part.mime !== "application/x-directory"
|
||||
)
|
||||
return [
|
||||
{
|
||||
type: "file",
|
||||
@@ -600,7 +617,9 @@ export namespace MessageV2 {
|
||||
state: "output-available",
|
||||
toolCallId: part.callID,
|
||||
input: part.state.input,
|
||||
output: part.state.time.compacted ? "[Old tool result content cleared]" : part.state.output,
|
||||
output: part.state.time.compacted
|
||||
? "[Old tool result content cleared]"
|
||||
: part.state.output,
|
||||
callProviderMetadata: part.metadata,
|
||||
},
|
||||
]
|
||||
|
||||
@@ -51,9 +51,11 @@ export namespace Message {
|
||||
})
|
||||
export type ToolResult = z.infer<typeof ToolResult>
|
||||
|
||||
export const ToolInvocation = z.discriminatedUnion("state", [ToolCall, ToolPartialCall, ToolResult]).meta({
|
||||
ref: "ToolInvocation",
|
||||
})
|
||||
export const ToolInvocation = z
|
||||
.discriminatedUnion("state", [ToolCall, ToolPartialCall, ToolResult])
|
||||
.meta({
|
||||
ref: "ToolInvocation",
|
||||
})
|
||||
export type ToolInvocation = z.infer<typeof ToolInvocation>
|
||||
|
||||
export const TextPart = z
|
||||
@@ -122,7 +124,14 @@ export namespace Message {
|
||||
export type StepStartPart = z.infer<typeof StepStartPart>
|
||||
|
||||
export const MessagePart = z
|
||||
.discriminatedUnion("type", [TextPart, ReasoningPart, ToolInvocationPart, SourceUrlPart, FilePart, StepStartPart])
|
||||
.discriminatedUnion("type", [
|
||||
TextPart,
|
||||
ReasoningPart,
|
||||
ToolInvocationPart,
|
||||
SourceUrlPart,
|
||||
FilePart,
|
||||
StepStartPart,
|
||||
])
|
||||
.meta({
|
||||
ref: "MessagePart",
|
||||
})
|
||||
@@ -140,7 +149,11 @@ export namespace Message {
|
||||
completed: z.number().optional(),
|
||||
}),
|
||||
error: z
|
||||
.discriminatedUnion("name", [AuthError.Schema, NamedError.Unknown.Schema, OutputLengthError.Schema])
|
||||
.discriminatedUnion("name", [
|
||||
AuthError.Schema,
|
||||
NamedError.Unknown.Schema,
|
||||
OutputLengthError.Schema,
|
||||
])
|
||||
.optional(),
|
||||
sessionID: z.string(),
|
||||
tool: z.record(
|
||||
|
||||
@@ -961,7 +961,6 @@ export namespace SessionPrompt {
|
||||
id: Identifier.ascending("message"),
|
||||
parentID,
|
||||
role: "assistant",
|
||||
system: input.system,
|
||||
mode: input.agent,
|
||||
path: {
|
||||
cwd: Instance.directory,
|
||||
@@ -1412,7 +1411,6 @@ export namespace SessionPrompt {
|
||||
id: Identifier.ascending("message"),
|
||||
sessionID: input.sessionID,
|
||||
parentID: userMsg.id,
|
||||
system: [],
|
||||
mode: input.agent,
|
||||
cost: 0,
|
||||
path: {
|
||||
@@ -1709,7 +1707,6 @@ export namespace SessionPrompt {
|
||||
id: Identifier.ascending("message"),
|
||||
sessionID: input.sessionID,
|
||||
parentID: userMsg.id,
|
||||
system: [],
|
||||
mode: agentName,
|
||||
cost: 0,
|
||||
path: {
|
||||
|
||||
@@ -45,7 +45,9 @@ export namespace SessionRevert {
|
||||
if (!revert) {
|
||||
if ((msg.info.id === input.messageID && !input.partID) || part.id === input.partID) {
|
||||
// if no useful parts left in message, same as reverting whole message
|
||||
const partID = remaining.some((item) => ["text", "tool"].includes(item.type)) ? input.partID : undefined
|
||||
const partID = remaining.some((item) => ["text", "tool"].includes(item.type))
|
||||
? input.partID
|
||||
: undefined
|
||||
revert = {
|
||||
messageID: !partID && lastUser ? lastUser.id : msg.info.id,
|
||||
partID,
|
||||
|
||||
@@ -24,7 +24,8 @@ export namespace SystemPrompt {
|
||||
|
||||
export function provider(modelID: string) {
|
||||
if (modelID.includes("gpt-5")) return [PROMPT_CODEX]
|
||||
if (modelID.includes("gpt-") || modelID.includes("o1") || modelID.includes("o3")) return [PROMPT_BEAST]
|
||||
if (modelID.includes("gpt-") || modelID.includes("o1") || modelID.includes("o3"))
|
||||
return [PROMPT_BEAST]
|
||||
if (modelID.includes("gemini-")) return [PROMPT_GEMINI]
|
||||
if (modelID.includes("claude")) return [PROMPT_ANTHROPIC]
|
||||
return [PROMPT_ANTHROPIC_WITHOUT_TODO]
|
||||
@@ -99,7 +100,11 @@ export namespace SystemPrompt {
|
||||
}),
|
||||
).catch(() => [])
|
||||
} else {
|
||||
matches = await Filesystem.globUp(instruction, Instance.directory, Instance.worktree).catch(() => [])
|
||||
matches = await Filesystem.globUp(
|
||||
instruction,
|
||||
Instance.directory,
|
||||
Instance.worktree,
|
||||
).catch(() => [])
|
||||
}
|
||||
matches.forEach((path) => paths.add(path))
|
||||
}
|
||||
|
||||
@@ -6,7 +6,9 @@ export namespace Todo {
export const Info = z
.object({
content: z.string().describe("Brief description of the task"),
status: z.string().describe("Current status of the task: pending, in_progress, completed, cancelled"),
status: z
.string()
.describe("Current status of the task: pending, in_progress, completed, cancelled"),
priority: z.string().describe("Priority level of the task: high, medium, low"),
id: z.string().describe("Unique identifier for the todo item"),
})

@@ -50,7 +50,10 @@ export namespace Share {
|
||||
await sync("session/info/" + evt.properties.info.id, evt.properties.info)
|
||||
})
|
||||
Bus.subscribe(MessageV2.Event.Updated, async (evt) => {
|
||||
await sync("session/message/" + evt.properties.info.sessionID + "/" + evt.properties.info.id, evt.properties.info)
|
||||
await sync(
|
||||
"session/message/" + evt.properties.info.sessionID + "/" + evt.properties.info.id,
|
||||
evt.properties.info,
|
||||
)
|
||||
})
|
||||
Bus.subscribe(MessageV2.Event.PartUpdated, async (evt) => {
|
||||
await sync(
|
||||
@@ -67,7 +70,9 @@ export namespace Share {
|
||||
|
||||
export const URL =
|
||||
process.env["OPENCODE_API"] ??
|
||||
(Installation.isPreview() || Installation.isLocal() ? "https://api.dev.opencode.ai" : "https://api.opencode.ai")
|
||||
(Installation.isPreview() || Installation.isLocal()
|
||||
? "https://api.dev.opencode.ai"
|
||||
: "https://api.opencode.ai")
|
||||
|
||||
export async function create(sessionID: string) {
|
||||
return fetch(`${URL}/share_create`, {
|
||||
|
||||
@@ -27,7 +27,11 @@ export namespace Snapshot {
|
||||
log.info("initialized")
|
||||
}
|
||||
await $`git --git-dir ${git} add .`.quiet().cwd(Instance.directory).nothrow()
|
||||
const hash = await $`git --git-dir ${git} write-tree`.quiet().cwd(Instance.directory).nothrow().text()
|
||||
const hash = await $`git --git-dir ${git} write-tree`
|
||||
.quiet()
|
||||
.cwd(Instance.directory)
|
||||
.nothrow()
|
||||
.text()
|
||||
log.info("tracking", { hash, cwd: Instance.directory, git })
|
||||
return hash.trim()
|
||||
}
|
||||
@@ -41,7 +45,10 @@ export namespace Snapshot {
|
||||
export async function patch(hash: string): Promise<Patch> {
|
||||
const git = gitdir()
|
||||
await $`git --git-dir ${git} add .`.quiet().cwd(Instance.directory).nothrow()
|
||||
const result = await $`git --git-dir ${git} diff --name-only ${hash} -- .`.quiet().cwd(Instance.directory).nothrow()
|
||||
const result = await $`git --git-dir ${git} diff --name-only ${hash} -- .`
|
||||
.quiet()
|
||||
.cwd(Instance.directory)
|
||||
.nothrow()
|
||||
|
||||
// If git diff fails, return empty patch
|
||||
if (result.exitCode !== 0) {
|
||||
@@ -64,10 +71,11 @@ export namespace Snapshot {
|
||||
export async function restore(snapshot: string) {
|
||||
log.info("restore", { commit: snapshot })
|
||||
const git = gitdir()
|
||||
const result = await $`git --git-dir=${git} read-tree ${snapshot} && git --git-dir=${git} checkout-index -a -f`
|
||||
.quiet()
|
||||
.cwd(Instance.worktree)
|
||||
.nothrow()
|
||||
const result =
|
||||
await $`git --git-dir=${git} read-tree ${snapshot} && git --git-dir=${git} checkout-index -a -f`
|
||||
.quiet()
|
||||
.cwd(Instance.worktree)
|
||||
.nothrow()
|
||||
|
||||
if (result.exitCode !== 0) {
|
||||
log.error("failed to restore snapshot", {
|
||||
@@ -113,7 +121,10 @@ export namespace Snapshot {
|
||||
export async function diff(hash: string) {
|
||||
const git = gitdir()
|
||||
await $`git --git-dir ${git} add .`.quiet().cwd(Instance.directory).nothrow()
|
||||
const result = await $`git --git-dir=${git} diff ${hash} -- .`.quiet().cwd(Instance.worktree).nothrow()
|
||||
const result = await $`git --git-dir=${git} diff ${hash} -- .`
|
||||
.quiet()
|
||||
.cwd(Instance.worktree)
|
||||
.nothrow()
|
||||
|
||||
if (result.exitCode !== 0) {
|
||||
log.warn("failed to get diff", {
|
||||
|
||||
@@ -23,8 +23,13 @@ export const EditTool = Tool.define("edit", {
|
||||
parameters: z.object({
|
||||
filePath: z.string().describe("The absolute path to the file to modify"),
|
||||
oldString: z.string().describe("The text to replace"),
|
||||
newString: z.string().describe("The text to replace it with (must be different from oldString)"),
|
||||
replaceAll: z.boolean().optional().describe("Replace all occurrences of oldString (default false)"),
|
||||
newString: z
|
||||
.string()
|
||||
.describe("The text to replace it with (must be different from oldString)"),
|
||||
replaceAll: z
|
||||
.boolean()
|
||||
.optional()
|
||||
.describe("Replace all occurrences of oldString (default false)"),
|
||||
}),
|
||||
async execute(params, ctx) {
|
||||
if (!params.filePath) {
|
||||
@@ -35,7 +40,9 @@ export const EditTool = Tool.define("edit", {
|
||||
throw new Error("oldString and newString must be different")
|
||||
}
|
||||
|
||||
const filePath = path.isAbsolute(params.filePath) ? params.filePath : path.join(Instance.directory, params.filePath)
|
||||
const filePath = path.isAbsolute(params.filePath)
|
||||
? params.filePath
|
||||
: path.join(Instance.directory, params.filePath)
|
||||
if (!Filesystem.contains(Instance.directory, filePath)) {
|
||||
const parentDir = path.dirname(filePath)
|
||||
await Permission.ask({
|
||||
@@ -172,7 +179,11 @@ function levenshtein(a: string, b: string): number {
|
||||
for (let i = 1; i <= a.length; i++) {
|
||||
for (let j = 1; j <= b.length; j++) {
|
||||
const cost = a[i - 1] === b[j - 1] ? 0 : 1
|
||||
matrix[i][j] = Math.min(matrix[i - 1][j] + 1, matrix[i][j - 1] + 1, matrix[i - 1][j - 1] + cost)
|
||||
matrix[i][j] = Math.min(
|
||||
matrix[i - 1][j] + 1,
|
||||
matrix[i][j - 1] + 1,
|
||||
matrix[i - 1][j - 1] + cost,
|
||||
)
|
||||
}
|
||||
}
|
||||
return matrix[a.length][b.length]
|
||||
@@ -374,7 +385,9 @@ export const WhitespaceNormalizedReplacer: Replacer = function* (content, find)
|
||||
// Find the actual substring in the original line that matches
|
||||
const words = find.trim().split(/\s+/)
|
||||
if (words.length > 0) {
|
||||
const pattern = words.map((word) => word.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")).join("\\s+")
|
||||
const pattern = words
|
||||
.map((word) => word.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"))
|
||||
.join("\\s+")
|
||||
try {
|
||||
const regex = new RegExp(pattern)
|
||||
const match = line.match(regex)
|
||||
@@ -612,7 +625,12 @@ export function trimDiff(diff: string): string {
|
||||
return trimmedLines.join("\n")
|
||||
}
|
||||
|
||||
export function replace(content: string, oldString: string, newString: string, replaceAll = false): string {
|
||||
export function replace(
|
||||
content: string,
|
||||
oldString: string,
|
||||
newString: string,
|
||||
replaceAll = false,
|
||||
): string {
|
||||
if (oldString === newString) {
|
||||
throw new Error("oldString and newString must be different")
|
||||
}
|
||||
|
||||
@@ -9,8 +9,14 @@ export const GrepTool = Tool.define("grep", {
|
||||
description: DESCRIPTION,
|
||||
parameters: z.object({
|
||||
pattern: z.string().describe("The regex pattern to search for in file contents"),
|
||||
path: z.string().optional().describe("The directory to search in. Defaults to the current working directory."),
|
||||
include: z.string().optional().describe('File pattern to include in the search (e.g. "*.js", "*.{ts,tsx}")'),
|
||||
path: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe("The directory to search in. Defaults to the current working directory."),
|
||||
include: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe('File pattern to include in the search (e.g. "*.js", "*.{ts,tsx}")'),
|
||||
}),
|
||||
async execute(params) {
|
||||
if (!params.pattern) {
|
||||
|
||||
@@ -37,13 +37,18 @@ const LIMIT = 100
|
||||
export const ListTool = Tool.define("list", {
|
||||
description: DESCRIPTION,
|
||||
parameters: z.object({
|
||||
path: z.string().describe("The absolute path to the directory to list (must be absolute, not relative)").optional(),
|
||||
path: z
|
||||
.string()
|
||||
.describe("The absolute path to the directory to list (must be absolute, not relative)")
|
||||
.optional(),
|
||||
ignore: z.array(z.string()).describe("List of glob patterns to ignore").optional(),
|
||||
}),
|
||||
async execute(params) {
|
||||
const searchPath = path.resolve(Instance.directory, params.path || ".")
|
||||
|
||||
const ignoreGlobs = IGNORE_PATTERNS.map((p) => `!${p}*`).concat(params.ignore?.map((p) => `!${p}`) || [])
|
||||
const ignoreGlobs = IGNORE_PATTERNS.map((p) => `!${p}*`).concat(
|
||||
params.ignore?.map((p) => `!${p}`) || [],
|
||||
)
|
||||
const files = []
|
||||
for await (const file of Ripgrep.files({ cwd: searchPath, glob: ignoreGlobs })) {
|
||||
files.push(file)
|
||||
|
||||
@@ -11,7 +11,9 @@ export const LspDiagnosticTool = Tool.define("lsp_diagnostics", {
|
||||
path: z.string().describe("The path to the file to get diagnostics."),
|
||||
}),
|
||||
execute: async (args) => {
|
||||
const normalized = path.isAbsolute(args.path) ? args.path : path.join(Instance.directory, args.path)
|
||||
const normalized = path.isAbsolute(args.path)
|
||||
? args.path
|
||||
: path.join(Instance.directory, args.path)
|
||||
await LSP.touchFile(normalized, true)
|
||||
const diagnostics = await LSP.diagnostics()
|
||||
const file = diagnostics[normalized]
|
||||
|
||||
@@ -14,8 +14,13 @@ export const MultiEditTool = Tool.define("multiedit", {
|
||||
z.object({
|
||||
filePath: z.string().describe("The absolute path to the file to modify"),
|
||||
oldString: z.string().describe("The text to replace"),
|
||||
newString: z.string().describe("The text to replace it with (must be different from oldString)"),
|
||||
replaceAll: z.boolean().optional().describe("Replace all occurrences of oldString (default false)"),
|
||||
newString: z
|
||||
.string()
|
||||
.describe("The text to replace it with (must be different from oldString)"),
|
||||
replaceAll: z
|
||||
.boolean()
|
||||
.optional()
|
||||
.describe("Replace all occurrences of oldString (default false)"),
|
||||
}),
|
||||
)
|
||||
.describe("Array of edit operations to perform sequentially on the file"),
|
||||
|
||||
@@ -18,7 +18,10 @@ export const ReadTool = Tool.define("read", {
|
||||
description: DESCRIPTION,
|
||||
parameters: z.object({
|
||||
filePath: z.string().describe("The path to the file to read"),
|
||||
offset: z.coerce.number().describe("The line number to start reading from (0-based)").optional(),
|
||||
offset: z.coerce
|
||||
.number()
|
||||
.describe("The line number to start reading from (0-based)")
|
||||
.optional(),
|
||||
limit: z.coerce.number().describe("The number of lines to read (defaults to 2000)").optional(),
|
||||
}),
|
||||
async execute(params, ctx) {
|
||||
@@ -53,13 +56,16 @@ export const ReadTool = Tool.define("read", {
|
||||
const suggestions = dirEntries
|
||||
.filter(
|
||||
(entry) =>
|
||||
entry.toLowerCase().includes(base.toLowerCase()) || base.toLowerCase().includes(entry.toLowerCase()),
|
||||
entry.toLowerCase().includes(base.toLowerCase()) ||
|
||||
base.toLowerCase().includes(entry.toLowerCase()),
|
||||
)
|
||||
.map((entry) => path.join(dir, entry))
|
||||
.slice(0, 3)
|
||||
|
||||
if (suggestions.length > 0) {
|
||||
throw new Error(`File not found: ${filepath}\n\nDid you mean one of these?\n${suggestions.join("\n")}`)
|
||||
throw new Error(
|
||||
`File not found: ${filepath}\n\nDid you mean one of these?\n${suggestions.join("\n")}`,
|
||||
)
|
||||
}
|
||||
|
||||
throw new Error(`File not found: ${filepath}`)
|
||||
|
||||
@@ -24,7 +24,12 @@ export namespace ToolRegistry {
|
||||
const glob = new Bun.Glob("tool/*.{js,ts}")
|
||||
|
||||
for (const dir of await Config.directories()) {
|
||||
for await (const match of glob.scan({ cwd: dir, absolute: true, followSymlinks: true, dot: true })) {
|
||||
for await (const match of glob.scan({
|
||||
cwd: dir,
|
||||
absolute: true,
|
||||
followSymlinks: true,
|
||||
dot: true,
|
||||
})) {
|
||||
const namespace = path.basename(match, path.extname(match))
|
||||
const mod = await import(match)
|
||||
for (const [id, def] of Object.entries<ToolDefinition>(mod)) {
|
||||
|
||||
@@ -14,7 +14,10 @@ export const TaskTool = Tool.define("task", async () => {
|
||||
const description = DESCRIPTION.replace(
|
||||
"{agents}",
|
||||
agents
|
||||
.map((a) => `- ${a.name}: ${a.description ?? "This subagent should only be called manually by the user."}`)
|
||||
.map(
|
||||
(a) =>
|
||||
`- ${a.name}: ${a.description ?? "This subagent should only be called manually by the user."}`,
|
||||
)
|
||||
.join("\n"),
|
||||
)
|
||||
return {
|
||||
@@ -26,7 +29,8 @@ export const TaskTool = Tool.define("task", async () => {
|
||||
}),
|
||||
async execute(params, ctx) {
|
||||
const agent = await Agent.get(params.subagent_type)
|
||||
if (!agent) throw new Error(`Unknown agent type: ${params.subagent_type} is not a valid agent type`)
|
||||
if (!agent)
|
||||
throw new Error(`Unknown agent type: ${params.subagent_type} is not a valid agent type`)
|
||||
const session = await Session.create({
|
||||
parentID: ctx.sessionID,
|
||||
title: params.description + ` (@${agent.name} subagent)`,
|
||||
@@ -91,7 +95,9 @@ export const TaskTool = Tool.define("task", async () => {
|
||||
let all
|
||||
all = await Session.messages(session.id)
|
||||
all = all.filter((x) => x.info.role === "assistant")
|
||||
all = all.flatMap((msg) => msg.parts.filter((x: any) => x.type === "tool") as MessageV2.ToolPart[])
|
||||
all = all.flatMap(
|
||||
(msg) => msg.parts.filter((x: any) => x.type === "tool") as MessageV2.ToolPart[],
|
||||
)
|
||||
return {
|
||||
title: params.description,
|
||||
metadata: {
|
||||
|
||||
@@ -48,13 +48,15 @@ export const WebFetchTool = Tool.define("webfetch", {
|
||||
let acceptHeader = "*/*"
|
||||
switch (params.format) {
|
||||
case "markdown":
|
||||
acceptHeader = "text/markdown;q=1.0, text/x-markdown;q=0.9, text/plain;q=0.8, text/html;q=0.7, */*;q=0.1"
|
||||
acceptHeader =
|
||||
"text/markdown;q=1.0, text/x-markdown;q=0.9, text/plain;q=0.8, text/html;q=0.7, */*;q=0.1"
|
||||
break
|
||||
case "text":
|
||||
acceptHeader = "text/plain;q=1.0, text/markdown;q=0.9, text/html;q=0.8, */*;q=0.1"
|
||||
break
|
||||
case "html":
|
||||
acceptHeader = "text/html;q=1.0, application/xhtml+xml;q=0.9, text/plain;q=0.8, text/markdown;q=0.7, */*;q=0.1"
|
||||
acceptHeader =
|
||||
"text/html;q=1.0, application/xhtml+xml;q=0.9, text/plain;q=0.8, text/markdown;q=0.7, */*;q=0.1"
|
||||
break
|
||||
default:
|
||||
acceptHeader =
|
||||
@@ -158,7 +160,9 @@ async function extractTextFromHTML(html: string) {
|
||||
.on("*", {
|
||||
element(element) {
|
||||
// Reset skip flag when entering other elements
|
||||
if (!["script", "style", "noscript", "iframe", "object", "embed"].includes(element.tagName)) {
|
||||
if (
|
||||
!["script", "style", "noscript", "iframe", "object", "embed"].includes(element.tagName)
|
||||
) {
|
||||
skipContent = false
|
||||
}
|
||||
},
|
||||
|
||||
@@ -15,10 +15,14 @@ export const WriteTool = Tool.define("write", {
|
||||
description: DESCRIPTION,
|
||||
parameters: z.object({
|
||||
content: z.string().describe("The content to write to the file"),
|
||||
filePath: z.string().describe("The absolute path to the file to write (must be absolute, not relative)"),
|
||||
filePath: z
|
||||
.string()
|
||||
.describe("The absolute path to the file to write (must be absolute, not relative)"),
|
||||
}),
|
||||
async execute(params, ctx) {
|
||||
const filepath = path.isAbsolute(params.filePath) ? params.filePath : path.join(Instance.directory, params.filePath)
|
||||
const filepath = path.isAbsolute(params.filePath)
|
||||
? params.filePath
|
||||
: path.join(Instance.directory, params.filePath)
|
||||
if (!Filesystem.contains(Instance.directory, filepath)) {
|
||||
const parentDir = path.dirname(filepath)
|
||||
await Permission.ask({
|
||||
|
||||
@@ -1,5 +1,9 @@
export namespace Binary {
export function search<T>(array: T[], id: string, compare: (item: T) => string): { found: boolean; index: number } {
export function search<T>(
array: T[],
id: string,
compare: (item: T) => string,
): { found: boolean; index: number } {
let left = 0
let right = array.length - 1

@@ -1,6 +1,8 @@
export function defer<T extends () => void | Promise<void>>(
fn: T,
): T extends () => Promise<void> ? { [Symbol.asyncDispose]: () => Promise<void> } : { [Symbol.dispose]: () => void } {
): T extends () => Promise<void>
? { [Symbol.asyncDispose]: () => Promise<void> }
: { [Symbol.dispose]: () => void } {
return {
[Symbol.dispose]() {
fn()

@@ -4,11 +4,17 @@ export namespace EventLoop {
|
||||
export async function wait() {
|
||||
return new Promise<void>((resolve) => {
|
||||
const check = () => {
|
||||
const active = [...(process as any)._getActiveHandles(), ...(process as any)._getActiveRequests()]
|
||||
const active = [
|
||||
...(process as any)._getActiveHandles(),
|
||||
...(process as any)._getActiveRequests(),
|
||||
]
|
||||
Log.Default.info("eventloop", {
|
||||
active,
|
||||
})
|
||||
if ((process as any)._getActiveHandles().length === 0 && (process as any)._getActiveRequests().length === 0) {
|
||||
if (
|
||||
(process as any)._getActiveHandles().length === 0 &&
|
||||
(process as any)._getActiveRequests().length === 0
|
||||
) {
|
||||
resolve()
|
||||
} else {
|
||||
setImmediate(check)
|
||||
|
||||
@@ -39,7 +39,12 @@ export namespace Lock {
|
||||
}
|
||||
|
||||
// Clean up empty locks
|
||||
if (lock.readers === 0 && !lock.writer && lock.waitingReaders.length === 0 && lock.waitingWriters.length === 0) {
|
||||
if (
|
||||
lock.readers === 0 &&
|
||||
!lock.writer &&
|
||||
lock.waitingReaders.length === 0 &&
|
||||
lock.waitingWriters.length === 0
|
||||
) {
|
||||
locks.delete(key)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -30,7 +30,10 @@ export namespace Rpc {
|
||||
}
|
||||
}
|
||||
return {
|
||||
call<Method extends keyof T>(method: Method, input: Parameters<T[Method]>[0]): Promise<ReturnType<T[Method]>> {
|
||||
call<Method extends keyof T>(
|
||||
method: Method,
|
||||
input: Parameters<T[Method]>[0],
|
||||
): Promise<ReturnType<T[Method]>> {
|
||||
const requestId = id++
|
||||
return new Promise((resolve) => {
|
||||
pending.set(requestId, resolve)
|
||||
|
||||
@@ -15,7 +15,11 @@ export namespace Wildcard {
|
||||
}
|
||||
|
||||
export function all(input: string, patterns: Record<string, any>) {
|
||||
const sorted = pipe(patterns, Object.entries, sortBy([([key]) => key.length, "asc"], [([key]) => key, "asc"]))
|
||||
const sorted = pipe(
|
||||
patterns,
|
||||
Object.entries,
|
||||
sortBy([([key]) => key.length, "asc"], [([key]) => key, "asc"]),
|
||||
)
|
||||
let result = undefined
|
||||
for (const [pattern, value] of sorted) {
|
||||
if (match(input, pattern)) {
|
||||
@@ -26,8 +30,15 @@ export namespace Wildcard {
|
||||
return result
|
||||
}
|
||||
|
||||
export function allStructured(input: { head: string; tail: string[] }, patterns: Record<string, any>) {
|
||||
const sorted = pipe(patterns, Object.entries, sortBy([([key]) => key.length, "asc"], [([key]) => key, "asc"]))
|
||||
export function allStructured(
|
||||
input: { head: string; tail: string[] },
|
||||
patterns: Record<string, any>,
|
||||
) {
|
||||
const sorted = pipe(
|
||||
patterns,
|
||||
Object.entries,
|
||||
sortBy([([key]) => key.length, "asc"], [([key]) => key, "asc"]),
|
||||
)
|
||||
let result = undefined
|
||||
for (const [pattern, value] of sorted) {
|
||||
const parts = pattern.split(/\s+/)
|
||||
|
||||
packages/opencode/sst-env.d.ts (vendored)
@@ -6,4 +6,4 @@
/// <reference path="../../sst-env.d.ts" />

import "sst"
export {}
export {}

@@ -6,23 +6,23 @@ import { tmpdir } from "os"
|
||||
|
||||
describe("Patch namespace", () => {
|
||||
let tempDir: string
|
||||
|
||||
|
||||
beforeEach(async () => {
|
||||
tempDir = await fs.mkdtemp(path.join(tmpdir(), "patch-test-"))
|
||||
})
|
||||
|
||||
|
||||
afterEach(async () => {
|
||||
// Clean up temp directory
|
||||
await fs.rm(tempDir, { recursive: true, force: true })
|
||||
})
|
||||
|
||||
|
||||
describe("parsePatch", () => {
|
||||
test("should parse simple add file patch", () => {
|
||||
const patchText = `*** Begin Patch
|
||||
*** Add File: test.txt
|
||||
+Hello World
|
||||
*** End Patch`
|
||||
|
||||
|
||||
const result = Patch.parsePatch(patchText)
|
||||
expect(result.hunks).toHaveLength(1)
|
||||
expect(result.hunks[0]).toEqual({
|
||||
@@ -31,19 +31,19 @@ describe("Patch namespace", () => {
|
||||
contents: "Hello World",
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
test("should parse delete file patch", () => {
|
||||
const patchText = `*** Begin Patch
|
||||
*** Delete File: old.txt
|
||||
*** End Patch`
|
||||
|
||||
|
||||
const result = Patch.parsePatch(patchText)
|
||||
expect(result.hunks).toHaveLength(1)
|
||||
const hunk = result.hunks[0]
|
||||
expect(hunk.type).toBe("delete")
|
||||
expect(hunk.path).toBe("old.txt")
|
||||
})
|
||||
|
||||
|
||||
test("should parse patch with multiple hunks", () => {
|
||||
const patchText = `*** Begin Patch
|
||||
*** Add File: new.txt
|
||||
@@ -54,13 +54,13 @@ describe("Patch namespace", () => {
|
||||
-new line
|
||||
+updated line
|
||||
*** End Patch`
|
||||
|
||||
|
||||
const result = Patch.parsePatch(patchText)
|
||||
expect(result.hunks).toHaveLength(2)
|
||||
expect(result.hunks[0].type).toBe("add")
|
||||
expect(result.hunks[1].type).toBe("update")
|
||||
})
|
||||
|
||||
|
||||
test("should parse file move operation", () => {
|
||||
const patchText = `*** Begin Patch
|
||||
*** Update File: old-name.txt
|
||||
@@ -69,7 +69,7 @@ describe("Patch namespace", () => {
|
||||
-Old content
|
||||
+New content
|
||||
*** End Patch`
|
||||
|
||||
|
||||
const result = Patch.parsePatch(patchText)
|
||||
expect(result.hunks).toHaveLength(1)
|
||||
const hunk = result.hunks[0]
|
||||
@@ -79,21 +79,21 @@ describe("Patch namespace", () => {
|
||||
expect(hunk.move_path).toBe("new-name.txt")
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
test("should throw error for invalid patch format", () => {
|
||||
const invalidPatch = `This is not a valid patch`
|
||||
|
||||
|
||||
expect(() => Patch.parsePatch(invalidPatch)).toThrow("Invalid patch format")
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
describe("maybeParseApplyPatch", () => {
|
||||
test("should parse direct apply_patch command", () => {
|
||||
const patchText = `*** Begin Patch
|
||||
*** Add File: test.txt
|
||||
+Content
|
||||
*** End Patch`
|
||||
|
||||
|
||||
const result = Patch.maybeParseApplyPatch(["apply_patch", patchText])
|
||||
expect(result.type).toBe(Patch.MaybeApplyPatch.Body)
|
||||
if (result.type === Patch.MaybeApplyPatch.Body) {
|
||||
@@ -101,17 +101,17 @@ describe("Patch namespace", () => {
|
||||
expect(result.args.hunks).toHaveLength(1)
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
test("should parse applypatch command", () => {
|
||||
const patchText = `*** Begin Patch
|
||||
*** Add File: test.txt
|
||||
+Content
|
||||
*** End Patch`
|
||||
|
||||
|
||||
const result = Patch.maybeParseApplyPatch(["applypatch", patchText])
|
||||
expect(result.type).toBe(Patch.MaybeApplyPatch.Body)
|
||||
})
|
||||
|
||||
|
||||
test("should handle bash heredoc format", () => {
|
||||
const script = `apply_patch <<'PATCH'
|
||||
*** Begin Patch
|
||||
@@ -119,20 +119,20 @@ describe("Patch namespace", () => {
|
||||
+Content
|
||||
*** End Patch
|
||||
PATCH`
|
||||
|
||||
|
||||
const result = Patch.maybeParseApplyPatch(["bash", "-lc", script])
|
||||
expect(result.type).toBe(Patch.MaybeApplyPatch.Body)
|
||||
if (result.type === Patch.MaybeApplyPatch.Body) {
|
||||
expect(result.args.hunks).toHaveLength(1)
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
test("should return NotApplyPatch for non-patch commands", () => {
|
||||
const result = Patch.maybeParseApplyPatch(["echo", "hello"])
|
||||
expect(result.type).toBe(Patch.MaybeApplyPatch.NotApplyPatch)
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
describe("applyPatch", () => {
|
||||
test("should add a new file", async () => {
|
||||
const patchText = `*** Begin Patch
|
||||
@@ -140,36 +140,39 @@ PATCH`
|
||||
+Hello World
|
||||
+This is a new file
|
||||
*** End Patch`
|
||||
|
||||
|
||||
const result = await Patch.applyPatch(patchText)
|
||||
expect(result.added).toHaveLength(1)
|
||||
expect(result.modified).toHaveLength(0)
|
||||
expect(result.deleted).toHaveLength(0)
|
||||
|
||||
|
||||
const content = await fs.readFile(result.added[0], "utf-8")
|
||||
expect(content).toBe("Hello World\nThis is a new file")
|
||||
})
|
||||
|
||||
|
||||
test("should delete an existing file", async () => {
|
||||
const filePath = path.join(tempDir, "to-delete.txt")
|
||||
await fs.writeFile(filePath, "This file will be deleted")
|
||||
|
||||
|
||||
const patchText = `*** Begin Patch
|
||||
*** Delete File: ${filePath}
|
||||
*** End Patch`
|
||||
|
||||
|
||||
const result = await Patch.applyPatch(patchText)
|
||||
expect(result.deleted).toHaveLength(1)
|
||||
expect(result.deleted[0]).toBe(filePath)
|
||||
|
||||
const exists = await fs.access(filePath).then(() => true).catch(() => false)
|
||||
|
||||
const exists = await fs
|
||||
.access(filePath)
|
||||
.then(() => true)
|
||||
.catch(() => false)
|
||||
expect(exists).toBe(false)
|
||||
})
|
||||
|
||||
|
||||
test("should update an existing file", async () => {
|
||||
const filePath = path.join(tempDir, "to-update.txt")
|
||||
await fs.writeFile(filePath, "line 1\nline 2\nline 3\n")
|
||||
|
||||
|
||||
const patchText = `*** Begin Patch
|
||||
*** Update File: ${filePath}
|
||||
@@
|
||||
@@ -178,20 +181,20 @@ PATCH`
|
||||
+line 2 updated
|
||||
line 3
|
||||
*** End Patch`
|
||||
|
||||
|
||||
const result = await Patch.applyPatch(patchText)
|
||||
expect(result.modified).toHaveLength(1)
|
||||
expect(result.modified[0]).toBe(filePath)
|
||||
|
||||
|
||||
const content = await fs.readFile(filePath, "utf-8")
|
||||
expect(content).toBe("line 1\nline 2 updated\nline 3\n")
|
||||
})
|
||||
|
||||
|
||||
test("should move and update a file", async () => {
|
||||
const oldPath = path.join(tempDir, "old-name.txt")
|
||||
const newPath = path.join(tempDir, "new-name.txt")
|
||||
await fs.writeFile(oldPath, "old content\n")
|
||||
|
||||
|
||||
const patchText = `*** Begin Patch
|
||||
*** Update File: ${oldPath}
|
||||
*** Move to: ${newPath}
|
||||
@@ -199,26 +202,29 @@ PATCH`
|
||||
-old content
|
||||
+new content
|
||||
*** End Patch`
|
||||
|
||||
|
||||
const result = await Patch.applyPatch(patchText)
|
||||
expect(result.modified).toHaveLength(1)
|
||||
expect(result.modified[0]).toBe(newPath)
|
||||
|
||||
const oldExists = await fs.access(oldPath).then(() => true).catch(() => false)
|
||||
|
||||
const oldExists = await fs
|
||||
.access(oldPath)
|
||||
.then(() => true)
|
||||
.catch(() => false)
|
||||
expect(oldExists).toBe(false)
|
||||
|
||||
|
||||
const newContent = await fs.readFile(newPath, "utf-8")
|
||||
expect(newContent).toBe("new content\n")
|
||||
})
|
||||
|
||||
|
||||
test("should handle multiple operations in one patch", async () => {
|
||||
const file1 = path.join(tempDir, "file1.txt")
|
||||
const file2 = path.join(tempDir, "file2.txt")
|
||||
const file3 = path.join(tempDir, "file3.txt")
|
||||
|
||||
|
||||
await fs.writeFile(file1, "content 1")
|
||||
await fs.writeFile(file2, "content 2")
|
||||
|
||||
|
||||
const patchText = `*** Begin Patch
|
||||
*** Add File: ${file3}
|
||||
+new file content
|
||||
@@ -228,95 +234,98 @@ PATCH`
|
||||
+updated content 1
|
||||
*** Delete File: ${file2}
|
||||
*** End Patch`
|
||||
|
||||
|
||||
const result = await Patch.applyPatch(patchText)
|
||||
expect(result.added).toHaveLength(1)
|
||||
expect(result.modified).toHaveLength(1)
|
||||
expect(result.deleted).toHaveLength(1)
|
||||
})
|
||||
|
||||
|
||||
test("should create parent directories when adding files", async () => {
|
||||
const nestedPath = path.join(tempDir, "deep", "nested", "file.txt")
|
||||
|
||||
|
||||
const patchText = `*** Begin Patch
|
||||
*** Add File: ${nestedPath}
|
||||
+Deep nested content
|
||||
*** End Patch`
|
||||
|
||||
|
||||
const result = await Patch.applyPatch(patchText)
|
||||
expect(result.added).toHaveLength(1)
|
||||
expect(result.added[0]).toBe(nestedPath)
|
||||
|
||||
const exists = await fs.access(nestedPath).then(() => true).catch(() => false)
|
||||
|
||||
const exists = await fs
|
||||
.access(nestedPath)
|
||||
.then(() => true)
|
||||
.catch(() => false)
|
||||
expect(exists).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
describe("error handling", () => {
|
||||
test("should throw error when updating non-existent file", async () => {
|
||||
const nonExistent = path.join(tempDir, "does-not-exist.txt")
|
||||
|
||||
|
||||
const patchText = `*** Begin Patch
|
||||
*** Update File: ${nonExistent}
|
||||
@@
|
||||
-old line
|
||||
+new line
|
||||
*** End Patch`
|
||||
|
||||
|
||||
await expect(Patch.applyPatch(patchText)).rejects.toThrow()
|
||||
})
|
||||
|
||||
|
||||
test("should throw error when deleting non-existent file", async () => {
|
||||
const nonExistent = path.join(tempDir, "does-not-exist.txt")
|
||||
|
||||
|
||||
const patchText = `*** Begin Patch
|
||||
*** Delete File: ${nonExistent}
|
||||
*** End Patch`
|
||||
|
||||
|
||||
await expect(Patch.applyPatch(patchText)).rejects.toThrow()
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
describe("edge cases", () => {
|
||||
test("should handle empty files", async () => {
|
||||
const emptyFile = path.join(tempDir, "empty.txt")
|
||||
await fs.writeFile(emptyFile, "")
|
||||
|
||||
|
||||
const patchText = `*** Begin Patch
|
||||
*** Update File: ${emptyFile}
|
||||
@@
|
||||
+First line
|
||||
*** End Patch`
|
||||
|
||||
|
||||
const result = await Patch.applyPatch(patchText)
|
||||
expect(result.modified).toHaveLength(1)
|
||||
|
||||
|
||||
const content = await fs.readFile(emptyFile, "utf-8")
|
||||
expect(content).toBe("First line\n")
|
||||
})
|
||||
|
||||
|
||||
test("should handle files with no trailing newline", async () => {
|
||||
const filePath = path.join(tempDir, "no-newline.txt")
|
||||
await fs.writeFile(filePath, "no newline")
|
||||
|
||||
|
||||
const patchText = `*** Begin Patch
|
||||
*** Update File: ${filePath}
|
||||
@@
|
||||
-no newline
|
||||
+has newline now
|
||||
*** End Patch`
|
||||
|
||||
|
||||
const result = await Patch.applyPatch(patchText)
|
||||
expect(result.modified).toHaveLength(1)
|
||||
|
||||
|
||||
const content = await fs.readFile(filePath, "utf-8")
|
||||
expect(content).toBe("has newline now\n")
|
||||
})
|
||||
|
||||
|
||||
test("should handle multiple update chunks in single file", async () => {
|
||||
const filePath = path.join(tempDir, "multi-chunk.txt")
|
||||
await fs.writeFile(filePath, "line 1\nline 2\nline 3\nline 4\n")
|
||||
|
||||
|
||||
const patchText = `*** Begin Patch
|
||||
*** Update File: ${filePath}
|
||||
@@
|
||||
@@ -328,12 +337,12 @@ PATCH`
|
||||
-line 4
|
||||
+LINE 4
|
||||
*** End Patch`
|
||||
|
||||
|
||||
const result = await Patch.applyPatch(patchText)
|
||||
expect(result.modified).toHaveLength(1)
|
||||
|
||||
|
||||
const content = await fs.readFile(filePath, "utf-8")
|
||||
expect(content).toBe("line 1\nLINE 2\nline 3\nLINE 4\n")
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -13,7 +13,9 @@ function apiError(headers?: Record<string, string>): MessageV2.APIError {
describe("session.retry.getRetryDelayInMs", () => {
test("doubles delay on each attempt when headers missing", () => {
const error = apiError()
const delays = Array.from({ length: 7 }, (_, index) => SessionRetry.getRetryDelayInMs(error, index + 1))
const delays = Array.from({ length: 7 }, (_, index) =>
SessionRetry.getRetryDelayInMs(error, index + 1),
)
expect(delays).toStrictEqual([2000, 4000, 8000, 16000, 32000, 64000, 128000])
})

@@ -24,9 +24,12 @@ test("allStructured matches command sequences", () => {
"git status*": "allow",
}
expect(Wildcard.allStructured({ head: "git", tail: ["status", "--short"] }, rules)).toBe("allow")
expect(Wildcard.allStructured({ head: "npm", tail: ["run", "build", "--watch"] }, { "npm run *": "allow" })).toBe(
"allow",
)
expect(
Wildcard.allStructured(
{ head: "npm", tail: ["run", "build", "--watch"] },
{ "npm run *": "allow" },
),
).toBe("allow")
expect(Wildcard.allStructured({ head: "ls", tail: ["-la"] }, rules)).toBeUndefined()
})

@@ -51,5 +54,7 @@ test("allStructured handles sed flags", () => {
expect(Wildcard.allStructured({ head: "sed", tail: ["-i", "file"] }, rules)).toBe("ask")
expect(Wildcard.allStructured({ head: "sed", tail: ["-i.bak", "file"] }, rules)).toBe("ask")
expect(Wildcard.allStructured({ head: "sed", tail: ["-n", "1p", "file"] }, rules)).toBe("allow")
expect(Wildcard.allStructured({ head: "sed", tail: ["-i", "-n", "/./p", "myfile.txt"] }, rules)).toBe("ask")
expect(
Wildcard.allStructured({ head: "sed", tail: ["-i", "-n", "/./p", "myfile.txt"] }, rules),
).toBe("ask")
})