core: make patch tool more reliable and consistent with other editing tools

The patch tool now works alongside the other file editing tools, with improved
error handling and a permission flow that matches them. In practice this means:

- More reliable patch application with better error messages
- Consistent permission prompts that match other editing tools
- Smoother integration when applying complex multi-file changes
- Better feedback on what changes are being made before applying patches

This refactoring backs the tool with a dedicated patch parsing module (whose core
types mirror the Rust implementation) while making the tool feel native to the
opencode workflow, reducing friction when applying bulk changes to a codebase.
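
A minimal sketch of driving the new Patch namespace directly (the import path and file name are illustrative, not part of this commit):

import { Patch } from "./src/patch"

const patchText = `*** Begin Patch
*** Add File: notes/hello.txt
+Hello World
*** End Patch`

// parsePatch only parses; applyPatch parses and writes the hunks to disk
const { hunks } = Patch.parsePatch(patchText) // one "add" hunk for notes/hello.txt
const result = await Patch.applyPatch(patchText) // { added: ["notes/hello.txt"], modified: [], deleted: [] }
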
Dax Raad
2025-10-01 06:45:43 -04:00
parent 172aeaaf14
commit 41ce56494b
5 changed files with 1455 additions and 313 deletions


@@ -0,0 +1,609 @@
import z from "zod"
import * as path from "path"
import * as fs from "fs/promises"
import { readFileSync } from "fs"
import { Log } from "../util/log"
export namespace Patch {
const log = Log.create({ service: "patch" })
// Schema definitions
export const PatchSchema = z.object({
patchText: z.string().describe("The full patch text that describes all changes to be made"),
})
export type PatchParams = z.infer<typeof PatchSchema>
// Core types matching the Rust implementation
export interface ApplyPatchArgs {
patch: string
hunks: Hunk[]
workdir?: string
}
export type Hunk =
| { type: "add"; path: string; contents: string }
| { type: "delete"; path: string }
| { type: "update"; path: string; move_path?: string; chunks: UpdateFileChunk[] }
export interface UpdateFileChunk {
old_lines: string[]
new_lines: string[]
change_context?: string
is_end_of_file?: boolean
}
export interface ApplyPatchAction {
changes: Map<string, ApplyPatchFileChange>
patch: string
cwd: string
}
export type ApplyPatchFileChange =
| { type: "add"; content: string }
| { type: "delete"; content: string }
| { type: "update"; unified_diff: string; move_path?: string; new_content: string }
export interface AffectedPaths {
added: string[]
modified: string[]
deleted: string[]
}
export enum ApplyPatchError {
ParseError = "ParseError",
IoError = "IoError",
ComputeReplacements = "ComputeReplacements",
ImplicitInvocation = "ImplicitInvocation",
}
export enum MaybeApplyPatch {
Body = "Body",
ShellParseError = "ShellParseError",
PatchParseError = "PatchParseError",
NotApplyPatch = "NotApplyPatch",
}
export enum MaybeApplyPatchVerified {
Body = "Body",
ShellParseError = "ShellParseError",
CorrectnessError = "CorrectnessError",
NotApplyPatch = "NotApplyPatch",
}
// Parser implementation
function parsePatchHeader(lines: string[], startIdx: number): { filePath: string; movePath?: string; nextIdx: number } | null {
const line = lines[startIdx]
if (line.startsWith("*** Add File:")) {
const filePath = line.split(":", 2)[1]?.trim()
return filePath ? { filePath, nextIdx: startIdx + 1 } : null
}
if (line.startsWith("*** Delete File:")) {
const filePath = line.split(":", 2)[1]?.trim()
return filePath ? { filePath, nextIdx: startIdx + 1 } : null
}
if (line.startsWith("*** Update File:")) {
const filePath = line.split(":", 2)[1]?.trim()
let movePath: string | undefined
let nextIdx = startIdx + 1
// Check for move directive
if (nextIdx < lines.length && lines[nextIdx].startsWith("*** Move to:")) {
movePath = lines[nextIdx].split(":", 2)[1]?.trim()
nextIdx++
}
return filePath ? { filePath, movePath, nextIdx } : null
}
return null
}
function parseUpdateFileChunks(lines: string[], startIdx: number): { chunks: UpdateFileChunk[]; nextIdx: number } {
const chunks: UpdateFileChunk[] = []
let i = startIdx
while (i < lines.length && !lines[i].startsWith("***")) {
if (lines[i].startsWith("@@")) {
// Parse context line
const contextLine = lines[i].substring(2).trim()
i++
const oldLines: string[] = []
const newLines: string[] = []
let isEndOfFile = false
// Parse change lines
while (i < lines.length && !lines[i].startsWith("@@") && !lines[i].startsWith("***")) {
const changeLine = lines[i]
if (changeLine === "*** End of File") {
isEndOfFile = true
i++
break
}
if (changeLine.startsWith(" ")) {
// Keep line - appears in both old and new
const content = changeLine.substring(1)
oldLines.push(content)
newLines.push(content)
} else if (changeLine.startsWith("-")) {
// Remove line - only in old
oldLines.push(changeLine.substring(1))
} else if (changeLine.startsWith("+")) {
// Add line - only in new
newLines.push(changeLine.substring(1))
}
i++
}
chunks.push({
old_lines: oldLines,
new_lines: newLines,
change_context: contextLine || undefined,
is_end_of_file: isEndOfFile || undefined,
})
} else {
i++
}
}
return { chunks, nextIdx: i }
}
function parseAddFileContent(lines: string[], startIdx: number): { content: string; nextIdx: number } {
let content = ""
let i = startIdx
while (i < lines.length && !lines[i].startsWith("***")) {
if (lines[i].startsWith("+")) {
content += lines[i].substring(1) + "\n"
}
i++
}
// Remove trailing newline
if (content.endsWith("\n")) {
content = content.slice(0, -1)
}
return { content, nextIdx: i }
}
export function parsePatch(patchText: string): { hunks: Hunk[] } {
const lines = patchText.split("\n")
const hunks: Hunk[] = []
let i = 0
// Look for Begin/End patch markers
const beginMarker = "*** Begin Patch"
const endMarker = "*** End Patch"
const beginIdx = lines.findIndex(line => line.trim() === beginMarker)
const endIdx = lines.findIndex(line => line.trim() === endMarker)
if (beginIdx === -1 || endIdx === -1 || beginIdx >= endIdx) {
throw new Error("Invalid patch format: missing Begin/End markers")
}
// Parse content between markers
i = beginIdx + 1
while (i < endIdx) {
const header = parsePatchHeader(lines, i)
if (!header) {
i++
continue
}
if (lines[i].startsWith("*** Add File:")) {
const { content, nextIdx } = parseAddFileContent(lines, header.nextIdx)
hunks.push({
type: "add",
path: header.filePath,
contents: content,
})
i = nextIdx
} else if (lines[i].startsWith("*** Delete File:")) {
hunks.push({
type: "delete",
path: header.filePath,
})
i = header.nextIdx
} else if (lines[i].startsWith("*** Update File:")) {
const { chunks, nextIdx } = parseUpdateFileChunks(lines, header.nextIdx)
hunks.push({
type: "update",
path: header.filePath,
move_path: header.movePath,
chunks,
})
i = nextIdx
} else {
i++
}
}
return { hunks }
}
// Apply patch functionality
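// Recognized invocations (illustrative):
//   maybeParseApplyPatch(["apply_patch", "<patch text>"])
//   maybeParseApplyPatch(["applypatch", "<patch text>"])
//   maybeParseApplyPatch(["bash", "-lc", "apply_patch <<'EOF' ... EOF"])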
export function maybeParseApplyPatch(argv: string[]):
| { type: MaybeApplyPatch.Body; args: ApplyPatchArgs }
| { type: MaybeApplyPatch.PatchParseError; error: Error }
| { type: MaybeApplyPatch.NotApplyPatch } {
const APPLY_PATCH_COMMANDS = ["apply_patch", "applypatch"]
// Direct invocation: apply_patch <patch>
if (argv.length === 2 && APPLY_PATCH_COMMANDS.includes(argv[0])) {
try {
const { hunks } = parsePatch(argv[1])
return {
type: MaybeApplyPatch.Body,
args: {
patch: argv[1],
hunks,
},
}
} catch (error) {
return {
type: MaybeApplyPatch.PatchParseError,
error: error as Error,
}
}
}
// Bash heredoc form: bash -lc 'apply_patch <<"EOF" ...'
if (argv.length === 3 && argv[0] === "bash" && argv[1] === "-lc") {
// Simple extraction - in real implementation would need proper bash parsing
const script = argv[2]
const heredocMatch = script.match(/apply_patch\s*<<['"](\w+)['"]\s*\n([\s\S]*?)\n\1/)
if (heredocMatch) {
const patchContent = heredocMatch[2]
try {
const { hunks } = parsePatch(patchContent)
return {
type: MaybeApplyPatch.Body,
args: {
patch: patchContent,
hunks,
},
}
} catch (error) {
return {
type: MaybeApplyPatch.PatchParseError,
error: error as Error,
}
}
}
}
return { type: MaybeApplyPatch.NotApplyPatch }
}
// File content manipulation
interface ApplyPatchFileUpdate {
unified_diff: string
content: string
}
export function deriveNewContentsFromChunks(filePath: string, chunks: UpdateFileChunk[]): ApplyPatchFileUpdate {
// Read original file content
let originalContent: string
try {
originalContent = readFileSync(filePath, "utf-8")
} catch (error) {
throw new Error(`Failed to read file ${filePath}: ${error}`)
}
let originalLines = originalContent.split("\n")
// Drop trailing empty element for consistent line counting
if (originalLines.length > 0 && originalLines[originalLines.length - 1] === "") {
originalLines.pop()
}
const replacements = computeReplacements(originalLines, filePath, chunks)
let newLines = applyReplacements(originalLines, replacements)
// Ensure trailing newline
if (newLines.length === 0 || newLines[newLines.length - 1] !== "") {
newLines.push("")
}
const newContent = newLines.join("\n")
// Generate unified diff
const unifiedDiff = generateUnifiedDiff(originalContent, newContent)
return {
unified_diff: unifiedDiff,
content: newContent,
}
}
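// Each replacement is a tuple of [start line index, number of old lines to replace, replacement lines]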
function computeReplacements(originalLines: string[], filePath: string, chunks: UpdateFileChunk[]): Array<[number, number, string[]]> {
const replacements: Array<[number, number, string[]]> = []
let lineIndex = 0
for (const chunk of chunks) {
// Handle context-based seeking
if (chunk.change_context) {
const contextIdx = seekSequence(originalLines, [chunk.change_context], lineIndex)
if (contextIdx === -1) {
throw new Error(`Failed to find context '${chunk.change_context}' in ${filePath}`)
}
lineIndex = contextIdx + 1
}
// Handle pure addition (no old lines)
if (chunk.old_lines.length === 0) {
const insertionIdx = originalLines.length > 0 && originalLines[originalLines.length - 1] === ""
? originalLines.length - 1
: originalLines.length
replacements.push([insertionIdx, 0, chunk.new_lines])
continue
}
// Try to match old lines in the file
let pattern = chunk.old_lines
let newSlice = chunk.new_lines
let found = seekSequence(originalLines, pattern, lineIndex)
// Retry without trailing empty line if not found
if (found === -1 && pattern.length > 0 && pattern[pattern.length - 1] === "") {
pattern = pattern.slice(0, -1)
if (newSlice.length > 0 && newSlice[newSlice.length - 1] === "") {
newSlice = newSlice.slice(0, -1)
}
found = seekSequence(originalLines, pattern, lineIndex)
}
if (found !== -1) {
replacements.push([found, pattern.length, newSlice])
lineIndex = found + pattern.length
} else {
throw new Error(
`Failed to find expected lines in ${filePath}:\n${chunk.old_lines.join("\n")}`
)
}
}
// Sort replacements by index to apply in order
replacements.sort((a, b) => a[0] - b[0])
return replacements
}
function applyReplacements(lines: string[], replacements: Array<[number, number, string[]]>): string[] {
// Apply replacements in reverse order to avoid index shifting
const result = [...lines]
for (let i = replacements.length - 1; i >= 0; i--) {
const [startIdx, oldLen, newSegment] = replacements[i]
// Remove old lines
result.splice(startIdx, oldLen)
// Insert new lines
for (let j = 0; j < newSegment.length; j++) {
result.splice(startIdx + j, 0, newSegment[j])
}
}
return result
}
function seekSequence(lines: string[], pattern: string[], startIndex: number): number {
if (pattern.length === 0) return -1
// Simple substring search implementation
for (let i = startIndex; i <= lines.length - pattern.length; i++) {
let matches = true
for (let j = 0; j < pattern.length; j++) {
if (lines[i + j] !== pattern[j]) {
matches = false
break
}
}
if (matches) {
return i
}
}
return -1
}
function generateUnifiedDiff(oldContent: string, newContent: string): string {
const oldLines = oldContent.split("\n")
const newLines = newContent.split("\n")
// Simple diff generation - in a real implementation you'd use a proper diff algorithm
let diff = "@@ -1 +1 @@\n"
// Find changes (simplified approach)
const maxLen = Math.max(oldLines.length, newLines.length)
let hasChanges = false
for (let i = 0; i < maxLen; i++) {
const oldLine = oldLines[i] || ""
const newLine = newLines[i] || ""
if (oldLine !== newLine) {
if (oldLine) diff += `-${oldLine}\n`
if (newLine) diff += `+${newLine}\n`
hasChanges = true
} else if (oldLine) {
diff += ` ${oldLine}\n`
}
}
return hasChanges ? diff : ""
}
// Apply hunks to filesystem
export async function applyHunksToFiles(hunks: Hunk[]): Promise<AffectedPaths> {
if (hunks.length === 0) {
throw new Error("No files were modified.")
}
const added: string[] = []
const modified: string[] = []
const deleted: string[] = []
for (const hunk of hunks) {
switch (hunk.type) {
case "add":
// Create parent directories
const addDir = path.dirname(hunk.path)
if (addDir !== "." && addDir !== "/") {
await fs.mkdir(addDir, { recursive: true })
}
await fs.writeFile(hunk.path, hunk.contents, "utf-8")
added.push(hunk.path)
log.info(`Added file: ${hunk.path}`)
break
case "delete":
await fs.unlink(hunk.path)
deleted.push(hunk.path)
log.info(`Deleted file: ${hunk.path}`)
break
case "update":
const fileUpdate = deriveNewContentsFromChunks(hunk.path, hunk.chunks)
if (hunk.move_path) {
// Handle file move
const moveDir = path.dirname(hunk.move_path)
if (moveDir !== "." && moveDir !== "/") {
await fs.mkdir(moveDir, { recursive: true })
}
await fs.writeFile(hunk.move_path, fileUpdate.content, "utf-8")
await fs.unlink(hunk.path)
modified.push(hunk.move_path)
log.info(`Moved file: ${hunk.path} -> ${hunk.move_path}`)
} else {
// Regular update
await fs.writeFile(hunk.path, fileUpdate.content, "utf-8")
modified.push(hunk.path)
log.info(`Updated file: ${hunk.path}`)
}
break
}
}
return { added, modified, deleted }
}
// Main patch application function
export async function applyPatch(patchText: string): Promise<AffectedPaths> {
const { hunks } = parsePatch(patchText)
return applyHunksToFiles(hunks)
}
// Async version of maybeParseApplyPatchVerified
export async function maybeParseApplyPatchVerified(argv: string[], cwd: string): Promise<
| { type: MaybeApplyPatchVerified.Body; action: ApplyPatchAction }
| { type: MaybeApplyPatchVerified.CorrectnessError; error: Error }
| { type: MaybeApplyPatchVerified.NotApplyPatch }
> {
// Detect implicit patch invocation (raw patch without apply_patch command)
if (argv.length === 1) {
try {
parsePatch(argv[0])
return {
type: MaybeApplyPatchVerified.CorrectnessError,
error: new Error(ApplyPatchError.ImplicitInvocation),
}
} catch {
// Not a patch, continue
}
}
const result = maybeParseApplyPatch(argv)
switch (result.type) {
case MaybeApplyPatch.Body:
const { args } = result
const effectiveCwd = args.workdir ? path.resolve(cwd, args.workdir) : cwd
const changes = new Map<string, ApplyPatchFileChange>()
for (const hunk of args.hunks) {
const resolvedPath = path.resolve(effectiveCwd, hunk.type === "update" && hunk.move_path ? hunk.move_path : hunk.path)
switch (hunk.type) {
case "add":
changes.set(resolvedPath, {
type: "add",
content: hunk.contents,
})
break
case "delete":
// For delete, we need to read the current content
const deletePath = path.resolve(effectiveCwd, hunk.path)
try {
const content = await fs.readFile(deletePath, "utf-8")
changes.set(resolvedPath, {
type: "delete",
content,
})
} catch (error) {
return {
type: MaybeApplyPatchVerified.CorrectnessError,
error: new Error(`Failed to read file for deletion: ${deletePath}`),
}
}
break
case "update":
const updatePath = path.resolve(effectiveCwd, hunk.path)
try {
const fileUpdate = deriveNewContentsFromChunks(updatePath, hunk.chunks)
changes.set(resolvedPath, {
type: "update",
unified_diff: fileUpdate.unified_diff,
move_path: hunk.move_path ? path.resolve(effectiveCwd, hunk.move_path) : undefined,
new_content: fileUpdate.content,
})
} catch (error) {
return {
type: MaybeApplyPatchVerified.CorrectnessError,
error: error as Error,
}
}
break
}
}
return {
type: MaybeApplyPatchVerified.Body,
action: {
changes,
patch: args.patch,
cwd: effectiveCwd,
},
}
case MaybeApplyPatch.PatchParseError:
return {
type: MaybeApplyPatchVerified.CorrectnessError,
error: result.error,
}
case MaybeApplyPatch.NotApplyPatch:
return { type: MaybeApplyPatchVerified.NotApplyPatch }
}
}
}
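
A rough sketch of how a caller could use the verified parse to preview changes before writing anything to disk (the import path and wiring are assumptions, not part of this diff):

import { Patch } from "./src/patch"

const patchText = `*** Begin Patch
*** Add File: notes/hello.txt
+Hello World
*** End Patch`

const verified = await Patch.maybeParseApplyPatchVerified(["apply_patch", patchText], process.cwd())
if (verified.type === Patch.MaybeApplyPatchVerified.Body) {
  // action.changes maps resolved paths to "add" | "update" | "delete" descriptions
  for (const [file, change] of verified.action.changes) {
    console.log(change.type, file)
  }
}
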


@@ -3,340 +3,205 @@ import * as path from "path"
import * as fs from "fs/promises"
import { Tool } from "./tool"
import { FileTime } from "../file/time"
import DESCRIPTION from "./patch.txt"
import { Permission } from "../permission"
import { Bus } from "../bus"
import { FileWatcher } from "../file/watcher"
import { Instance } from "../project/instance"
import { Agent } from "../agent/agent"
import { Patch } from "../patch"
import { Filesystem } from "../util/filesystem"
import { createTwoFilesPatch } from "diff"
const PatchParams = z.object({
patchText: z.string().describe("The full patch text that describes all changes to be made"),
})
interface Change {
type: "add" | "update" | "delete"
old_content?: string
new_content?: string
}
interface Commit {
changes: Record<string, Change>
}
interface PatchOperation {
type: "update" | "add" | "delete"
filePath: string
hunks?: PatchHunk[]
content?: string
}
interface PatchHunk {
contextLine: string
changes: PatchChange[]
}
interface PatchChange {
type: "keep" | "remove" | "add"
content: string
}
function identifyFilesNeeded(patchText: string): string[] {
const files: string[] = []
const lines = patchText.split("\n")
for (const line of lines) {
if (line.startsWith("*** Update File:") || line.startsWith("*** Delete File:")) {
const filePath = line.split(":", 2)[1]?.trim()
if (filePath) files.push(filePath)
}
}
return files
}
function identifyFilesAdded(patchText: string): string[] {
const files: string[] = []
const lines = patchText.split("\n")
for (const line of lines) {
if (line.startsWith("*** Add File:")) {
const filePath = line.split(":", 2)[1]?.trim()
if (filePath) files.push(filePath)
}
}
return files
}
function textToPatch(patchText: string, _currentFiles: Record<string, string>): [PatchOperation[], number] {
const operations: PatchOperation[] = []
const lines = patchText.split("\n")
let i = 0
let fuzz = 0
while (i < lines.length) {
const line = lines[i]
if (line.startsWith("*** Update File:")) {
const filePath = line.split(":", 2)[1]?.trim()
if (!filePath) {
i++
continue
}
const hunks: PatchHunk[] = []
i++
while (i < lines.length && !lines[i].startsWith("***")) {
if (lines[i].startsWith("@@")) {
const contextLine = lines[i].substring(2).trim()
const changes: PatchChange[] = []
i++
while (i < lines.length && !lines[i].startsWith("@@") && !lines[i].startsWith("***")) {
const changeLine = lines[i]
if (changeLine.startsWith(" ")) {
changes.push({ type: "keep", content: changeLine.substring(1) })
} else if (changeLine.startsWith("-")) {
changes.push({
type: "remove",
content: changeLine.substring(1),
})
} else if (changeLine.startsWith("+")) {
changes.push({ type: "add", content: changeLine.substring(1) })
}
i++
}
hunks.push({ contextLine, changes })
} else {
i++
}
}
operations.push({ type: "update", filePath, hunks })
} else if (line.startsWith("*** Add File:")) {
const filePath = line.split(":", 2)[1]?.trim()
if (!filePath) {
i++
continue
}
let content = ""
i++
while (i < lines.length && !lines[i].startsWith("***")) {
if (lines[i].startsWith("+")) {
content += lines[i].substring(1) + "\n"
}
i++
}
operations.push({ type: "add", filePath, content: content.slice(0, -1) })
} else if (line.startsWith("*** Delete File:")) {
const filePath = line.split(":", 2)[1]?.trim()
if (filePath) {
operations.push({ type: "delete", filePath })
}
i++
} else {
i++
}
}
return [operations, fuzz]
}
function patchToCommit(operations: PatchOperation[], currentFiles: Record<string, string>): Commit {
const changes: Record<string, Change> = {}
for (const op of operations) {
if (op.type === "delete") {
changes[op.filePath] = {
type: "delete",
old_content: currentFiles[op.filePath] || "",
}
} else if (op.type === "add") {
changes[op.filePath] = {
type: "add",
new_content: op.content || "",
}
} else if (op.type === "update" && op.hunks) {
const originalContent = currentFiles[op.filePath] || ""
const lines = originalContent.split("\n")
for (const hunk of op.hunks) {
const contextIndex = lines.findIndex((line) => line.includes(hunk.contextLine))
if (contextIndex === -1) {
throw new Error(`Context line not found: ${hunk.contextLine}`)
}
let currentIndex = contextIndex
for (const change of hunk.changes) {
if (change.type === "keep") {
currentIndex++
} else if (change.type === "remove") {
lines.splice(currentIndex, 1)
} else if (change.type === "add") {
lines.splice(currentIndex, 0, change.content)
currentIndex++
}
}
}
changes[op.filePath] = {
type: "update",
old_content: originalContent,
new_content: lines.join("\n"),
}
}
}
return { changes }
}
function generateDiff(oldContent: string, newContent: string, filePath: string): [string, number, number] {
// Mock implementation - would need actual diff generation
const lines1 = oldContent.split("\n")
const lines2 = newContent.split("\n")
const additions = Math.max(0, lines2.length - lines1.length)
const removals = Math.max(0, lines1.length - lines2.length)
return [`--- ${filePath}\n+++ ${filePath}\n`, additions, removals]
}
async function applyCommit(
commit: Commit,
writeFile: (path: string, content: string) => Promise<void>,
deleteFile: (path: string) => Promise<void>,
): Promise<void> {
for (const [filePath, change] of Object.entries(commit.changes)) {
if (change.type === "delete") {
await deleteFile(filePath)
} else if (change.new_content !== undefined) {
await writeFile(filePath, change.new_content)
}
}
}
export const PatchTool = Tool.define("patch", {
description: DESCRIPTION,
description: "Apply a patch to modify multiple files. Supports adding, updating, and deleting files with context-aware changes.",
parameters: PatchParams,
execute: async (params, ctx) => {
// Identify all files needed for the patch and verify they've been read
const filesToRead = identifyFilesNeeded(params.patchText)
for (const filePath of filesToRead) {
let absPath = filePath
if (!path.isAbsolute(absPath)) {
absPath = path.resolve(process.cwd(), absPath)
async execute(params, ctx) {
if (!params.patchText) {
throw new Error("patchText is required")
}
// Parse the patch to get hunks
let hunks: Patch.Hunk[]
try {
const parseResult = Patch.parsePatch(params.patchText)
hunks = parseResult.hunks
} catch (error) {
throw new Error(`Failed to parse patch: ${error}`)
}
if (hunks.length === 0) {
throw new Error("No file changes found in patch")
}
// Validate file paths and check permissions
const agent = await Agent.get(ctx.agent)
const fileChanges: Array<{
filePath: string
oldContent: string
newContent: string
type: "add" | "update" | "delete" | "move"
movePath?: string
}> = []
let totalDiff = ""
for (const hunk of hunks) {
const filePath = path.resolve(Instance.directory, hunk.path)
if (!Filesystem.contains(Instance.directory, filePath)) {
throw new Error(`File ${filePath} is not in the current working directory`)
}
await FileTime.assert(ctx.sessionID, absPath)
try {
const stats = await fs.stat(absPath)
if (stats.isDirectory()) {
throw new Error(`path is a directory, not a file: ${absPath}`)
}
} catch (error: any) {
if (error.code === "ENOENT") {
throw new Error(`file not found: ${absPath}`)
}
throw new Error(`failed to access file: ${error.message}`)
switch (hunk.type) {
case "add":
if (hunk.type === "add") {
const oldContent = ""
const newContent = hunk.contents
const diff = createTwoFilesPatch(filePath, filePath, oldContent, newContent)
fileChanges.push({
filePath,
oldContent,
newContent,
type: "add",
})
totalDiff += diff + "\n"
}
break
case "update":
// Check if file exists for update
const stats = await fs.stat(filePath).catch(() => null)
if (!stats || stats.isDirectory()) {
throw new Error(`File not found or is directory: ${filePath}`)
}
// Read file and update time tracking (like edit tool does)
await FileTime.assert(ctx.sessionID, filePath)
const oldContent = await fs.readFile(filePath, "utf-8")
let newContent = oldContent
// Apply the update chunks to get new content
try {
const fileUpdate = Patch.deriveNewContentsFromChunks(filePath, hunk.chunks)
newContent = fileUpdate.content
} catch (error) {
throw new Error(`Failed to apply update to ${filePath}: ${error}`)
}
const diff = createTwoFilesPatch(filePath, filePath, oldContent, newContent)
fileChanges.push({
filePath,
oldContent,
newContent,
type: hunk.move_path ? "move" : "update",
movePath: hunk.move_path ? path.resolve(Instance.directory, hunk.move_path) : undefined,
})
totalDiff += diff + "\n"
break
case "delete":
// Check if file exists for deletion
await FileTime.assert(ctx.sessionID, filePath)
const contentToDelete = await fs.readFile(filePath, "utf-8")
const deleteDiff = createTwoFilesPatch(filePath, filePath, contentToDelete, "")
fileChanges.push({
filePath,
oldContent: contentToDelete,
newContent: "",
type: "delete",
})
totalDiff += deleteDiff + "\n"
break
}
}
// Check for new files to ensure they don't already exist
const filesToAdd = identifyFilesAdded(params.patchText)
for (const filePath of filesToAdd) {
let absPath = filePath
if (!path.isAbsolute(absPath)) {
absPath = path.resolve(process.cwd(), absPath)
}
try {
await fs.stat(absPath)
throw new Error(`file already exists and cannot be added: ${absPath}`)
} catch (error: any) {
if (error.code !== "ENOENT") {
throw new Error(`failed to check file: ${error.message}`)
}
}
// Check permissions if needed
if (agent.permission.edit === "ask") {
await Permission.ask({
type: "edit",
sessionID: ctx.sessionID,
messageID: ctx.messageID,
callID: ctx.callID,
title: `Apply patch to ${fileChanges.length} files`,
metadata: {
diff: totalDiff,
},
})
}
// Load all required files
const currentFiles: Record<string, string> = {}
for (const filePath of filesToRead) {
let absPath = filePath
if (!path.isAbsolute(absPath)) {
absPath = path.resolve(process.cwd(), absPath)
}
try {
const content = await fs.readFile(absPath, "utf-8")
currentFiles[filePath] = content
} catch (error: any) {
throw new Error(`failed to read file ${absPath}: ${error.message}`)
}
}
// Process the patch
const [patch, fuzz] = textToPatch(params.patchText, currentFiles)
if (fuzz > 3) {
throw new Error(`patch contains fuzzy matches (fuzz level: ${fuzz}). Please make your context lines more precise`)
}
// Convert patch to commit
const commit = patchToCommit(patch, currentFiles)
// Apply the changes to the filesystem
await applyCommit(
commit,
async (filePath: string, content: string) => {
let absPath = filePath
if (!path.isAbsolute(absPath)) {
absPath = path.resolve(process.cwd(), absPath)
}
// Create parent directories if needed
const dir = path.dirname(absPath)
await fs.mkdir(dir, { recursive: true })
await fs.writeFile(absPath, content, "utf-8")
},
async (filePath: string) => {
let absPath = filePath
if (!path.isAbsolute(absPath)) {
absPath = path.resolve(process.cwd(), absPath)
}
await fs.unlink(absPath)
},
)
// Calculate statistics
// Apply the changes
const changedFiles: string[] = []
let totalAdditions = 0
let totalRemovals = 0
for (const [filePath, change] of Object.entries(commit.changes)) {
let absPath = filePath
if (!path.isAbsolute(absPath)) {
absPath = path.resolve(process.cwd(), absPath)
for (const change of fileChanges) {
switch (change.type) {
case "add":
// Create parent directories
const addDir = path.dirname(change.filePath)
if (addDir !== "." && addDir !== "/") {
await fs.mkdir(addDir, { recursive: true })
}
await fs.writeFile(change.filePath, change.newContent, "utf-8")
changedFiles.push(change.filePath)
break
case "update":
await fs.writeFile(change.filePath, change.newContent, "utf-8")
changedFiles.push(change.filePath)
break
case "move":
if (change.movePath) {
// Create parent directories for destination
const moveDir = path.dirname(change.movePath)
if (moveDir !== "." && moveDir !== "/") {
await fs.mkdir(moveDir, { recursive: true })
}
// Write to new location
await fs.writeFile(change.movePath, change.newContent, "utf-8")
// Remove original
await fs.unlink(change.filePath)
changedFiles.push(change.movePath)
}
break
case "delete":
await fs.unlink(change.filePath)
changedFiles.push(change.filePath)
break
}
// Update file time tracking
FileTime.read(ctx.sessionID, change.filePath)
if (change.movePath) {
FileTime.read(ctx.sessionID, change.movePath)
}
changedFiles.push(absPath)
const oldContent = change.old_content || ""
const newContent = change.new_content || ""
// Calculate diff statistics
const [, additions, removals] = generateDiff(oldContent, newContent, filePath)
totalAdditions += additions
totalRemovals += removals
FileTime.read(ctx.sessionID, absPath)
}
const result = `Patch applied successfully. ${changedFiles.length} files changed, ${totalAdditions} additions, ${totalRemovals} removals`
const output = result
// Publish file change events
for (const filePath of changedFiles) {
await Bus.publish(FileWatcher.Event.Updated, { file: filePath, event: "change" })
}
// Generate output summary
const relativePaths = changedFiles.map(filePath => path.relative(Instance.worktree, filePath))
const summary = `${fileChanges.length} files changed`
return {
title: `${filesToRead.length} files`,
title: summary,
metadata: {
changed: changedFiles,
additions: totalAdditions,
removals: totalRemovals,
diff: totalDiff,
},
output,
output: `Patch applied successfully. ${summary}:\n${relativePaths.map(p => ` ${p}`).join("\n")}`,
}
},
})
})


@@ -0,0 +1,66 @@
import { describe, test, expect } from "bun:test"
import { Patch } from "../../src/patch"
describe("Patch integration", () => {
test("should be compatible with existing tool system", () => {
// Test that our Patch namespace can be imported and used
expect(Patch).toBeDefined()
expect(Patch.parsePatch).toBeDefined()
expect(Patch.applyPatch).toBeDefined()
expect(Patch.maybeParseApplyPatch).toBeDefined()
expect(Patch.PatchSchema).toBeDefined()
})
test("should parse patch format compatible with existing tool", () => {
const patchText = `*** Begin Patch
*** Add File: test-integration.txt
+Integration test content
*** End Patch`
const result = Patch.parsePatch(patchText)
expect(result.hunks).toHaveLength(1)
expect(result.hunks[0].type).toBe("add")
expect(result.hunks[0].path).toBe("test-integration.txt")
if (result.hunks[0].type === "add") {
expect(result.hunks[0].contents).toBe("Integration test content")
}
})
test("should handle complex patch with multiple operations", () => {
const patchText = `*** Begin Patch
*** Add File: new-file.txt
+This is a new file
+with multiple lines
*** Update File: existing.txt
@@
old content
-line to remove
+line to add
more content
*** Delete File: old-file.txt
*** End Patch`
const result = Patch.parsePatch(patchText)
expect(result.hunks).toHaveLength(3)
// Check add operation
expect(result.hunks[0].type).toBe("add")
if (result.hunks[0].type === "add") {
expect(result.hunks[0].contents).toBe("This is a new file\nwith multiple lines")
}
// Check update operation
expect(result.hunks[1].type).toBe("update")
if (result.hunks[1].type === "update") {
expect(result.hunks[1].path).toBe("existing.txt")
expect(result.hunks[1].chunks).toHaveLength(1)
expect(result.hunks[1].chunks[0].old_lines).toEqual(["old content", "line to remove", "more content"])
expect(result.hunks[1].chunks[0].new_lines).toEqual(["old content", "line to add", "more content"])
expect(result.hunks[1].chunks[0].change_context).toBeUndefined()
}
// Check delete operation
expect(result.hunks[2].type).toBe("delete")
expect(result.hunks[2].path).toBe("old-file.txt")
})
})


@@ -0,0 +1,339 @@
import { describe, test, expect, beforeEach, afterEach } from "bun:test"
import { Patch } from "../../src/patch"
import * as fs from "fs/promises"
import * as path from "path"
import { tmpdir } from "os"
describe("Patch namespace", () => {
let tempDir: string
beforeEach(async () => {
tempDir = await fs.mkdtemp(path.join(tmpdir(), "patch-test-"))
})
afterEach(async () => {
// Clean up temp directory
await fs.rm(tempDir, { recursive: true, force: true })
})
describe("parsePatch", () => {
test("should parse simple add file patch", () => {
const patchText = `*** Begin Patch
*** Add File: test.txt
+Hello World
*** End Patch`
const result = Patch.parsePatch(patchText)
expect(result.hunks).toHaveLength(1)
expect(result.hunks[0]).toEqual({
type: "add",
path: "test.txt",
contents: "Hello World",
})
})
test("should parse delete file patch", () => {
const patchText = `*** Begin Patch
*** Delete File: old.txt
*** End Patch`
const result = Patch.parsePatch(patchText)
expect(result.hunks).toHaveLength(1)
const hunk = result.hunks[0]
expect(hunk.type).toBe("delete")
expect(hunk.path).toBe("old.txt")
})
test("should parse patch with multiple hunks", () => {
const patchText = `*** Begin Patch
*** Add File: new.txt
+This is a new file
*** Update File: existing.txt
@@
old line
-new line
+updated line
*** End Patch`
const result = Patch.parsePatch(patchText)
expect(result.hunks).toHaveLength(2)
expect(result.hunks[0].type).toBe("add")
expect(result.hunks[1].type).toBe("update")
})
test("should parse file move operation", () => {
const patchText = `*** Begin Patch
*** Update File: old-name.txt
*** Move to: new-name.txt
@@
-Old content
+New content
*** End Patch`
const result = Patch.parsePatch(patchText)
expect(result.hunks).toHaveLength(1)
const hunk = result.hunks[0]
expect(hunk.type).toBe("update")
expect(hunk.path).toBe("old-name.txt")
if (hunk.type === "update") {
expect(hunk.move_path).toBe("new-name.txt")
}
})
test("should throw error for invalid patch format", () => {
const invalidPatch = `This is not a valid patch`
expect(() => Patch.parsePatch(invalidPatch)).toThrow("Invalid patch format")
})
})
describe("maybeParseApplyPatch", () => {
test("should parse direct apply_patch command", () => {
const patchText = `*** Begin Patch
*** Add File: test.txt
+Content
*** End Patch`
const result = Patch.maybeParseApplyPatch(["apply_patch", patchText])
expect(result.type).toBe(Patch.MaybeApplyPatch.Body)
if (result.type === Patch.MaybeApplyPatch.Body) {
expect(result.args.patch).toBe(patchText)
expect(result.args.hunks).toHaveLength(1)
}
})
test("should parse applypatch command", () => {
const patchText = `*** Begin Patch
*** Add File: test.txt
+Content
*** End Patch`
const result = Patch.maybeParseApplyPatch(["applypatch", patchText])
expect(result.type).toBe(Patch.MaybeApplyPatch.Body)
})
test("should handle bash heredoc format", () => {
const script = `apply_patch <<'PATCH'
*** Begin Patch
*** Add File: test.txt
+Content
*** End Patch
PATCH`
const result = Patch.maybeParseApplyPatch(["bash", "-lc", script])
expect(result.type).toBe(Patch.MaybeApplyPatch.Body)
if (result.type === Patch.MaybeApplyPatch.Body) {
expect(result.args.hunks).toHaveLength(1)
}
})
test("should return NotApplyPatch for non-patch commands", () => {
const result = Patch.maybeParseApplyPatch(["echo", "hello"])
expect(result.type).toBe(Patch.MaybeApplyPatch.NotApplyPatch)
})
})
describe("applyPatch", () => {
test("should add a new file", async () => {
const patchText = `*** Begin Patch
*** Add File: ${tempDir}/new-file.txt
+Hello World
+This is a new file
*** End Patch`
const result = await Patch.applyPatch(patchText)
expect(result.added).toHaveLength(1)
expect(result.modified).toHaveLength(0)
expect(result.deleted).toHaveLength(0)
const content = await fs.readFile(result.added[0], "utf-8")
expect(content).toBe("Hello World\nThis is a new file")
})
test("should delete an existing file", async () => {
const filePath = path.join(tempDir, "to-delete.txt")
await fs.writeFile(filePath, "This file will be deleted")
const patchText = `*** Begin Patch
*** Delete File: ${filePath}
*** End Patch`
const result = await Patch.applyPatch(patchText)
expect(result.deleted).toHaveLength(1)
expect(result.deleted[0]).toBe(filePath)
const exists = await fs.access(filePath).then(() => true).catch(() => false)
expect(exists).toBe(false)
})
test("should update an existing file", async () => {
const filePath = path.join(tempDir, "to-update.txt")
await fs.writeFile(filePath, "line 1\nline 2\nline 3\n")
const patchText = `*** Begin Patch
*** Update File: ${filePath}
@@
line 1
-line 2
+line 2 updated
line 3
*** End Patch`
const result = await Patch.applyPatch(patchText)
expect(result.modified).toHaveLength(1)
expect(result.modified[0]).toBe(filePath)
const content = await fs.readFile(filePath, "utf-8")
expect(content).toBe("line 1\nline 2 updated\nline 3\n")
})
test("should move and update a file", async () => {
const oldPath = path.join(tempDir, "old-name.txt")
const newPath = path.join(tempDir, "new-name.txt")
await fs.writeFile(oldPath, "old content\n")
const patchText = `*** Begin Patch
*** Update File: ${oldPath}
*** Move to: ${newPath}
@@
-old content
+new content
*** End Patch`
const result = await Patch.applyPatch(patchText)
expect(result.modified).toHaveLength(1)
expect(result.modified[0]).toBe(newPath)
const oldExists = await fs.access(oldPath).then(() => true).catch(() => false)
expect(oldExists).toBe(false)
const newContent = await fs.readFile(newPath, "utf-8")
expect(newContent).toBe("new content\n")
})
test("should handle multiple operations in one patch", async () => {
const file1 = path.join(tempDir, "file1.txt")
const file2 = path.join(tempDir, "file2.txt")
const file3 = path.join(tempDir, "file3.txt")
await fs.writeFile(file1, "content 1")
await fs.writeFile(file2, "content 2")
const patchText = `*** Begin Patch
*** Add File: ${file3}
+new file content
*** Update File: ${file1}
@@
-content 1
+updated content 1
*** Delete File: ${file2}
*** End Patch`
const result = await Patch.applyPatch(patchText)
expect(result.added).toHaveLength(1)
expect(result.modified).toHaveLength(1)
expect(result.deleted).toHaveLength(1)
})
test("should create parent directories when adding files", async () => {
const nestedPath = path.join(tempDir, "deep", "nested", "file.txt")
const patchText = `*** Begin Patch
*** Add File: ${nestedPath}
+Deep nested content
*** End Patch`
const result = await Patch.applyPatch(patchText)
expect(result.added).toHaveLength(1)
expect(result.added[0]).toBe(nestedPath)
const exists = await fs.access(nestedPath).then(() => true).catch(() => false)
expect(exists).toBe(true)
})
})
describe("error handling", () => {
test("should throw error when updating non-existent file", async () => {
const nonExistent = path.join(tempDir, "does-not-exist.txt")
const patchText = `*** Begin Patch
*** Update File: ${nonExistent}
@@
-old line
+new line
*** End Patch`
await expect(Patch.applyPatch(patchText)).rejects.toThrow()
})
test("should throw error when deleting non-existent file", async () => {
const nonExistent = path.join(tempDir, "does-not-exist.txt")
const patchText = `*** Begin Patch
*** Delete File: ${nonExistent}
*** End Patch`
await expect(Patch.applyPatch(patchText)).rejects.toThrow()
})
})
describe("edge cases", () => {
test("should handle empty files", async () => {
const emptyFile = path.join(tempDir, "empty.txt")
await fs.writeFile(emptyFile, "")
const patchText = `*** Begin Patch
*** Update File: ${emptyFile}
@@
+First line
*** End Patch`
const result = await Patch.applyPatch(patchText)
expect(result.modified).toHaveLength(1)
const content = await fs.readFile(emptyFile, "utf-8")
expect(content).toBe("First line\n")
})
test("should handle files with no trailing newline", async () => {
const filePath = path.join(tempDir, "no-newline.txt")
await fs.writeFile(filePath, "no newline")
const patchText = `*** Begin Patch
*** Update File: ${filePath}
@@
-no newline
+has newline now
*** End Patch`
const result = await Patch.applyPatch(patchText)
expect(result.modified).toHaveLength(1)
const content = await fs.readFile(filePath, "utf-8")
expect(content).toBe("has newline now\n")
})
test("should handle multiple update chunks in single file", async () => {
const filePath = path.join(tempDir, "multi-chunk.txt")
await fs.writeFile(filePath, "line 1\nline 2\nline 3\nline 4\n")
const patchText = `*** Begin Patch
*** Update File: ${filePath}
@@
line 1
-line 2
+LINE 2
@@
line 3
-line 4
+LINE 4
*** End Patch`
const result = await Patch.applyPatch(patchText)
expect(result.modified).toHaveLength(1)
const content = await fs.readFile(filePath, "utf-8")
expect(content).toBe("line 1\nLINE 2\nline 3\nLINE 4\n")
})
})
})


@@ -0,0 +1,263 @@
import { describe, expect, test } from "bun:test"
import path from "path"
import { PatchTool } from "../../src/tool/patch"
import { Log } from "../../src/util/log"
import { Instance } from "../../src/project/instance"
import { tmpdir } from "../fixture/fixture"
import * as fs from "fs/promises"
const ctx = {
sessionID: "test",
messageID: "",
toolCallID: "",
agent: "build",
abort: AbortSignal.any([]),
metadata: () => {},
}
const patchTool = await PatchTool.init()
Log.init({ print: false })
describe("tool.patch", () => {
test("should validate required parameters", async () => {
await Instance.provide({
directory: "/tmp",
fn: async () => {
await expect(
patchTool.execute({ patchText: "" }, ctx)
).rejects.toThrow("patchText is required")
},
})
})
test("should validate patch format", async () => {
await Instance.provide({
directory: "/tmp",
fn: async () => {
await expect(
patchTool.execute({ patchText: "invalid patch" }, ctx)
).rejects.toThrow("Failed to parse patch")
},
})
})
test("should handle empty patch", async () => {
await Instance.provide({
directory: "/tmp",
fn: async () => {
const emptyPatch = `*** Begin Patch
*** End Patch`
await expect(
patchTool.execute({ patchText: emptyPatch }, ctx)
).rejects.toThrow("No file changes found in patch")
},
})
})
test("should reject files outside working directory", async () => {
await Instance.provide({
directory: "/tmp",
fn: async () => {
const maliciousPatch = `*** Begin Patch
*** Add File: /etc/passwd
+malicious content
*** End Patch`
await expect(
patchTool.execute({ patchText: maliciousPatch }, ctx)
).rejects.toThrow("is not in the current working directory")
},
})
})
test("should handle simple add file operation", async () => {
await using fixture = await tmpdir()
await Instance.provide({
directory: fixture.path,
fn: async () => {
const patchText = `*** Begin Patch
*** Add File: test-file.txt
+Hello World
+This is a test file
*** End Patch`
const result = await patchTool.execute({ patchText }, ctx)
expect(result.title).toContain("files changed")
expect(result.metadata.diff).toBeDefined()
expect(result.output).toContain("Patch applied successfully")
// Verify file was created
const filePath = path.join(fixture.path, "test-file.txt")
const content = await fs.readFile(filePath, "utf-8")
expect(content).toBe("Hello World\nThis is a test file")
},
})
})
test("should handle file with context update", async () => {
await using fixture = await tmpdir()
await Instance.provide({
directory: fixture.path,
fn: async () => {
const patchText = `*** Begin Patch
*** Add File: config.js
+const API_KEY = "test-key"
+const DEBUG = false
+const VERSION = "1.0"
*** End Patch`
const result = await patchTool.execute({ patchText }, ctx)
expect(result.title).toContain("files changed")
expect(result.metadata.diff).toBeDefined()
expect(result.output).toContain("Patch applied successfully")
// Verify file was created with correct content
const filePath = path.join(fixture.path, "config.js")
const content = await fs.readFile(filePath, "utf-8")
expect(content).toBe("const API_KEY = \"test-key\"\nconst DEBUG = false\nconst VERSION = \"1.0\"")
},
})
})
test("should handle multiple file operations", async () => {
await using fixture = await tmpdir()
await Instance.provide({
directory: fixture.path,
fn: async () => {
const patchText = `*** Begin Patch
*** Add File: file1.txt
+Content of file 1
*** Add File: file2.txt
+Content of file 2
*** Add File: file3.txt
+Content of file 3
*** End Patch`
const result = await patchTool.execute({ patchText }, ctx)
expect(result.title).toContain("3 files changed")
expect(result.metadata.diff).toBeDefined()
expect(result.output).toContain("Patch applied successfully")
// Verify all files were created
for (let i = 1; i <= 3; i++) {
const filePath = path.join(fixture.path, `file${i}.txt`)
const content = await fs.readFile(filePath, "utf-8")
expect(content).toBe(`Content of file ${i}`)
}
},
})
})
test("should create parent directories when adding nested files", async () => {
await using fixture = await tmpdir()
await Instance.provide({
directory: fixture.path,
fn: async () => {
const patchText = `*** Begin Patch
*** Add File: deep/nested/file.txt
+Deep nested content
*** End Patch`
const result = await patchTool.execute({ patchText }, ctx)
expect(result.title).toContain("files changed")
expect(result.output).toContain("Patch applied successfully")
// Verify nested file was created
const nestedPath = path.join(fixture.path, "deep", "nested", "file.txt")
const exists = await fs.access(nestedPath).then(() => true).catch(() => false)
expect(exists).toBe(true)
const content = await fs.readFile(nestedPath, "utf-8")
expect(content).toBe("Deep nested content")
},
})
})
test("should generate proper unified diff in metadata", async () => {
await using fixture = await tmpdir()
await Instance.provide({
directory: fixture.path,
fn: async () => {
// First create a file with simple content
const patchText1 = `*** Begin Patch
*** Add File: test.txt
+line 1
+line 2
+line 3
*** End Patch`
await patchTool.execute({ patchText: patchText1 }, ctx)
// Now create an update patch
const patchText2 = `*** Begin Patch
*** Update File: test.txt
@@
line 1
-line 2
+line 2 updated
line 3
*** End Patch`
const result = await patchTool.execute({ patchText: patchText2 }, ctx)
expect(result.metadata.diff).toBeDefined()
expect(result.metadata.diff).toContain("@@")
expect(result.metadata.diff).toContain("-line 2")
expect(result.metadata.diff).toContain("+line 2 updated")
},
})
})
test("should handle complex patch with multiple operations", async () => {
await using fixture = await tmpdir()
await Instance.provide({
directory: fixture.path,
fn: async () => {
const patchText = `*** Begin Patch
*** Add File: new.txt
+This is a new file
+with multiple lines
*** Add File: existing.txt
+old content
+new line
+more content
*** Add File: config.json
+{
+ "version": "1.0",
+ "debug": true
+}
*** End Patch`
const result = await patchTool.execute({ patchText }, ctx)
expect(result.title).toContain("3 files changed")
expect(result.metadata.diff).toBeDefined()
expect(result.output).toContain("Patch applied successfully")
// Verify all files were created
const newPath = path.join(fixture.path, "new.txt")
const newContent = await fs.readFile(newPath, "utf-8")
expect(newContent).toBe("This is a new file\nwith multiple lines")
const existingPath = path.join(fixture.path, "existing.txt")
const existingContent = await fs.readFile(existingPath, "utf-8")
expect(existingContent).toBe("old content\nnew line\nmore content")
const configPath = path.join(fixture.path, "config.json")
const configContent = await fs.readFile(configPath, "utf-8")
expect(configContent).toBe('{\n "version": "1.0",\n "debug": true\n}')
},
})
})
})