
feat: add apply_patch native tool (#9663)

Co-authored-by: daniel-lxs <[email protected]>
Hannes Rudolph, 1 month ago
parent
commit
933534896a

+ 1 - 0
packages/types/src/tool.ts

@@ -21,6 +21,7 @@ export const toolNames = [
 	"apply_diff",
 	"insert_content",
 	"search_and_replace",
+	"apply_patch",
 	"search_files",
 	"list_files",
 	"list_code_definition_names",

+ 16 - 0
src/core/assistant-message/NativeToolCallParser.ts

@@ -519,6 +519,14 @@ export class NativeToolCallParser {
 				}
 				break
 
+			case "apply_patch":
+				if (partialArgs.patch !== undefined) {
+					nativeArgs = {
+						patch: partialArgs.patch,
+					}
+				}
+				break
+
 			// Add other tools as needed
 			default:
 				break
@@ -768,6 +776,14 @@ export class NativeToolCallParser {
 					}
 					break
 
+				case "apply_patch":
+					if (args.patch !== undefined) {
+						nativeArgs = {
+							patch: args.patch,
+						} as NativeArgsFor<TName>
+					}
+					break
+
 				default:
 					break
 			}

+ 13 - 0
src/core/assistant-message/presentAssistantMessage.ts

@@ -18,6 +18,7 @@ import { writeToFileTool } from "../tools/WriteToFileTool"
 import { applyDiffTool } from "../tools/MultiApplyDiffTool"
 import { insertContentTool } from "../tools/InsertContentTool"
 import { searchAndReplaceTool } from "../tools/SearchAndReplaceTool"
+import { applyPatchTool } from "../tools/ApplyPatchTool"
 import { listCodeDefinitionNamesTool } from "../tools/ListCodeDefinitionNamesTool"
 import { searchFilesTool } from "../tools/SearchFilesTool"
 import { browserActionTool } from "../tools/BrowserActionTool"
@@ -382,6 +383,8 @@ export async function presentAssistantMessage(cline: Task) {
 						return `[${block.name} for '${block.params.path}']`
 					case "search_and_replace":
 						return `[${block.name} for '${block.params.path}']`
+					case "apply_patch":
+						return `[${block.name}]`
 					case "list_files":
 						return `[${block.name} for '${block.params.path}']`
 					case "list_code_definition_names":
@@ -828,6 +831,16 @@ export async function presentAssistantMessage(cline: Task) {
 						toolProtocol,
 					})
 					break
+				case "apply_patch":
+					await checkpointSaveAndMark(cline)
+					await applyPatchTool.handle(cline, block as ToolUse<"apply_patch">, {
+						askApproval,
+						handleError,
+						pushToolResult,
+						removeClosingTag,
+						toolProtocol,
+					})
+					break
 				case "read_file":
 					// Check if this model should use the simplified single-file read tool
 					// Only use simplified tool for XML protocol - native protocol works with standard tool

+ 61 - 0
src/core/prompts/tools/native-tools/apply_patch.ts

@@ -0,0 +1,61 @@
+import type OpenAI from "openai"
+
+const apply_patch_DESCRIPTION = `Apply patches to files using a stripped-down, file-oriented diff format. This tool supports creating new files, deleting files, and updating existing files with precise changes.
+
+The patch format uses a simple, human-readable structure:
+
+*** Begin Patch
+[ one or more file sections ]
+*** End Patch
+
+Each file section starts with one of three headers:
+- *** Add File: <path> - Create a new file. Every following line is a + line (the initial contents).
+- *** Delete File: <path> - Remove an existing file. Nothing follows.
+- *** Update File: <path> - Patch an existing file in place.
+
+For Update File operations:
+- May be immediately followed by *** Move to: <new path> if you want to rename the file.
+- Then one or more "hunks", each introduced by @@ (optionally followed by context like a class or function name).
+- Within a hunk each line starts with:
+  - ' ' (space) for context lines (unchanged)
+  - '-' for lines to remove
+  - '+' for lines to add
+
+Context guidelines:
+- Show 3 lines of code above and below each change.
+- Use @@ with a class/function name if 3 lines of context is insufficient to uniquely identify the location.
+- Multiple @@ statements can be used for deeply nested code.
+
+Example patch:
+*** Begin Patch
+*** Add File: hello.txt
++Hello world
+*** Update File: src/app.py
+*** Move to: src/main.py
+@@ def greet():
+-print("Hi")
++print("Hello, world!")
+*** Delete File: obsolete.txt
+*** End Patch`
+
+const apply_patch = {
+	type: "function",
+	function: {
+		name: "apply_patch",
+		description: apply_patch_DESCRIPTION,
+		parameters: {
+			type: "object",
+			properties: {
+				patch: {
+					type: "string",
+					description:
+						"The complete patch text in the apply_patch format, starting with '*** Begin Patch' and ending with '*** End Patch'.",
+				},
+			},
+			required: ["patch"],
+			additionalProperties: false,
+		},
+	},
+} satisfies OpenAI.Chat.ChatCompletionTool
+
+export default apply_patch
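
For illustration, a model invocation of this tool carries the whole patch as the single "patch" string argument declared above. A minimal sketch of such a call, assuming the OpenAI-style function-call shape this definition targets (the paths and contents are made up, not taken from this PR):

// Hypothetical tool call as it could appear in a chat completion response.
// "arguments" is a JSON string whose only property is "patch".
const exampleToolCall = {
	type: "function" as const,
	function: {
		name: "apply_patch",
		arguments: JSON.stringify({
			patch: [
				"*** Begin Patch",
				"*** Update File: src/app.py",
				"@@ def greet():",
				'-print("Hi")',
				'+print("Hello, world!")',
				"*** End Patch",
			].join("\n"),
		}),
	},
}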

+ 2 - 0
src/core/prompts/tools/native-tools/index.ts

@@ -1,6 +1,7 @@
 import type OpenAI from "openai"
 import accessMcpResource from "./access_mcp_resource"
 import { apply_diff } from "./apply_diff"
+import applyPatch from "./apply_patch"
 import askFollowupQuestion from "./ask_followup_question"
 import attemptCompletion from "./attempt_completion"
 import browserAction from "./browser_action"
@@ -33,6 +34,7 @@ export function getNativeTools(partialReadsEnabled: boolean = true): OpenAI.Chat
 	return [
 		accessMcpResource,
 		apply_diff,
+		applyPatch,
 		askFollowupQuestion,
 		attemptCompletion,
 		browserAction,

+ 450 - 0
src/core/tools/ApplyPatchTool.ts

@@ -0,0 +1,450 @@
+import fs from "fs/promises"
+import path from "path"
+
+import { getReadablePath } from "../../utils/path"
+import { isPathOutsideWorkspace } from "../../utils/pathUtils"
+import { Task } from "../task/Task"
+import { formatResponse } from "../prompts/responses"
+import { ClineSayTool } from "../../shared/ExtensionMessage"
+import { RecordSource } from "../context-tracking/FileContextTrackerTypes"
+import { fileExistsAtPath } from "../../utils/fs"
+import { DEFAULT_WRITE_DELAY_MS } from "@roo-code/types"
+import { EXPERIMENT_IDS, experiments } from "../../shared/experiments"
+import { sanitizeUnifiedDiff, computeDiffStats } from "../diff/stats"
+import { BaseTool, ToolCallbacks } from "./BaseTool"
+import type { ToolUse } from "../../shared/tools"
+import { parsePatch, ParseError, processAllHunks } from "./apply-patch"
+import type { ApplyPatchFileChange } from "./apply-patch"
+
+interface ApplyPatchParams {
+	patch: string
+}
+
+export class ApplyPatchTool extends BaseTool<"apply_patch"> {
+	readonly name = "apply_patch" as const
+
+	parseLegacy(params: Partial<Record<string, string>>): ApplyPatchParams {
+		return {
+			patch: params.patch || "",
+		}
+	}
+
+	async execute(params: ApplyPatchParams, task: Task, callbacks: ToolCallbacks): Promise<void> {
+		const { patch } = params
+		const { askApproval, handleError, pushToolResult, toolProtocol } = callbacks
+
+		try {
+			// Validate required parameters
+			if (!patch) {
+				task.consecutiveMistakeCount++
+				task.recordToolError("apply_patch")
+				pushToolResult(await task.sayAndCreateMissingParamError("apply_patch", "patch"))
+				return
+			}
+
+			// Parse the patch
+			let parsedPatch
+			try {
+				parsedPatch = parsePatch(patch)
+			} catch (error) {
+				task.consecutiveMistakeCount++
+				task.recordToolError("apply_patch")
+				const errorMessage =
+					error instanceof ParseError
+						? `Invalid patch format: ${error.message}`
+						: `Failed to parse patch: ${error instanceof Error ? error.message : String(error)}`
+				pushToolResult(formatResponse.toolError(errorMessage))
+				return
+			}
+
+			if (parsedPatch.hunks.length === 0) {
+				pushToolResult("No file operations found in patch.")
+				return
+			}
+
+			// Process each hunk
+			const readFile = async (filePath: string): Promise<string> => {
+				const absolutePath = path.resolve(task.cwd, filePath)
+				return await fs.readFile(absolutePath, "utf8")
+			}
+
+			let changes: ApplyPatchFileChange[]
+			try {
+				changes = await processAllHunks(parsedPatch.hunks, readFile)
+			} catch (error) {
+				task.consecutiveMistakeCount++
+				task.recordToolError("apply_patch")
+				const errorMessage = `Failed to process patch: ${error instanceof Error ? error.message : String(error)}`
+				pushToolResult(formatResponse.toolError(errorMessage))
+				return
+			}
+
+			// Process each file change
+			for (const change of changes) {
+				const relPath = change.path
+				const absolutePath = path.resolve(task.cwd, relPath)
+
+				// Check access permissions
+				const accessAllowed = task.rooIgnoreController?.validateAccess(relPath)
+				if (!accessAllowed) {
+					await task.say("rooignore_error", relPath)
+					pushToolResult(formatResponse.rooIgnoreError(relPath, toolProtocol))
+					return
+				}
+
+				// Check if file is write-protected
+				const isWriteProtected = task.rooProtectedController?.isWriteProtected(relPath) || false
+
+				if (change.type === "add") {
+					// Create new file
+					await this.handleAddFile(change, absolutePath, relPath, task, callbacks, isWriteProtected)
+				} else if (change.type === "delete") {
+					// Delete file
+					await this.handleDeleteFile(absolutePath, relPath, task, callbacks, isWriteProtected)
+				} else if (change.type === "update") {
+					// Update file
+					await this.handleUpdateFile(change, absolutePath, relPath, task, callbacks, isWriteProtected)
+				}
+			}
+
+			task.consecutiveMistakeCount = 0
+			task.recordToolUsage("apply_patch")
+		} catch (error) {
+			await handleError("apply patch", error as Error)
+			await task.diffViewProvider.reset()
+		}
+	}
+
+	private async handleAddFile(
+		change: ApplyPatchFileChange,
+		absolutePath: string,
+		relPath: string,
+		task: Task,
+		callbacks: ToolCallbacks,
+		isWriteProtected: boolean,
+	): Promise<void> {
+		const { askApproval, pushToolResult } = callbacks
+
+		// Check if file already exists
+		const fileExists = await fileExistsAtPath(absolutePath)
+		if (fileExists) {
+			task.consecutiveMistakeCount++
+			task.recordToolError("apply_patch")
+			const errorMessage = `File already exists: ${relPath}. Use Update File instead.`
+			await task.say("error", errorMessage)
+			pushToolResult(formatResponse.toolError(errorMessage))
+			return
+		}
+
+		const newContent = change.newContent || ""
+		const isOutsideWorkspace = isPathOutsideWorkspace(absolutePath)
+
+		// Initialize diff view for new file
+		task.diffViewProvider.editType = "create"
+		task.diffViewProvider.originalContent = undefined
+
+		const diff = formatResponse.createPrettyPatch(relPath, "", newContent)
+
+		// Check experiment settings
+		const provider = task.providerRef.deref()
+		const state = await provider?.getState()
+		const diagnosticsEnabled = state?.diagnosticsEnabled ?? true
+		const writeDelayMs = state?.writeDelayMs ?? DEFAULT_WRITE_DELAY_MS
+		const isPreventFocusDisruptionEnabled = experiments.isEnabled(
+			state?.experiments ?? {},
+			EXPERIMENT_IDS.PREVENT_FOCUS_DISRUPTION,
+		)
+
+		const sanitizedDiff = sanitizeUnifiedDiff(diff || "")
+		const diffStats = computeDiffStats(sanitizedDiff) || undefined
+
+		const sharedMessageProps: ClineSayTool = {
+			tool: "appliedDiff",
+			path: getReadablePath(task.cwd, relPath),
+			diff: sanitizedDiff,
+			isOutsideWorkspace,
+		}
+
+		const completeMessage = JSON.stringify({
+			...sharedMessageProps,
+			content: sanitizedDiff,
+			isProtected: isWriteProtected,
+			diffStats,
+		} satisfies ClineSayTool)
+
+		// Show diff view if focus disruption prevention is disabled
+		if (!isPreventFocusDisruptionEnabled) {
+			await task.diffViewProvider.open(relPath)
+			await task.diffViewProvider.update(newContent, true)
+			task.diffViewProvider.scrollToFirstDiff()
+		}
+
+		const didApprove = await askApproval("tool", completeMessage, undefined, isWriteProtected)
+
+		if (!didApprove) {
+			if (!isPreventFocusDisruptionEnabled) {
+				await task.diffViewProvider.revertChanges()
+			}
+			pushToolResult("Changes were rejected by the user.")
+			await task.diffViewProvider.reset()
+			return
+		}
+
+		// Save the changes
+		if (isPreventFocusDisruptionEnabled) {
+			await task.diffViewProvider.saveDirectly(relPath, newContent, true, diagnosticsEnabled, writeDelayMs)
+		} else {
+			await task.diffViewProvider.saveChanges(diagnosticsEnabled, writeDelayMs)
+		}
+
+		// Track file edit operation
+		await task.fileContextTracker.trackFileContext(relPath, "roo_edited" as RecordSource)
+		task.didEditFile = true
+
+		const message = await task.diffViewProvider.pushToolWriteResult(task, task.cwd, true)
+		pushToolResult(message)
+		await task.diffViewProvider.reset()
+		task.processQueuedMessages()
+	}
+
+	private async handleDeleteFile(
+		absolutePath: string,
+		relPath: string,
+		task: Task,
+		callbacks: ToolCallbacks,
+		isWriteProtected: boolean,
+	): Promise<void> {
+		const { askApproval, pushToolResult } = callbacks
+
+		// Check if file exists
+		const fileExists = await fileExistsAtPath(absolutePath)
+		if (!fileExists) {
+			task.consecutiveMistakeCount++
+			task.recordToolError("apply_patch")
+			const errorMessage = `File not found: ${relPath}. Cannot delete a non-existent file.`
+			await task.say("error", errorMessage)
+			pushToolResult(formatResponse.toolError(errorMessage))
+			return
+		}
+
+		const isOutsideWorkspace = isPathOutsideWorkspace(absolutePath)
+
+		const sharedMessageProps: ClineSayTool = {
+			tool: "appliedDiff",
+			path: getReadablePath(task.cwd, relPath),
+			diff: `File will be deleted: ${relPath}`,
+			isOutsideWorkspace,
+		}
+
+		const completeMessage = JSON.stringify({
+			...sharedMessageProps,
+			content: `Delete file: ${relPath}`,
+			isProtected: isWriteProtected,
+		} satisfies ClineSayTool)
+
+		const didApprove = await askApproval("tool", completeMessage, undefined, isWriteProtected)
+
+		if (!didApprove) {
+			pushToolResult("Delete operation was rejected by the user.")
+			return
+		}
+
+		// Delete the file
+		try {
+			await fs.unlink(absolutePath)
+		} catch (error) {
+			const errorMessage = `Failed to delete file '${relPath}': ${error instanceof Error ? error.message : String(error)}`
+			await task.say("error", errorMessage)
+			pushToolResult(formatResponse.toolError(errorMessage))
+			return
+		}
+
+		task.didEditFile = true
+		pushToolResult(`Successfully deleted ${relPath}`)
+		task.processQueuedMessages()
+	}
+
+	private async handleUpdateFile(
+		change: ApplyPatchFileChange,
+		absolutePath: string,
+		relPath: string,
+		task: Task,
+		callbacks: ToolCallbacks,
+		isWriteProtected: boolean,
+	): Promise<void> {
+		const { askApproval, pushToolResult } = callbacks
+
+		// Check if file exists
+		const fileExists = await fileExistsAtPath(absolutePath)
+		if (!fileExists) {
+			task.consecutiveMistakeCount++
+			task.recordToolError("apply_patch")
+			const errorMessage = `File not found: ${relPath}. Cannot update a non-existent file.`
+			await task.say("error", errorMessage)
+			pushToolResult(formatResponse.toolError(errorMessage))
+			return
+		}
+
+		const originalContent = change.originalContent || ""
+		const newContent = change.newContent || ""
+		const isOutsideWorkspace = isPathOutsideWorkspace(absolutePath)
+
+		// Initialize diff view
+		task.diffViewProvider.editType = "modify"
+		task.diffViewProvider.originalContent = originalContent
+
+		// Generate and validate diff
+		const diff = formatResponse.createPrettyPatch(relPath, originalContent, newContent)
+		if (!diff) {
+			pushToolResult(`No changes needed for '${relPath}'`)
+			await task.diffViewProvider.reset()
+			return
+		}
+
+		// Check experiment settings
+		const provider = task.providerRef.deref()
+		const state = await provider?.getState()
+		const diagnosticsEnabled = state?.diagnosticsEnabled ?? true
+		const writeDelayMs = state?.writeDelayMs ?? DEFAULT_WRITE_DELAY_MS
+		const isPreventFocusDisruptionEnabled = experiments.isEnabled(
+			state?.experiments ?? {},
+			EXPERIMENT_IDS.PREVENT_FOCUS_DISRUPTION,
+		)
+
+		const sanitizedDiff = sanitizeUnifiedDiff(diff)
+		const diffStats = computeDiffStats(sanitizedDiff) || undefined
+
+		const sharedMessageProps: ClineSayTool = {
+			tool: "appliedDiff",
+			path: getReadablePath(task.cwd, relPath),
+			diff: sanitizedDiff,
+			isOutsideWorkspace,
+		}
+
+		const completeMessage = JSON.stringify({
+			...sharedMessageProps,
+			content: sanitizedDiff,
+			isProtected: isWriteProtected,
+			diffStats,
+		} satisfies ClineSayTool)
+
+		// Show diff view if focus disruption prevention is disabled
+		if (!isPreventFocusDisruptionEnabled) {
+			await task.diffViewProvider.open(relPath)
+			await task.diffViewProvider.update(newContent, true)
+			task.diffViewProvider.scrollToFirstDiff()
+		}
+
+		const didApprove = await askApproval("tool", completeMessage, undefined, isWriteProtected)
+
+		if (!didApprove) {
+			if (!isPreventFocusDisruptionEnabled) {
+				await task.diffViewProvider.revertChanges()
+			}
+			pushToolResult("Changes were rejected by the user.")
+			await task.diffViewProvider.reset()
+			return
+		}
+
+		// Handle file move if specified
+		if (change.movePath) {
+			const moveAbsolutePath = path.resolve(task.cwd, change.movePath)
+
+			// Validate destination path access permissions
+			const moveAccessAllowed = task.rooIgnoreController?.validateAccess(change.movePath)
+			if (!moveAccessAllowed) {
+				await task.say("rooignore_error", change.movePath)
+				pushToolResult(formatResponse.rooIgnoreError(change.movePath))
+				await task.diffViewProvider.reset()
+				return
+			}
+
+			// Check if destination path is write-protected
+			const isMovePathWriteProtected = task.rooProtectedController?.isWriteProtected(change.movePath) || false
+			if (isMovePathWriteProtected) {
+				task.consecutiveMistakeCount++
+				task.recordToolError("apply_patch")
+				const errorMessage = `Cannot move file to write-protected path: ${change.movePath}`
+				await task.say("error", errorMessage)
+				pushToolResult(formatResponse.toolError(errorMessage))
+				await task.diffViewProvider.reset()
+				return
+			}
+
+			// Check if destination path is outside workspace
+			const isMoveOutsideWorkspace = isPathOutsideWorkspace(moveAbsolutePath)
+			if (isMoveOutsideWorkspace) {
+				task.consecutiveMistakeCount++
+				task.recordToolError("apply_patch")
+				const errorMessage = `Cannot move file to path outside workspace: ${change.movePath}`
+				await task.say("error", errorMessage)
+				pushToolResult(formatResponse.toolError(errorMessage))
+				await task.diffViewProvider.reset()
+				return
+			}
+
+			// Save new content to the new path
+			if (isPreventFocusDisruptionEnabled) {
+				await task.diffViewProvider.saveDirectly(
+					change.movePath,
+					newContent,
+					false,
+					diagnosticsEnabled,
+					writeDelayMs,
+				)
+			} else {
+				// Write to new path and delete old file
+				const parentDir = path.dirname(moveAbsolutePath)
+				await fs.mkdir(parentDir, { recursive: true })
+				await fs.writeFile(moveAbsolutePath, newContent, "utf8")
+			}
+
+			// Delete the original file
+			try {
+				await fs.unlink(absolutePath)
+			} catch (error) {
+				console.error(`Failed to delete original file after move: ${error}`)
+			}
+
+			await task.fileContextTracker.trackFileContext(change.movePath, "roo_edited" as RecordSource)
+		} else {
+			// Save changes to the same file
+			if (isPreventFocusDisruptionEnabled) {
+				await task.diffViewProvider.saveDirectly(relPath, newContent, false, diagnosticsEnabled, writeDelayMs)
+			} else {
+				await task.diffViewProvider.saveChanges(diagnosticsEnabled, writeDelayMs)
+			}
+
+			await task.fileContextTracker.trackFileContext(relPath, "roo_edited" as RecordSource)
+		}
+
+		task.didEditFile = true
+
+		const message = await task.diffViewProvider.pushToolWriteResult(task, task.cwd, false)
+		pushToolResult(message)
+		await task.diffViewProvider.reset()
+		task.processQueuedMessages()
+	}
+
+	override async handlePartial(task: Task, block: ToolUse<"apply_patch">): Promise<void> {
+		const patch: string | undefined = block.params.patch
+
+		let patchPreview: string | undefined
+		if (patch) {
+			// Show first few lines of the patch
+			const lines = patch.split("\n").slice(0, 5)
+			patchPreview = lines.join("\n") + (patch.split("\n").length > 5 ? "\n..." : "")
+		}
+
+		const sharedMessageProps: ClineSayTool = {
+			tool: "appliedDiff",
+			path: "",
+			diff: patchPreview || "Parsing patch...",
+			isOutsideWorkspace: false,
+		}
+
+		await task.ask("tool", JSON.stringify(sharedMessageProps), block.partial).catch(() => {})
+	}
+}
+
+export const applyPatchTool = new ApplyPatchTool()

+ 189 - 0
src/core/tools/apply-patch/__tests__/apply.spec.ts

@@ -0,0 +1,189 @@
+import { applyChunksToContent, ApplyPatchError } from "../apply"
+import type { UpdateFileChunk } from "../parser"
+
+describe("apply-patch apply", () => {
+	describe("applyChunksToContent", () => {
+		it("should apply simple replacement", () => {
+			const original = "foo\nbar\n"
+			const chunks: UpdateFileChunk[] = [
+				{
+					changeContext: null,
+					oldLines: ["foo", "bar"],
+					newLines: ["foo", "baz"],
+					isEndOfFile: false,
+				},
+			]
+			const result = applyChunksToContent(original, "test.txt", chunks)
+			expect(result).toBe("foo\nbaz\n")
+		})
+
+		it("should apply insertion", () => {
+			const original = "foo\nbar\n"
+			const chunks: UpdateFileChunk[] = [
+				{
+					changeContext: null,
+					oldLines: ["foo"],
+					newLines: ["foo", "inserted"],
+					isEndOfFile: false,
+				},
+			]
+			const result = applyChunksToContent(original, "test.txt", chunks)
+			expect(result).toBe("foo\ninserted\nbar\n")
+		})
+
+		it("should apply deletion", () => {
+			const original = "foo\nbar\nbaz\n"
+			const chunks: UpdateFileChunk[] = [
+				{
+					changeContext: null,
+					oldLines: ["foo", "bar"],
+					newLines: ["foo"],
+					isEndOfFile: false,
+				},
+			]
+			const result = applyChunksToContent(original, "test.txt", chunks)
+			expect(result).toBe("foo\nbaz\n")
+		})
+
+		it("should apply multiple chunks", () => {
+			const original = "foo\nbar\nbaz\nqux\n"
+			const chunks: UpdateFileChunk[] = [
+				{
+					changeContext: null,
+					oldLines: ["foo", "bar"],
+					newLines: ["foo", "BAR"],
+					isEndOfFile: false,
+				},
+				{
+					changeContext: null,
+					oldLines: ["baz", "qux"],
+					newLines: ["baz", "QUX"],
+					isEndOfFile: false,
+				},
+			]
+			const result = applyChunksToContent(original, "test.txt", chunks)
+			expect(result).toBe("foo\nBAR\nbaz\nQUX\n")
+		})
+
+		it("should use context to find location", () => {
+			const original = "class Foo:\n    def bar(self):\n        pass\n"
+			const chunks: UpdateFileChunk[] = [
+				{
+					changeContext: "def bar(self):",
+					oldLines: ["        pass"],
+					newLines: ["        return 123"],
+					isEndOfFile: false,
+				},
+			]
+			const result = applyChunksToContent(original, "test.py", chunks)
+			expect(result).toBe("class Foo:\n    def bar(self):\n        return 123\n")
+		})
+
+		it("should throw when context not found", () => {
+			const original = "foo\nbar\n"
+			const chunks: UpdateFileChunk[] = [
+				{
+					changeContext: "nonexistent",
+					oldLines: ["foo"],
+					newLines: ["baz"],
+					isEndOfFile: false,
+				},
+			]
+			expect(() => applyChunksToContent(original, "test.txt", chunks)).toThrow(ApplyPatchError)
+			expect(() => applyChunksToContent(original, "test.txt", chunks)).toThrow("Failed to find context")
+		})
+
+		it("should throw when old lines not found", () => {
+			const original = "foo\nbar\n"
+			const chunks: UpdateFileChunk[] = [
+				{
+					changeContext: null,
+					oldLines: ["nonexistent"],
+					newLines: ["baz"],
+					isEndOfFile: false,
+				},
+			]
+			expect(() => applyChunksToContent(original, "test.txt", chunks)).toThrow(ApplyPatchError)
+			expect(() => applyChunksToContent(original, "test.txt", chunks)).toThrow("Failed to find expected lines")
+		})
+
+		it("should handle pure addition (empty oldLines)", () => {
+			const original = "foo\nbar\n"
+			const chunks: UpdateFileChunk[] = [
+				{
+					changeContext: null,
+					oldLines: [],
+					newLines: ["added"],
+					isEndOfFile: false,
+				},
+			]
+			const result = applyChunksToContent(original, "test.txt", chunks)
+			// Pure addition goes at the end
+			expect(result).toBe("foo\nbar\nadded\n")
+		})
+
+		it("should handle isEndOfFile flag", () => {
+			const original = "foo\nbar\nbaz\n"
+			const chunks: UpdateFileChunk[] = [
+				{
+					changeContext: null,
+					oldLines: ["baz"],
+					newLines: ["BAZ", "qux"],
+					isEndOfFile: true,
+				},
+			]
+			const result = applyChunksToContent(original, "test.txt", chunks)
+			expect(result).toBe("foo\nbar\nBAZ\nqux\n")
+		})
+
+		it("should handle interleaved changes", () => {
+			const original = "a\nb\nc\nd\ne\nf\n"
+			const chunks: UpdateFileChunk[] = [
+				{
+					changeContext: null,
+					oldLines: ["a", "b"],
+					newLines: ["a", "B"],
+					isEndOfFile: false,
+				},
+				{
+					changeContext: null,
+					oldLines: ["d", "e"],
+					newLines: ["d", "E"],
+					isEndOfFile: false,
+				},
+			]
+			const result = applyChunksToContent(original, "test.txt", chunks)
+			expect(result).toBe("a\nB\nc\nd\nE\nf\n")
+		})
+
+		it("should preserve trailing newline in result", () => {
+			const original = "foo\nbar"
+			const chunks: UpdateFileChunk[] = [
+				{
+					changeContext: null,
+					oldLines: ["bar"],
+					newLines: ["baz"],
+					isEndOfFile: false,
+				},
+			]
+			const result = applyChunksToContent(original, "test.txt", chunks)
+			// Should add trailing newline
+			expect(result).toBe("foo\nbaz\n")
+		})
+
+		it("should handle trailing empty line in pattern", () => {
+			const original = "foo\nbar\n"
+			const chunks: UpdateFileChunk[] = [
+				{
+					changeContext: null,
+					oldLines: ["foo", "bar", ""],
+					newLines: ["foo", "baz", ""],
+					isEndOfFile: false,
+				},
+			]
+			// Should still work by stripping trailing empty
+			const result = applyChunksToContent(original, "test.txt", chunks)
+			expect(result).toBe("foo\nbaz\n")
+		})
+	})
+})

+ 317 - 0
src/core/tools/apply-patch/__tests__/parser.spec.ts

@@ -0,0 +1,317 @@
+import { parsePatch, ParseError } from "../parser"
+
+describe("apply_patch parser", () => {
+	describe("parsePatch", () => {
+		it("should reject patch without Begin Patch marker", () => {
+			expect(() => parsePatch("bad")).toThrow(ParseError)
+			expect(() => parsePatch("bad")).toThrow("The first line of the patch must be '*** Begin Patch'")
+		})
+
+		it("should reject patch without End Patch marker", () => {
+			expect(() => parsePatch("*** Begin Patch\nbad")).toThrow(ParseError)
+			expect(() => parsePatch("*** Begin Patch\nbad")).toThrow(
+				"The last line of the patch must be '*** End Patch'",
+			)
+		})
+
+		it("should parse empty patch", () => {
+			const result = parsePatch("*** Begin Patch\n*** End Patch")
+			expect(result.hunks).toEqual([])
+		})
+
+		it("should parse Add File hunk", () => {
+			const result = parsePatch(`*** Begin Patch
+*** Add File: path/add.py
++abc
++def
+*** End Patch`)
+
+			expect(result.hunks).toHaveLength(1)
+			expect(result.hunks[0]).toEqual({
+				type: "AddFile",
+				path: "path/add.py",
+				contents: "abc\ndef\n",
+			})
+		})
+
+		it("should parse Delete File hunk", () => {
+			const result = parsePatch(`*** Begin Patch
+*** Delete File: path/delete.py
+*** End Patch`)
+
+			expect(result.hunks).toHaveLength(1)
+			expect(result.hunks[0]).toEqual({
+				type: "DeleteFile",
+				path: "path/delete.py",
+			})
+		})
+
+		it("should parse Update File hunk with context", () => {
+			const result = parsePatch(`*** Begin Patch
+*** Update File: path/update.py
+@@ def f():
+-    pass
++    return 123
+*** End Patch`)
+
+			expect(result.hunks).toHaveLength(1)
+			expect(result.hunks[0]).toEqual({
+				type: "UpdateFile",
+				path: "path/update.py",
+				movePath: null,
+				chunks: [
+					{
+						changeContext: "def f():",
+						oldLines: ["    pass"],
+						newLines: ["    return 123"],
+						isEndOfFile: false,
+					},
+				],
+			})
+		})
+
+		it("should parse Update File hunk with Move to", () => {
+			const result = parsePatch(`*** Begin Patch
+*** Update File: path/update.py
+*** Move to: path/update2.py
+@@ def f():
+-    pass
++    return 123
+*** End Patch`)
+
+			expect(result.hunks).toHaveLength(1)
+			expect(result.hunks[0]).toEqual({
+				type: "UpdateFile",
+				path: "path/update.py",
+				movePath: "path/update2.py",
+				chunks: [
+					{
+						changeContext: "def f():",
+						oldLines: ["    pass"],
+						newLines: ["    return 123"],
+						isEndOfFile: false,
+					},
+				],
+			})
+		})
+
+		it("should parse Update File hunk with empty context marker", () => {
+			const result = parsePatch(`*** Begin Patch
+*** Update File: file.py
+@@
++line
+*** End Patch`)
+
+			expect(result.hunks).toHaveLength(1)
+			expect(result.hunks[0]).toEqual({
+				type: "UpdateFile",
+				path: "file.py",
+				movePath: null,
+				chunks: [
+					{
+						changeContext: null,
+						oldLines: [],
+						newLines: ["line"],
+						isEndOfFile: false,
+					},
+				],
+			})
+		})
+
+		it("should parse multiple hunks", () => {
+			const result = parsePatch(`*** Begin Patch
+*** Add File: path/add.py
++abc
++def
+*** Delete File: path/delete.py
+*** Update File: path/update.py
+*** Move to: path/update2.py
+@@ def f():
+-    pass
++    return 123
+*** End Patch`)
+
+			expect(result.hunks).toHaveLength(3)
+			expect(result.hunks[0]).toEqual({
+				type: "AddFile",
+				path: "path/add.py",
+				contents: "abc\ndef\n",
+			})
+			expect(result.hunks[1]).toEqual({
+				type: "DeleteFile",
+				path: "path/delete.py",
+			})
+			expect(result.hunks[2]).toEqual({
+				type: "UpdateFile",
+				path: "path/update.py",
+				movePath: "path/update2.py",
+				chunks: [
+					{
+						changeContext: "def f():",
+						oldLines: ["    pass"],
+						newLines: ["    return 123"],
+						isEndOfFile: false,
+					},
+				],
+			})
+		})
+
+		it("should parse Update hunk followed by Add hunk", () => {
+			const result = parsePatch(`*** Begin Patch
+*** Update File: file.py
+@@
++line
+*** Add File: other.py
++content
+*** End Patch`)
+
+			expect(result.hunks).toHaveLength(2)
+			expect(result.hunks[0]).toEqual({
+				type: "UpdateFile",
+				path: "file.py",
+				movePath: null,
+				chunks: [
+					{
+						changeContext: null,
+						oldLines: [],
+						newLines: ["line"],
+						isEndOfFile: false,
+					},
+				],
+			})
+			expect(result.hunks[1]).toEqual({
+				type: "AddFile",
+				path: "other.py",
+				contents: "content\n",
+			})
+		})
+
+		it("should parse Update hunk without explicit @@ header for first chunk", () => {
+			const result = parsePatch(`*** Begin Patch
+*** Update File: file2.py
+ import foo
++bar
+*** End Patch`)
+
+			expect(result.hunks).toHaveLength(1)
+			expect(result.hunks[0]).toEqual({
+				type: "UpdateFile",
+				path: "file2.py",
+				movePath: null,
+				chunks: [
+					{
+						changeContext: null,
+						oldLines: ["import foo"],
+						newLines: ["import foo", "bar"],
+						isEndOfFile: false,
+					},
+				],
+			})
+		})
+
+		it("should reject empty Update File hunk", () => {
+			expect(() =>
+				parsePatch(`*** Begin Patch
+*** Update File: test.py
+*** End Patch`),
+			).toThrow(ParseError)
+		})
+
+		it("should handle heredoc-wrapped patches (lenient mode)", () => {
+			const result = parsePatch(`<<EOF
+*** Begin Patch
+*** Add File: foo
++hi
+*** End Patch
+EOF`)
+
+			expect(result.hunks).toHaveLength(1)
+			expect(result.hunks[0]).toEqual({
+				type: "AddFile",
+				path: "foo",
+				contents: "hi\n",
+			})
+		})
+
+		it("should handle single-quoted heredoc", () => {
+			const result = parsePatch(`<<'EOF'
+*** Begin Patch
+*** Add File: foo
++hi
+*** End Patch
+EOF`)
+
+			expect(result.hunks).toHaveLength(1)
+		})
+
+		it("should handle double-quoted heredoc", () => {
+			const result = parsePatch(`<<"EOF"
+*** Begin Patch
+*** Add File: foo
++hi
+*** End Patch
+EOF`)
+
+			expect(result.hunks).toHaveLength(1)
+		})
+
+		it("should parse chunk with End of File marker", () => {
+			const result = parsePatch(`*** Begin Patch
+*** Update File: file.py
+@@
++line
+*** End of File
+*** End Patch`)
+
+			expect(result.hunks).toHaveLength(1)
+			expect(result.hunks[0]).toEqual({
+				type: "UpdateFile",
+				path: "file.py",
+				movePath: null,
+				chunks: [
+					{
+						changeContext: null,
+						oldLines: [],
+						newLines: ["line"],
+						isEndOfFile: true,
+					},
+				],
+			})
+		})
+
+		it("should parse multiple chunks in one Update File", () => {
+			const result = parsePatch(`*** Begin Patch
+*** Update File: multi.txt
+@@
+ foo
+-bar
++BAR
+@@
+ baz
+-qux
++QUX
+*** End Patch`)
+
+			expect(result.hunks).toHaveLength(1)
+			expect(result.hunks[0]).toEqual({
+				type: "UpdateFile",
+				path: "multi.txt",
+				movePath: null,
+				chunks: [
+					{
+						changeContext: null,
+						oldLines: ["foo", "bar"],
+						newLines: ["foo", "BAR"],
+						isEndOfFile: false,
+					},
+					{
+						changeContext: null,
+						oldLines: ["baz", "qux"],
+						newLines: ["baz", "QUX"],
+						isEndOfFile: false,
+					},
+				],
+			})
+		})
+	})
+})

+ 97 - 0
src/core/tools/apply-patch/__tests__/seek-sequence.spec.ts

@@ -0,0 +1,97 @@
+import { seekSequence } from "../seek-sequence"
+
+describe("seek-sequence", () => {
+	describe("seekSequence", () => {
+		function toVec(strings: string[]): string[] {
+			return strings
+		}
+
+		it("should match exact sequence", () => {
+			const lines = toVec(["foo", "bar", "baz"])
+			const pattern = toVec(["bar", "baz"])
+			expect(seekSequence(lines, pattern, 0, false)).toBe(1)
+		})
+
+		it("should return start for empty pattern", () => {
+			const lines = toVec(["foo", "bar"])
+			const pattern = toVec([])
+			expect(seekSequence(lines, pattern, 0, false)).toBe(0)
+			expect(seekSequence(lines, pattern, 5, false)).toBe(5)
+		})
+
+		it("should return null when pattern is longer than input", () => {
+			const lines = toVec(["just one line"])
+			const pattern = toVec(["too", "many", "lines"])
+			expect(seekSequence(lines, pattern, 0, false)).toBeNull()
+		})
+
+		it("should match ignoring trailing whitespace", () => {
+			const lines = toVec(["foo   ", "bar\t\t"])
+			const pattern = toVec(["foo", "bar"])
+			expect(seekSequence(lines, pattern, 0, false)).toBe(0)
+		})
+
+		it("should match ignoring leading and trailing whitespace", () => {
+			const lines = toVec(["    foo   ", "   bar\t"])
+			const pattern = toVec(["foo", "bar"])
+			expect(seekSequence(lines, pattern, 0, false)).toBe(0)
+		})
+
+		it("should respect start parameter", () => {
+			const lines = toVec(["foo", "bar", "foo", "baz"])
+			const pattern = toVec(["foo"])
+			// Starting at 0 should find first foo
+			expect(seekSequence(lines, pattern, 0, false)).toBe(0)
+			// Starting at 1 should find second foo
+			expect(seekSequence(lines, pattern, 1, false)).toBe(2)
+		})
+
+		it("should search from end when eof is true", () => {
+			const lines = toVec(["foo", "bar", "foo", "baz"])
+			const pattern = toVec(["foo", "baz"])
+			// With eof=true, should find at the end
+			expect(seekSequence(lines, pattern, 0, true)).toBe(2)
+		})
+
+		it("should handle Unicode normalization - dashes", () => {
+			// EN DASH (\u2013) and NON-BREAKING HYPHEN (\u2011) → ASCII '-'
+			const lines = toVec(["hello \u2013 world \u2011 test"])
+			const pattern = toVec(["hello - world - test"])
+			expect(seekSequence(lines, pattern, 0, false)).toBe(0)
+		})
+
+		it("should handle Unicode normalization - quotes", () => {
+			// Fancy single quotes → ASCII '\''
+			const lines = toVec(["it\u2019s working"]) // RIGHT SINGLE QUOTATION MARK
+			const pattern = toVec(["it's working"])
+			expect(seekSequence(lines, pattern, 0, false)).toBe(0)
+		})
+
+		it("should handle Unicode normalization - double quotes", () => {
+			// Fancy double quotes → ASCII '"'
+			const lines = toVec(["\u201Chello\u201D"]) // LEFT/RIGHT DOUBLE QUOTATION MARK
+			const pattern = toVec(['"hello"'])
+			expect(seekSequence(lines, pattern, 0, false)).toBe(0)
+		})
+
+		it("should handle Unicode normalization - non-breaking space", () => {
+			// Non-breaking space (\u00A0) → normal space
+			const lines = toVec(["hello\u00A0world"])
+			const pattern = toVec(["hello world"])
+			expect(seekSequence(lines, pattern, 0, false)).toBe(0)
+		})
+
+		it("should return null when pattern not found", () => {
+			const lines = toVec(["foo", "bar", "baz"])
+			const pattern = toVec(["qux"])
+			expect(seekSequence(lines, pattern, 0, false)).toBeNull()
+		})
+
+		it("should return null when start is past possible match", () => {
+			const lines = toVec(["foo", "bar", "baz"])
+			const pattern = toVec(["foo", "bar"])
+			// Starting at 2, there's not enough room for a 2-line pattern
+			expect(seekSequence(lines, pattern, 2, false)).toBeNull()
+		})
+	})
+})

+ 205 - 0
src/core/tools/apply-patch/apply.ts

@@ -0,0 +1,205 @@
+/**
+ * Core patch application logic for the apply_patch tool.
+ * Transforms file contents using parsed hunks.
+ */
+
+import type { Hunk, UpdateFileChunk } from "./parser"
+import { seekSequence } from "./seek-sequence"
+
+/**
+ * Error during patch application.
+ */
+export class ApplyPatchError extends Error {
+	constructor(message: string) {
+		super(message)
+		this.name = "ApplyPatchError"
+	}
+}
+
+/**
+ * Result of applying a patch to a file.
+ */
+export interface ApplyPatchFileChange {
+	type: "add" | "delete" | "update"
+	/** Original path of the file */
+	path: string
+	/** New path if the file was moved/renamed */
+	movePath?: string
+	/** Original content (for delete/update) */
+	originalContent?: string
+	/** New content (for add/update) */
+	newContent?: string
+}
+
+/**
+ * Compute the replacements needed to transform originalLines into the new lines.
+ * Each replacement is [startIndex, oldLength, newLines].
+ */
+function computeReplacements(
+	originalLines: string[],
+	filePath: string,
+	chunks: UpdateFileChunk[],
+): Array<[number, number, string[]]> {
+	const replacements: Array<[number, number, string[]]> = []
+	let lineIndex = 0
+
+	for (const chunk of chunks) {
+		// If a chunk has a change_context, find it first
+		if (chunk.changeContext !== null) {
+			const idx = seekSequence(originalLines, [chunk.changeContext], lineIndex, false)
+			if (idx === null) {
+				throw new ApplyPatchError(`Failed to find context '${chunk.changeContext}' in ${filePath}`)
+			}
+			lineIndex = idx + 1
+		}
+
+		if (chunk.oldLines.length === 0) {
+			// Pure addition (no old lines). Add at the end or before final empty line.
+			const insertionIdx =
+				originalLines.length > 0 && originalLines[originalLines.length - 1] === ""
+					? originalLines.length - 1
+					: originalLines.length
+			replacements.push([insertionIdx, 0, chunk.newLines])
+			continue
+		}
+
+		// Try to find the old_lines in the file
+		let pattern = chunk.oldLines
+		let newSlice = chunk.newLines
+		let found = seekSequence(originalLines, pattern, lineIndex, chunk.isEndOfFile)
+
+		// If not found and pattern ends with empty string (trailing newline),
+		// retry without it
+		if (found === null && pattern.length > 0 && pattern[pattern.length - 1] === "") {
+			pattern = pattern.slice(0, -1)
+			if (newSlice.length > 0 && newSlice[newSlice.length - 1] === "") {
+				newSlice = newSlice.slice(0, -1)
+			}
+			found = seekSequence(originalLines, pattern, lineIndex, chunk.isEndOfFile)
+		}
+
+		if (found !== null) {
+			replacements.push([found, pattern.length, newSlice])
+			lineIndex = found + pattern.length
+		} else {
+			throw new ApplyPatchError(
+				`Failed to find expected lines in ${filePath}:\n${chunk.oldLines.join("\n").substring(0, 200)}${chunk.oldLines.join("\n").length > 200 ? "..." : ""}`,
+			)
+		}
+	}
+
+	// Sort replacements by start index
+	replacements.sort((a, b) => a[0] - b[0])
+
+	return replacements
+}
+
+/**
+ * Apply replacements to the original lines, returning the modified content.
+ * Replacements must be applied in reverse order to preserve indices.
+ */
+function applyReplacements(lines: string[], replacements: Array<[number, number, string[]]>): string[] {
+	const result = [...lines]
+
+	// Apply in reverse order so earlier replacements don't shift later indices
+	for (let i = replacements.length - 1; i >= 0; i--) {
+		const [startIdx, oldLen, newSegment] = replacements[i]!
+
+		// Remove old lines
+		result.splice(startIdx, oldLen, ...newSegment)
+	}
+
+	return result
+}
+
+/**
+ * Apply chunks to file content, returning the new content.
+ *
+ * @param originalContent - The original file content
+ * @param filePath - The file path (for error messages)
+ * @param chunks - The update chunks to apply
+ * @returns The new file content
+ */
+export function applyChunksToContent(originalContent: string, filePath: string, chunks: UpdateFileChunk[]): string {
+	// Split content into lines
+	let originalLines = originalContent.split("\n")
+
+	// Drop trailing empty element that results from final newline
+	// so that line counts match standard diff behavior
+	if (originalLines.length > 0 && originalLines[originalLines.length - 1] === "") {
+		originalLines = originalLines.slice(0, -1)
+	}
+
+	const replacements = computeReplacements(originalLines, filePath, chunks)
+	let newLines = applyReplacements(originalLines, replacements)
+
+	// Ensure file ends with newline
+	if (newLines.length === 0 || newLines[newLines.length - 1] !== "") {
+		newLines = [...newLines, ""]
+	}
+
+	return newLines.join("\n")
+}
+
+/**
+ * Process a single hunk and return the file change.
+ *
+ * @param hunk - The hunk to process
+ * @param readFile - Function to read file contents
+ * @returns The file change result
+ */
+export async function processHunk(
+	hunk: Hunk,
+	readFile: (path: string) => Promise<string>,
+): Promise<ApplyPatchFileChange> {
+	switch (hunk.type) {
+		case "AddFile":
+			return {
+				type: "add",
+				path: hunk.path,
+				newContent: hunk.contents,
+			}
+
+		case "DeleteFile": {
+			const content = await readFile(hunk.path)
+			return {
+				type: "delete",
+				path: hunk.path,
+				originalContent: content,
+			}
+		}
+
+		case "UpdateFile": {
+			const originalContent = await readFile(hunk.path)
+			const newContent = applyChunksToContent(originalContent, hunk.path, hunk.chunks)
+			return {
+				type: "update",
+				path: hunk.path,
+				movePath: hunk.movePath ?? undefined,
+				originalContent,
+				newContent,
+			}
+		}
+	}
+}
+
+/**
+ * Process all hunks in a patch.
+ *
+ * @param hunks - The hunks to process
+ * @param readFile - Function to read file contents
+ * @returns Array of file changes
+ */
+export async function processAllHunks(
+	hunks: Hunk[],
+	readFile: (path: string) => Promise<string>,
+): Promise<ApplyPatchFileChange[]> {
+	const changes: ApplyPatchFileChange[] = []
+
+	for (const hunk of hunks) {
+		const change = await processHunk(hunk, readFile)
+		changes.push(change)
+	}
+
+	return changes
+}

+ 14 - 0
src/core/tools/apply-patch/index.ts

@@ -0,0 +1,14 @@
+/**
+ * apply_patch tool module
+ *
+ * A stripped-down, file-oriented diff format designed to be easy to parse and safe to apply.
+ * Based on the Codex apply_patch specification.
+ */
+
+export { parsePatch, ParseError } from "./parser"
+export type { Hunk, UpdateFileChunk, ApplyPatchArgs } from "./parser"
+
+export { seekSequence } from "./seek-sequence"
+
+export { applyChunksToContent, processHunk, processAllHunks, ApplyPatchError } from "./apply"
+export type { ApplyPatchFileChange } from "./apply"
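
As a rough sketch of how these exports fit together, grounded in the signatures above but with an invented file, import path, and readFile stub (none of which are part of this PR):

import { parsePatch, processAllHunks } from "./apply-patch" // import path is illustrative

// Illustrative patch updating a single made-up file.
const patchText = [
	"*** Begin Patch",
	"*** Update File: notes.txt",
	"@@",
	" keep this line",
	"-old line",
	"+new line",
	"*** End Patch",
].join("\n")

async function demo() {
	// parsePatch throws ParseError on malformed input.
	const { hunks } = parsePatch(patchText)

	// The readFile callback stands in for filesystem access; here it returns fixed content.
	const readFile = async (_path: string) => "keep this line\nold line\n"

	// Each change carries originalContent/newContent for the tool to diff and write.
	const changes = await processAllHunks(hunks, readFile)
	console.log(changes[0]?.newContent) // "keep this line\nnew line\n"
}

void demo()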

+ 332 - 0
src/core/tools/apply-patch/parser.ts

@@ -0,0 +1,332 @@
+/**
+ * Parser for the apply_patch tool format.
+ * Converts patch text into structured hunks following the Codex apply_patch specification.
+ *
+ * Grammar:
+ * Patch := Begin { FileOp } End
+ * Begin := "*** Begin Patch" NEWLINE
+ * End := "*** End Patch" NEWLINE
+ * FileOp := AddFile | DeleteFile | UpdateFile
+ * AddFile := "*** Add File: " path NEWLINE { "+" line NEWLINE }
+ * DeleteFile := "*** Delete File: " path NEWLINE
+ * UpdateFile := "*** Update File: " path NEWLINE [ MoveTo ] { Hunk }
+ * MoveTo := "*** Move to: " newPath NEWLINE
+ * Hunk := "@@" [ header ] NEWLINE { HunkLine } [ "*** End of File" NEWLINE ]
+ * HunkLine := (" " | "-" | "+") text NEWLINE
+ */
+
+const BEGIN_PATCH_MARKER = "*** Begin Patch"
+const END_PATCH_MARKER = "*** End Patch"
+const ADD_FILE_MARKER = "*** Add File: "
+const DELETE_FILE_MARKER = "*** Delete File: "
+const UPDATE_FILE_MARKER = "*** Update File: "
+const MOVE_TO_MARKER = "*** Move to: "
+const EOF_MARKER = "*** End of File"
+const CHANGE_CONTEXT_MARKER = "@@ "
+const EMPTY_CHANGE_CONTEXT_MARKER = "@@"
+
+/**
+ * Represents an error during patch parsing.
+ */
+export class ParseError extends Error {
+	constructor(
+		message: string,
+		public lineNumber?: number,
+	) {
+		super(lineNumber !== undefined ? `Line ${lineNumber}: ${message}` : message)
+		this.name = "ParseError"
+	}
+}
+
+/**
+ * A chunk within an UpdateFile hunk.
+ */
+export interface UpdateFileChunk {
+	/** Optional context line (e.g., class or function name) to narrow search */
+	changeContext: string | null
+	/** Lines to find and replace (context + removed lines) */
+	oldLines: string[]
+	/** Lines to replace with (context + added lines) */
+	newLines: string[]
+	/** If true, old_lines must match at end of file */
+	isEndOfFile: boolean
+}
+
+/**
+ * Represents a file operation in a patch.
+ */
+export type Hunk =
+	| {
+			type: "AddFile"
+			path: string
+			contents: string
+	  }
+	| {
+			type: "DeleteFile"
+			path: string
+	  }
+	| {
+			type: "UpdateFile"
+			path: string
+			movePath: string | null
+			chunks: UpdateFileChunk[]
+	  }
+
+/**
+ * Result of parsing a patch.
+ */
+export interface ApplyPatchArgs {
+	hunks: Hunk[]
+	patch: string
+}
+
+/**
+ * Check if lines start and end with correct patch markers.
+ */
+function checkPatchBoundaries(lines: string[]): void {
+	if (lines.length === 0) {
+		throw new ParseError("Empty patch")
+	}
+
+	const firstLine = lines[0]?.trim()
+	const lastLine = lines[lines.length - 1]?.trim()
+
+	if (firstLine !== BEGIN_PATCH_MARKER) {
+		throw new ParseError("The first line of the patch must be '*** Begin Patch'")
+	}
+
+	if (lastLine !== END_PATCH_MARKER) {
+		throw new ParseError("The last line of the patch must be '*** End Patch'")
+	}
+}
+
+/**
+ * Parse a single UpdateFileChunk from lines.
+ * Returns the parsed chunk and number of lines consumed.
+ */
+function parseUpdateFileChunk(
+	lines: string[],
+	lineNumber: number,
+	allowMissingContext: boolean,
+): { chunk: UpdateFileChunk; linesConsumed: number } {
+	if (lines.length === 0) {
+		throw new ParseError("Update hunk does not contain any lines", lineNumber)
+	}
+
+	let changeContext: string | null = null
+	let startIndex = 0
+
+	// Check for context marker
+	if (lines[0] === EMPTY_CHANGE_CONTEXT_MARKER) {
+		changeContext = null
+		startIndex = 1
+	} else if (lines[0]?.startsWith(CHANGE_CONTEXT_MARKER)) {
+		changeContext = lines[0].substring(CHANGE_CONTEXT_MARKER.length)
+		startIndex = 1
+	} else if (!allowMissingContext) {
+		throw new ParseError(`Expected update hunk to start with a @@ context marker, got: '${lines[0]}'`, lineNumber)
+	}
+
+	if (startIndex >= lines.length) {
+		throw new ParseError("Update hunk does not contain any lines", lineNumber + 1)
+	}
+
+	const chunk: UpdateFileChunk = {
+		changeContext,
+		oldLines: [],
+		newLines: [],
+		isEndOfFile: false,
+	}
+
+	let parsedLines = 0
+	for (let i = startIndex; i < lines.length; i++) {
+		const line = lines[i]
+
+		if (line === EOF_MARKER) {
+			if (parsedLines === 0) {
+				throw new ParseError("Update hunk does not contain any lines", lineNumber + 1)
+			}
+			chunk.isEndOfFile = true
+			parsedLines++
+			break
+		}
+
+		const firstChar = line.charAt(0)
+
+		// Empty line is treated as context
+		if (line === "") {
+			chunk.oldLines.push("")
+			chunk.newLines.push("")
+			parsedLines++
+			continue
+		}
+
+		switch (firstChar) {
+			case " ":
+				// Context line
+				chunk.oldLines.push(line.substring(1))
+				chunk.newLines.push(line.substring(1))
+				parsedLines++
+				break
+			case "+":
+				// Added line
+				chunk.newLines.push(line.substring(1))
+				parsedLines++
+				break
+			case "-":
+				// Removed line
+				chunk.oldLines.push(line.substring(1))
+				parsedLines++
+				break
+			default:
+				// If we haven't parsed any lines yet, it's an error
+				if (parsedLines === 0) {
+					throw new ParseError(
+						`Unexpected line found in update hunk: '${line}'. Every line should start with ' ' (context line), '+' (added line), or '-' (removed line)`,
+						lineNumber + 1,
+					)
+				}
+				// Otherwise, assume this is the start of the next hunk
+				return { chunk, linesConsumed: parsedLines + startIndex }
+		}
+	}
+
+	return { chunk, linesConsumed: parsedLines + startIndex }
+}
+
+/**
+ * Parse a single hunk (file operation) from lines.
+ * Returns the parsed hunk and number of lines consumed.
+ */
+function parseOneHunk(lines: string[], lineNumber: number): { hunk: Hunk; linesConsumed: number } {
+	const firstLine = lines[0]?.trim()
+
+	// Add File
+	if (firstLine?.startsWith(ADD_FILE_MARKER)) {
+		const path = firstLine.substring(ADD_FILE_MARKER.length)
+		let contents = ""
+		let parsedLines = 1
+
+		for (let i = 1; i < lines.length; i++) {
+			const line = lines[i]
+			if (line?.startsWith("+")) {
+				contents += line.substring(1) + "\n"
+				parsedLines++
+			} else {
+				break
+			}
+		}
+
+		return {
+			hunk: { type: "AddFile", path, contents },
+			linesConsumed: parsedLines,
+		}
+	}
+
+	// Delete File
+	if (firstLine?.startsWith(DELETE_FILE_MARKER)) {
+		const path = firstLine.substring(DELETE_FILE_MARKER.length)
+		return {
+			hunk: { type: "DeleteFile", path },
+			linesConsumed: 1,
+		}
+	}
+
+	// Update File
+	if (firstLine?.startsWith(UPDATE_FILE_MARKER)) {
+		const path = firstLine.substring(UPDATE_FILE_MARKER.length)
+		let remainingLines = lines.slice(1)
+		let parsedLines = 1
+
+		// Check for optional Move to line
+		let movePath: string | null = null
+		if (remainingLines[0]?.startsWith(MOVE_TO_MARKER)) {
+			movePath = remainingLines[0].substring(MOVE_TO_MARKER.length)
+			remainingLines = remainingLines.slice(1)
+			parsedLines++
+		}
+
+		const chunks: UpdateFileChunk[] = []
+
+		while (remainingLines.length > 0) {
+			// Skip blank lines between chunks
+			if (remainingLines[0]?.trim() === "") {
+				parsedLines++
+				remainingLines = remainingLines.slice(1)
+				continue
+			}
+
+			// Stop if we hit another file operation marker
+			if (remainingLines[0]?.startsWith("***")) {
+				break
+			}
+
+			const { chunk, linesConsumed } = parseUpdateFileChunk(
+				remainingLines,
+				lineNumber + parsedLines,
+				chunks.length === 0, // Allow missing context for first chunk
+			)
+			chunks.push(chunk)
+			parsedLines += linesConsumed
+			remainingLines = remainingLines.slice(linesConsumed)
+		}
+
+		if (chunks.length === 0) {
+			throw new ParseError(`Update file hunk for path '${path}' is empty`, lineNumber)
+		}
+
+		return {
+			hunk: { type: "UpdateFile", path, movePath, chunks },
+			linesConsumed: parsedLines,
+		}
+	}
+
+	throw new ParseError(
+		`'${firstLine}' is not a valid hunk header. Valid hunk headers: '*** Add File: {path}', '*** Delete File: {path}', '*** Update File: {path}'`,
+		lineNumber,
+	)
+}
+
+/**
+ * Parse a patch string into structured hunks.
+ *
+ * @param patch - The patch text to parse
+ * @returns Parsed patch with hunks
+ * @throws ParseError if the patch is invalid
+ */
+export function parsePatch(patch: string): ApplyPatchArgs {
+	const trimmedPatch = patch.trim()
+	const lines = trimmedPatch.split("\n")
+
+	// Handle heredoc-wrapped patches (lenient mode)
+	let effectiveLines = lines
+	if (lines.length >= 4) {
+		const firstLine = lines[0]
+		const lastLine = lines[lines.length - 1]
+		if (
+			(firstLine === "<<EOF" || firstLine === "<<'EOF'" || firstLine === '<<"EOF"') &&
+			lastLine?.endsWith("EOF")
+		) {
+			effectiveLines = lines.slice(1, lines.length - 1)
+		}
+	}
+
+	checkPatchBoundaries(effectiveLines)
+
+	const hunks: Hunk[] = []
+	const lastLineIndex = effectiveLines.length - 1
+	let remainingLines = effectiveLines.slice(1, lastLineIndex) // Skip Begin and End markers
+	let lineNumber = 2 // Start at line 2 (after Begin Patch)
+
+	while (remainingLines.length > 0) {
+		const { hunk, linesConsumed } = parseOneHunk(remainingLines, lineNumber)
+		hunks.push(hunk)
+		lineNumber += linesConsumed
+		remainingLines = remainingLines.slice(linesConsumed)
+	}
+
+	return {
+		hunks,
+		patch: effectiveLines.join("\n"),
+	}
+}

+ 153 - 0
src/core/tools/apply-patch/seek-sequence.ts

@@ -0,0 +1,153 @@
+/**
+ * Fuzzy sequence matching for the apply_patch tool.
+ * Implements multi-pass sequence matching (exact, trim-end, trim, Unicode-normalized)
+ * to locate old_lines and change_context within a file.
+ */
+
+/**
+ * Normalize common Unicode punctuation to ASCII equivalents.
+ * This allows patches written with plain ASCII to match source files
+ * containing typographic characters.
+ */
+function normalizeUnicode(s: string): string {
+	return s
+		.trim()
+		.split("")
+		.map((c) => {
+			// Various dash/hyphen code-points → ASCII '-'
+			if ("\u2010\u2011\u2012\u2013\u2014\u2015\u2212".includes(c)) {
+				return "-"
+			}
+			// Fancy single quotes → '\''
+			if ("\u2018\u2019\u201A\u201B".includes(c)) {
+				return "'"
+			}
+			// Fancy double quotes → '"'
+			if ("\u201C\u201D\u201E\u201F".includes(c)) {
+				return '"'
+			}
+			// Non-breaking space and other odd spaces → normal space
+			if ("\u00A0\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200A\u202F\u205F\u3000".includes(c)) {
+				return " "
+			}
+			return c
+		})
+		.join("")
+}
+
+/**
+ * Check if two arrays of lines match exactly.
+ */
+function exactMatch(lines: string[], pattern: string[], startIndex: number): boolean {
+	for (let i = 0; i < pattern.length; i++) {
+		if (lines[startIndex + i] !== pattern[i]) {
+			return false
+		}
+	}
+	return true
+}
+
+/**
+ * Check if two arrays of lines match after trimming trailing whitespace.
+ */
+function trimEndMatch(lines: string[], pattern: string[], startIndex: number): boolean {
+	for (let i = 0; i < pattern.length; i++) {
+		if (lines[startIndex + i]?.trimEnd() !== pattern[i]?.trimEnd()) {
+			return false
+		}
+	}
+	return true
+}
+
+/**
+ * Check if two arrays of lines match after trimming both sides.
+ */
+function trimMatch(lines: string[], pattern: string[], startIndex: number): boolean {
+	for (let i = 0; i < pattern.length; i++) {
+		if (lines[startIndex + i]?.trim() !== pattern[i]?.trim()) {
+			return false
+		}
+	}
+	return true
+}
+
+/**
+ * Check if two arrays of lines match after Unicode normalization.
+ */
+function normalizedMatch(lines: string[], pattern: string[], startIndex: number): boolean {
+	for (let i = 0; i < pattern.length; i++) {
+		if (normalizeUnicode(lines[startIndex + i] ?? "") !== normalizeUnicode(pattern[i] ?? "")) {
+			return false
+		}
+	}
+	return true
+}
+
+/**
+ * Attempt to find the sequence of pattern lines within lines beginning at or after start.
+ * Returns the starting index of the match or null if not found.
+ *
+ * Matches are attempted with decreasing strictness:
+ * 1. Exact match
+ * 2. Ignoring trailing whitespace
+ * 3. Ignoring leading and trailing whitespace
+ * 4. Unicode-normalized (handles typographic characters)
+ *
+ * When eof is true, first try starting at the end-of-file (so that patterns
+ * intended to match file endings are applied at the end), and fall back to
+ * searching from start if needed.
+ *
+ * Special cases handled defensively:
+ * - Empty pattern → returns start (no-op match)
+ * - pattern.length > lines.length → returns null (cannot match)
+ *
+ * @param lines - The file lines to search in
+ * @param pattern - The pattern lines to find
+ * @param start - The starting index to search from
+ * @param eof - Whether this chunk should match at end of file
+ * @returns The starting index of the match, or null if not found
+ */
+export function seekSequence(lines: string[], pattern: string[], start: number, eof: boolean): number | null {
+	if (pattern.length === 0) {
+		return start
+	}
+
+	// When the pattern is longer than available input, there's no possible match
+	if (pattern.length > lines.length) {
+		return null
+	}
+
+	const searchStart = eof && lines.length >= pattern.length ? lines.length - pattern.length : start
+
+	const maxStart = lines.length - pattern.length
+
+	// Pass 1: Exact match
+	for (let i = searchStart; i <= maxStart; i++) {
+		if (exactMatch(lines, pattern, i)) {
+			return i
+		}
+	}
+
+	// Pass 2: Trim-end match
+	for (let i = searchStart; i <= maxStart; i++) {
+		if (trimEndMatch(lines, pattern, i)) {
+			return i
+		}
+	}
+
+	// Pass 3: Trim both sides match
+	for (let i = searchStart; i <= maxStart; i++) {
+		if (trimMatch(lines, pattern, i)) {
+			return i
+		}
+	}
+
+	// Pass 4: Unicode-normalized match
+	for (let i = searchStart; i <= maxStart; i++) {
+		if (normalizedMatch(lines, pattern, i)) {
+			return i
+		}
+	}
+
+	return null
+}

+ 4 - 1
src/shared/tools.ts

@@ -72,6 +72,7 @@ export const toolParamNames = [
 	"image",
 	"files", // Native protocol parameter for read_file
 	"operations", // search_and_replace parameter for multiple operations
+	"patch", // apply_patch parameter
 ] as const
 
 export type ToolParamName = (typeof toolParamNames)[number]
@@ -90,6 +91,7 @@ export type NativeToolArgs = {
 	insert_content: { path: string; line: number; content: string }
 	apply_diff: { path: string; diff: string }
 	search_and_replace: { path: string; operations: Array<{ search: string; replace: string }> }
+	apply_patch: { patch: string }
 	ask_followup_question: {
 		question: string
 		follow_up: Array<{ text: string; mode?: string }>
@@ -249,6 +251,7 @@ export const TOOL_DISPLAY_NAMES: Record<ToolName, string> = {
 	write_to_file: "write files",
 	apply_diff: "apply changes",
 	search_and_replace: "apply changes using search and replace",
+	apply_patch: "apply patches using codex format",
 	search_files: "search files",
 	list_files: "list files",
 	list_code_definition_names: "list definitions",
@@ -280,7 +283,7 @@ export const TOOL_GROUPS: Record<ToolGroup, ToolGroupConfig> = {
 	},
 	edit: {
 		tools: ["apply_diff", "write_to_file", "insert_content", "generate_image"],
-		customTools: ["search_and_replace"],
+		customTools: ["search_and_replace", "apply_patch"],
 	},
 	browser: {
 		tools: ["browser_action"],

+ 7 - 2
webview-ui/src/components/chat/__tests__/ChatView.spec.tsx

@@ -463,7 +463,12 @@ describe("ChatView - Focus Grabbing Tests", () => {
 			],
 		})
 
-		// Clear any initial calls
+		// Wait for the component to fully render and settle before clearing mocks
+		await waitFor(() => {
+			expect(getByTestId("chat-textarea")).toBeInTheDocument()
+		})
+
+		// Clear any initial calls after state has settled
 		mockFocus.mockClear()
 
 		// Add follow-up question
@@ -484,7 +489,7 @@ describe("ChatView - Focus Grabbing Tests", () => {
 			],
 		})
 
-		// Wait a bit to ensure any focus operations would have occurred
+		// Wait for state update to complete
 		await waitFor(() => {
 			expect(getByTestId("chat-textarea")).toBeInTheDocument()
 		})