2026-02-26 13:32:02 +01:00
import syncFs from "node:fs" ;
2026-02-18 01:29:02 +00:00
import fs from "node:fs/promises" ;
import path from "node:path" ;
2026-02-18 01:34:35 +00:00
import type { AgentTool } from "@mariozechner/pi-agent-core" ;
import { Type } from "@sinclair/typebox" ;
2026-02-26 13:32:02 +01:00
import { openBoundaryFile , type BoundaryFileOpenResult } from "../infra/boundary-file-read.js" ;
import { writeFileWithinRoot } from "../infra/fs-safe.js" ;
2026-02-26 03:59:08 +01:00
import { PATH_ALIAS_POLICIES , type PathAliasPolicy } from "../infra/path-alias-guards.js" ;
2026-02-17 00:44:51 +00:00
import { applyUpdateHunk } from "./apply-patch-update.js" ;
2026-03-02 04:04:02 +00:00
import { toRelativeSandboxPath , resolvePathFromInput } from "./path-policy.js" ;
import { assertSandboxPath } from "./sandbox-paths.js" ;
2026-02-18 01:34:35 +00:00
import type { SandboxFsBridge } from "./sandbox/fs-bridge.js" ;
2026-01-12 03:42:49 +00:00
// Envelope sentinels of the apply_patch format.
const BEGIN_PATCH_MARKER = "*** Begin Patch" ;
const END_PATCH_MARKER = "*** End Patch" ;
// Per-file hunk headers; each is immediately followed by the target path.
const ADD_FILE_MARKER = "*** Add File: " ;
const DELETE_FILE_MARKER = "*** Delete File: " ;
const UPDATE_FILE_MARKER = "*** Update File: " ;
// Optional rename directive placed directly under an Update File header.
const MOVE_TO_MARKER = "*** Move to: " ;
// Marks that an update chunk runs to the exact end of the file.
const EOF_MARKER = "*** End of File" ;
// "@@ <context>" introduces a chunk with context; bare "@@" carries none.
const CHANGE_CONTEXT_MARKER = "@@ " ;
const EMPTY_CHANGE_CONTEXT_MARKER = "@@" ;
// One "*** Add File" hunk: creates `path` with the given full contents.
type AddFileHunk = {
kind : "add" ;
path : string ;
contents : string ;
} ;
// One "*** Delete File" hunk: removes `path`.
type DeleteFileHunk = {
kind : "delete" ;
path : string ;
} ;
// A single @@-delimited chunk inside an update hunk: the lines to find
// (oldLines) and the lines to replace them with (newLines).
type UpdateFileChunk = {
// Text following an "@@ " marker; used to locate the chunk. Absent for bare "@@".
changeContext? : string ;
oldLines : string [ ] ;
newLines : string [ ] ;
// True when the chunk was terminated by "*** End of File".
isEndOfFile : boolean ;
} ;
// One "*** Update File" hunk: edits `path`, optionally renaming it to `movePath`.
type UpdateFileHunk = {
kind : "update" ;
path : string ;
movePath? : string ;
chunks : UpdateFileChunk [ ] ;
} ;
// Discriminated union of all hunk kinds, tagged by `kind`.
type Hunk = AddFileHunk | DeleteFileHunk | UpdateFileHunk ;
// Display paths of files touched by a patch, grouped by operation.
export type ApplyPatchSummary = {
added : string [ ] ;
modified : string [ ] ;
deleted : string [ ] ;
} ;
// Result of applyPatch: structured summary plus its human-readable rendering.
export type ApplyPatchResult = {
summary : ApplyPatchSummary ;
text : string ;
} ;
// Tool-result details surfaced to the agent host.
export type ApplyPatchToolDetails = {
summary : ApplyPatchSummary ;
} ;
2026-02-13 17:29:10 +02:00
// Sandbox configuration: all file I/O is routed through `bridge`,
// resolved against the sandbox `root`.
type SandboxApplyPatchConfig = {
root : string ;
bridge : SandboxFsBridge ;
} ;
2026-01-12 03:42:49 +00:00
// Options accepted by applyPatch.
type ApplyPatchOptions = {
// Base directory against which relative patch paths are resolved.
cwd : string ;
// When set, file operations go through the sandbox bridge instead of the host fs.
sandbox? : SandboxApplyPatchConfig ;
/** Restrict patch paths to the workspace root (cwd). Default: true. Set false to opt out. */
workspaceOnly? : boolean ;
// Checked between hunks; aborts with an "AbortError"-named Error.
signal? : AbortSignal ;
} ;
// Tool-call parameter schema: a single required string holding the raw patch text.
const applyPatchSchema = Type . Object ( {
input : Type.String ( {
description : "Patch content using the *** Begin Patch/End Patch format." ,
} ) ,
} ) ;
export function createApplyPatchTool (
2026-02-14 23:50:04 +01:00
options : { cwd? : string ; sandbox? : SandboxApplyPatchConfig ; workspaceOnly? : boolean } = { } ,
2026-02-13 15:42:00 +00:00
) : AgentTool < typeof applyPatchSchema , ApplyPatchToolDetails > {
2026-01-12 03:42:49 +00:00
const cwd = options . cwd ? ? process . cwd ( ) ;
2026-02-13 17:29:10 +02:00
const sandbox = options . sandbox ;
2026-02-15 01:21:07 +01:00
const workspaceOnly = options . workspaceOnly !== false ;
2026-01-12 03:42:49 +00:00
return {
name : "apply_patch" ,
label : "apply_patch" ,
description :
"Apply a patch to one or more files using the apply_patch format. The input should include *** Begin Patch and *** End Patch markers." ,
parameters : applyPatchSchema ,
execute : async ( _toolCallId , args , signal ) = > {
const params = args as { input? : string } ;
const input = typeof params . input === "string" ? params . input : "" ;
if ( ! input . trim ( ) ) {
throw new Error ( "Provide a patch input." ) ;
}
if ( signal ? . aborted ) {
const err = new Error ( "Aborted" ) ;
err . name = "AbortError" ;
throw err ;
}
const result = await applyPatch ( input , {
cwd ,
2026-02-13 17:29:10 +02:00
sandbox ,
2026-02-14 23:50:04 +01:00
workspaceOnly ,
2026-01-12 03:42:49 +00:00
signal ,
} ) ;
return {
content : [ { type : "text" , text : result.text } ] ,
details : { summary : result.summary } ,
} ;
} ,
} ;
}
export async function applyPatch (
input : string ,
options : ApplyPatchOptions ,
) : Promise < ApplyPatchResult > {
const parsed = parsePatchText ( input ) ;
if ( parsed . hunks . length === 0 ) {
throw new Error ( "No files were modified." ) ;
}
const summary : ApplyPatchSummary = {
added : [ ] ,
modified : [ ] ,
deleted : [ ] ,
} ;
const seen = {
added : new Set < string > ( ) ,
modified : new Set < string > ( ) ,
deleted : new Set < string > ( ) ,
} ;
2026-02-13 17:29:10 +02:00
const fileOps = resolvePatchFileOps ( options ) ;
2026-01-12 03:42:49 +00:00
for ( const hunk of parsed . hunks ) {
if ( options . signal ? . aborted ) {
const err = new Error ( "Aborted" ) ;
err . name = "AbortError" ;
throw err ;
}
if ( hunk . kind === "add" ) {
const target = await resolvePatchPath ( hunk . path , options ) ;
2026-02-13 17:29:10 +02:00
await ensureDir ( target . resolved , fileOps ) ;
await fileOps . writeFile ( target . resolved , hunk . contents ) ;
2026-01-12 03:42:49 +00:00
recordSummary ( summary , seen , "added" , target . display ) ;
continue ;
}
if ( hunk . kind === "delete" ) {
2026-02-26 03:59:08 +01:00
const target = await resolvePatchPath ( hunk . path , options , PATH_ALIAS_POLICIES . unlinkTarget ) ;
2026-02-13 17:29:10 +02:00
await fileOps . remove ( target . resolved ) ;
2026-01-12 03:42:49 +00:00
recordSummary ( summary , seen , "deleted" , target . display ) ;
continue ;
}
const target = await resolvePatchPath ( hunk . path , options ) ;
2026-02-13 17:29:10 +02:00
const applied = await applyUpdateHunk ( target . resolved , hunk . chunks , {
readFile : ( path ) = > fileOps . readFile ( path ) ,
} ) ;
2026-01-12 03:42:49 +00:00
if ( hunk . movePath ) {
const moveTarget = await resolvePatchPath ( hunk . movePath , options ) ;
2026-02-13 17:29:10 +02:00
await ensureDir ( moveTarget . resolved , fileOps ) ;
await fileOps . writeFile ( moveTarget . resolved , applied ) ;
await fileOps . remove ( target . resolved ) ;
2026-01-12 03:42:49 +00:00
recordSummary ( summary , seen , "modified" , moveTarget . display ) ;
} else {
2026-02-13 17:29:10 +02:00
await fileOps . writeFile ( target . resolved , applied ) ;
2026-01-12 03:42:49 +00:00
recordSummary ( summary , seen , "modified" , target . display ) ;
}
}
return {
summary ,
text : formatSummary ( summary ) ,
} ;
}
function recordSummary (
summary : ApplyPatchSummary ,
seen : {
added : Set < string > ;
modified : Set < string > ;
deleted : Set < string > ;
} ,
bucket : keyof ApplyPatchSummary ,
value : string ,
) {
2026-01-31 16:19:20 +09:00
if ( seen [ bucket ] . has ( value ) ) {
return ;
}
2026-01-12 03:42:49 +00:00
seen [ bucket ] . add ( value ) ;
summary [ bucket ] . push ( value ) ;
}
function formatSummary ( summary : ApplyPatchSummary ) : string {
const lines = [ "Success. Updated the following files:" ] ;
2026-01-31 16:19:20 +09:00
for ( const file of summary . added ) {
lines . push ( ` A ${ file } ` ) ;
}
for ( const file of summary . modified ) {
lines . push ( ` M ${ file } ` ) ;
}
for ( const file of summary . deleted ) {
lines . push ( ` D ${ file } ` ) ;
}
2026-01-12 03:42:49 +00:00
return lines . join ( "\n" ) ;
}
2026-02-13 17:29:10 +02:00
type PatchFileOps = {
readFile : ( filePath : string ) = > Promise < string > ;
writeFile : ( filePath : string , content : string ) = > Promise < void > ;
remove : ( filePath : string ) = > Promise < void > ;
mkdirp : ( dir : string ) = > Promise < void > ;
} ;
function resolvePatchFileOps ( options : ApplyPatchOptions ) : PatchFileOps {
if ( options . sandbox ) {
const { root , bridge } = options . sandbox ;
return {
readFile : async ( filePath ) = > {
const buf = await bridge . readFile ( { filePath , cwd : root } ) ;
return buf . toString ( "utf8" ) ;
} ,
writeFile : ( filePath , content ) = > bridge . writeFile ( { filePath , cwd : root , data : content } ) ,
remove : ( filePath ) = > bridge . remove ( { filePath , cwd : root , force : false } ) ,
mkdirp : ( dir ) = > bridge . mkdirp ( { filePath : dir , cwd : root } ) ,
} ;
}
2026-02-26 13:32:02 +01:00
const workspaceOnly = options . workspaceOnly !== false ;
2026-02-13 17:29:10 +02:00
return {
2026-02-26 13:32:02 +01:00
readFile : async ( filePath ) = > {
if ( ! workspaceOnly ) {
return await fs . readFile ( filePath , "utf8" ) ;
}
const opened = await openBoundaryFile ( {
absolutePath : filePath ,
rootPath : options.cwd ,
boundaryLabel : "workspace root" ,
} ) ;
assertBoundaryRead ( opened , filePath ) ;
try {
return syncFs . readFileSync ( opened . fd , "utf8" ) ;
} finally {
syncFs . closeSync ( opened . fd ) ;
}
} ,
writeFile : async ( filePath , content ) = > {
if ( ! workspaceOnly ) {
await fs . writeFile ( filePath , content , "utf8" ) ;
return ;
}
2026-03-02 04:04:02 +00:00
const relative = toRelativeSandboxPath ( options . cwd , filePath ) ;
2026-02-26 13:32:02 +01:00
await writeFileWithinRoot ( {
rootDir : options.cwd ,
relativePath : relative ,
data : content ,
encoding : "utf8" ,
} ) ;
} ,
2026-02-13 17:29:10 +02:00
remove : ( filePath ) = > fs . rm ( filePath ) ,
mkdirp : ( dir ) = > fs . mkdir ( dir , { recursive : true } ) . then ( ( ) = > { } ) ,
} ;
}
async function ensureDir ( filePath : string , ops : PatchFileOps ) {
2026-01-12 03:42:49 +00:00
const parent = path . dirname ( filePath ) ;
2026-01-31 16:19:20 +09:00
if ( ! parent || parent === "." ) {
return ;
}
2026-02-13 17:29:10 +02:00
await ops . mkdirp ( parent ) ;
2026-01-12 03:42:49 +00:00
}
/**
 * Resolve a patch path to an absolute host path plus a short display path.
 *
 * Sandbox mode maps the path through the bridge; otherwise the path is
 * resolved against options.cwd. In both modes, unless workspaceOnly was
 * explicitly set to false, the resolved path is validated with
 * assertSandboxPath so it cannot escape the workspace root. The alias
 * policy controls whether a final symlink/hardlink component is tolerated
 * (needed when the path is the target of an unlink/delete).
 */
async function resolvePatchPath (
filePath : string ,
options : ApplyPatchOptions ,
// Default: strict policy (no symlink/hardlink aliasing of the final component).
aliasPolicy : PathAliasPolicy = PATH_ALIAS_POLICIES . strict ,
) : Promise < { resolved : string ; display : string } > {
if ( options . sandbox ) {
// Sandbox: the bridge translates the patch path to a host path.
const resolved = options . sandbox . bridge . resolvePath ( {
filePath ,
cwd : options.cwd ,
} ) ;
if ( options . workspaceOnly !== false ) {
// Confine the bridge-resolved host path to the workspace root.
await assertSandboxPath ( {
filePath : resolved.hostPath ,
cwd : options.cwd ,
root : options.cwd ,
allowFinalSymlinkForUnlink : aliasPolicy.allowFinalSymlinkForUnlink ,
allowFinalHardlinkForUnlink : aliasPolicy.allowFinalHardlinkForUnlink ,
} ) ;
}
return {
resolved : resolved.hostPath ,
// Prefer the sandbox-relative path for display; fall back to the host path.
display : resolved.relativePath || resolved . hostPath ,
} ;
}
const workspaceOnly = options . workspaceOnly !== false ;
// Host mode: workspace-confined resolution by default; the opt-out path
// resolves the input without any boundary check.
const resolved = workspaceOnly
? (
await assertSandboxPath ( {
filePath ,
cwd : options.cwd ,
root : options.cwd ,
allowFinalSymlinkForUnlink : aliasPolicy.allowFinalSymlinkForUnlink ,
allowFinalHardlinkForUnlink : aliasPolicy.allowFinalHardlinkForUnlink ,
} )
) . resolved
: resolvePathFromInput ( filePath , options . cwd ) ;
return {
resolved ,
display : toDisplayPath ( resolved , options . cwd ) ,
} ;
}
2026-02-26 13:32:02 +01:00
/**
 * Narrow a boundary-file open result to its success variant, or throw a
 * descriptive error for validation failures ("unsafe path") and everything
 * else ("path not found").
 *
 * Fix: the error message template was garbled with stray padding spaces.
 */
function assertBoundaryRead(
	opened: BoundaryFileOpenResult,
	targetPath: string,
): asserts opened is Extract<BoundaryFileOpenResult, { ok: true }> {
	if (opened.ok) {
		return;
	}
	const reason = opened.reason === "validation" ? "unsafe path" : "path not found";
	throw new Error(`Failed boundary read for ${targetPath} (${reason})`);
}
2026-01-12 03:42:49 +00:00
function toDisplayPath ( resolved : string , cwd : string ) : string {
const relative = path . relative ( cwd , resolved ) ;
2026-01-31 16:19:20 +09:00
if ( ! relative || relative === "" ) {
return path . basename ( resolved ) ;
}
if ( relative . startsWith ( ".." ) || path . isAbsolute ( relative ) ) {
return resolved ;
}
2026-01-12 03:42:49 +00:00
return relative ;
}
/**
 * Split raw patch text into lines, validate the Begin/End envelope
 * (tolerating a heredoc wrapper), then parse the interior into hunks.
 * Returns the hunks plus the validated patch text re-joined with "\n".
 */
function parsePatchText(input: string): { hunks: Hunk[]; patch: string } {
	const trimmed = input.trim();
	if (!trimmed) {
		throw new Error("Invalid patch: input is empty.");
	}
	const validated = checkPatchBoundariesLenient(trimmed.split(/\r?\n/));
	const hunks: Hunk[] = [];
	// Interior of the envelope: everything between Begin Patch and End Patch.
	let body = validated.slice(1, validated.length - 1);
	// 1-based line counter for error messages; line 1 is the Begin marker.
	let lineNumber = 2;
	while (body.length > 0) {
		const { hunk, consumed } = parseOneHunk(body, lineNumber);
		hunks.push(hunk);
		lineNumber += consumed;
		body = body.slice(consumed);
	}
	return { hunks, patch: validated.join("\n") };
}
function checkPatchBoundariesLenient ( lines : string [ ] ) : string [ ] {
const strictError = checkPatchBoundariesStrict ( lines ) ;
2026-01-31 16:19:20 +09:00
if ( ! strictError ) {
return lines ;
}
2026-01-12 03:42:49 +00:00
if ( lines . length < 4 ) {
throw new Error ( strictError ) ;
}
const first = lines [ 0 ] ;
const last = lines [ lines . length - 1 ] ;
2026-01-14 14:31:43 +00:00
if ( ( first === "<<EOF" || first === "<<'EOF'" || first === '<<"EOF"' ) && last . endsWith ( "EOF" ) ) {
2026-01-12 03:42:49 +00:00
const inner = lines . slice ( 1 , lines . length - 1 ) ;
const innerError = checkPatchBoundariesStrict ( inner ) ;
2026-01-31 16:19:20 +09:00
if ( ! innerError ) {
return inner ;
}
2026-01-12 03:42:49 +00:00
throw new Error ( innerError ) ;
}
throw new Error ( strictError ) ;
}
function checkPatchBoundariesStrict ( lines : string [ ] ) : string | null {
const firstLine = lines [ 0 ] ? . trim ( ) ;
const lastLine = lines [ lines . length - 1 ] ? . trim ( ) ;
if ( firstLine === BEGIN_PATCH_MARKER && lastLine === END_PATCH_MARKER ) {
return null ;
}
if ( firstLine !== BEGIN_PATCH_MARKER ) {
return "The first line of the patch must be '*** Begin Patch'" ;
}
return "The last line of the patch must be '*** End Patch'" ;
}
2026-01-14 14:31:43 +00:00
function parseOneHunk ( lines : string [ ] , lineNumber : number ) : { hunk : Hunk ; consumed : number } {
2026-01-12 03:42:49 +00:00
if ( lines . length === 0 ) {
throw new Error ( ` Invalid patch hunk at line ${ lineNumber } : empty hunk ` ) ;
}
const firstLine = lines [ 0 ] . trim ( ) ;
if ( firstLine . startsWith ( ADD_FILE_MARKER ) ) {
const targetPath = firstLine . slice ( ADD_FILE_MARKER . length ) ;
let contents = "" ;
let consumed = 1 ;
for ( const addLine of lines . slice ( 1 ) ) {
if ( addLine . startsWith ( "+" ) ) {
contents += ` ${ addLine . slice ( 1 ) } \ n ` ;
consumed += 1 ;
} else {
break ;
}
}
return {
hunk : { kind : "add" , path : targetPath , contents } ,
consumed ,
} ;
}
if ( firstLine . startsWith ( DELETE_FILE_MARKER ) ) {
const targetPath = firstLine . slice ( DELETE_FILE_MARKER . length ) ;
return {
hunk : { kind : "delete" , path : targetPath } ,
consumed : 1 ,
} ;
}
if ( firstLine . startsWith ( UPDATE_FILE_MARKER ) ) {
const targetPath = firstLine . slice ( UPDATE_FILE_MARKER . length ) ;
let remaining = lines . slice ( 1 ) ;
let consumed = 1 ;
let movePath : string | undefined ;
const moveCandidate = remaining [ 0 ] ? . trim ( ) ;
if ( moveCandidate ? . startsWith ( MOVE_TO_MARKER ) ) {
movePath = moveCandidate . slice ( MOVE_TO_MARKER . length ) ;
remaining = remaining . slice ( 1 ) ;
consumed += 1 ;
}
const chunks : UpdateFileChunk [ ] = [ ] ;
while ( remaining . length > 0 ) {
if ( remaining [ 0 ] . trim ( ) === "" ) {
remaining = remaining . slice ( 1 ) ;
consumed += 1 ;
continue ;
}
if ( remaining [ 0 ] . startsWith ( "***" ) ) {
break ;
}
const { chunk , consumed : chunkLines } = parseUpdateFileChunk (
remaining ,
lineNumber + consumed ,
chunks . length === 0 ,
) ;
chunks . push ( chunk ) ;
remaining = remaining . slice ( chunkLines ) ;
consumed += chunkLines ;
}
if ( chunks . length === 0 ) {
throw new Error (
` Invalid patch hunk at line ${ lineNumber } : Update file hunk for path ' ${ targetPath } ' is empty ` ,
) ;
}
return {
hunk : {
kind : "update" ,
path : targetPath ,
movePath ,
chunks ,
} ,
consumed ,
} ;
}
throw new Error (
` Invalid patch hunk at line ${ lineNumber } : ' ${ lines [ 0 ] } ' is not a valid hunk header. Valid hunk headers: '*** Add File: {path}', '*** Delete File: {path}', '*** Update File: {path}' ` ,
) ;
}
/**
 * Parse one @@-chunk of an Update File hunk from the front of `lines`.
 *
 * A chunk is an optional "@@"/"@@ <context>" marker followed by body lines:
 * " " (context, kept in both old and new), "+" (added), "-" (removed), or an
 * empty line (treated as empty context). Parsing stops at "*** End of File"
 * (which flags isEndOfFile) or at the first unrecognized line after some
 * body has been consumed.
 *
 * @param lines Remaining hunk lines; the chunk starts at lines[0].
 * @param lineNumber 1-based line number of lines[0], for error messages.
 * @param allowMissingContext True only for a hunk's first chunk, which may
 *        omit the @@ marker.
 * @returns The parsed chunk and how many input lines it consumed.
 * @throws Error on an empty chunk, a missing required @@ marker, or an
 *         unrecognized first body line.
 *
 * Fix: error-message template literals were garbled with stray padding spaces.
 */
function parseUpdateFileChunk(
	lines: string[],
	lineNumber: number,
	allowMissingContext: boolean,
): { chunk: UpdateFileChunk; consumed: number } {
	if (lines.length === 0) {
		throw new Error(
			`Invalid patch hunk at line ${lineNumber}: Update hunk does not contain any lines`,
		);
	}
	let changeContext: string | undefined;
	let startIndex = 0;
	if (lines[0] === EMPTY_CHANGE_CONTEXT_MARKER) {
		// Bare "@@": chunk has no context string.
		startIndex = 1;
	} else if (lines[0].startsWith(CHANGE_CONTEXT_MARKER)) {
		changeContext = lines[0].slice(CHANGE_CONTEXT_MARKER.length);
		startIndex = 1;
	} else if (!allowMissingContext) {
		throw new Error(
			`Invalid patch hunk at line ${lineNumber}: Expected update hunk to start with a @@ context marker, got: '${lines[0]}'`,
		);
	}
	if (startIndex >= lines.length) {
		throw new Error(
			`Invalid patch hunk at line ${lineNumber + 1}: Update hunk does not contain any lines`,
		);
	}
	const chunk: UpdateFileChunk = {
		changeContext,
		oldLines: [],
		newLines: [],
		isEndOfFile: false,
	};
	let parsedLines = 0;
	for (const line of lines.slice(startIndex)) {
		if (line === EOF_MARKER) {
			if (parsedLines === 0) {
				throw new Error(
					`Invalid patch hunk at line ${lineNumber + 1}: Update hunk does not contain any lines`,
				);
			}
			chunk.isEndOfFile = true;
			parsedLines += 1;
			break;
		}
		const marker = line[0];
		if (!marker) {
			// Fully empty line: empty context line in both old and new content.
			chunk.oldLines.push("");
			chunk.newLines.push("");
			parsedLines += 1;
			continue;
		}
		if (marker === " ") {
			const content = line.slice(1);
			chunk.oldLines.push(content);
			chunk.newLines.push(content);
			parsedLines += 1;
			continue;
		}
		if (marker === "+") {
			chunk.newLines.push(line.slice(1));
			parsedLines += 1;
			continue;
		}
		if (marker === "-") {
			chunk.oldLines.push(line.slice(1));
			parsedLines += 1;
			continue;
		}
		if (parsedLines === 0) {
			throw new Error(
				`Invalid patch hunk at line ${lineNumber + 1}: Unexpected line found in update hunk: '${line}'. Every line should start with ' ' (context line), '+' (added line), or '-' (removed line)`,
			);
		}
		// An unrecognized line after at least one body line ends the chunk.
		break;
	}
	return { chunk, consumed: parsedLines + startIndex };
}