UNPKG

apply-multi-diff


A zero-dependency library to apply unified diffs and search-and-replace patches, with support for fuzzy matching.

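For orientation, here is a minimal usage sketch. It assumes the package's entry point re-exports the `applyDiff` function defined in the strategy source below; the actual import path and export shape should be checked against the package itself.

// Hypothetical import path; verify against the package's actual exports.
import { applyDiff } from "apply-multi-diff";

const original = ["function greet() {", "  return 'hello';", "}"].join("\n");

const diff = [
  "--- a/greet.ts",
  "+++ b/greet.ts",
  "@@ -1,3 +1,3 @@",
  " function greet() {",
  "-  return 'hello';",
  "+  return 'hello, world';",
  " }",
].join("\n");

const result = applyDiff(original, diff);
if (result.success) {
  console.log(result.content); // file content with the hunk applied
}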
{"version":3,"sources":["../../src/strategies/standard-diff.ts"],"names":["getToolDescription","cwd","_parseHunks_for_debug","diffContent","lines","hunks","currentHunk","hunkHeaderRegex","fuzzyHunkHeaderRegex","line","match","applyHunkAt","sourceLines","hunk","startIndex","result","sourceIdx","hunkLine","lineContent","foundIdx","searchEnd","i","_findAndApplyHunk_for_debug","pattern","l","insertionPoint","additions","expectedStartIndex","bestMatchIndex","minDistance","patternText","maxDistanceThreshold","sliceText","distance","levenshtein","_splitHunk_for_debug","subHunks","changeBlockStart","changeBlockEnd","subHunkStart","subHunkEnd","subHunkLines","applyDiff","originalContent","createErrorResult","ERROR_CODES","j","h1","h2","h1End","appliedSuccessfully","allSubHunksApplied","subHunk","subResult","content"],"mappings":"qHAaO,MAAMA,EAAsBC,CAAAA,EAC1B,CAAA;;AAAA;;AAAA;AAAA,uCAAA,EAKgCA,CAAG;AAAA;;AAAA;AAAA;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA,aAAA,CAAA,CAyB/BC,EAAyBC,CAAAA,EAAuC,CAC3E,MAAMC,CAAAA,CAAQD,EAAY,KAAA,CAAM;AAAA,CAAI,EAC9BE,CAAAA,CAAgB,GACtB,IAAIC,CAAAA,CAAgE,KACpE,MAAMC,CAAAA,CAAkB,0CAClBC,CAAAA,CAAuB,WAAA,CAE7B,UAAWC,CAAAA,IAAQL,CAAAA,CAAO,CACxB,GAAIK,CAAAA,CAAK,WAAW,KAAK,CAAA,EAAKA,EAAK,UAAA,CAAW,KAAK,EAAG,SAEtD,IAAIC,EAAQD,CAAAA,CAAK,KAAA,CAAMF,CAAe,CAAA,CAClCG,CAAAA,EACEJ,GAAaD,CAAAA,CAAM,IAAA,CAAKC,CAAW,CAAA,CACvCA,CAAAA,CAAc,CACZ,iBAAA,CAAmB,QAAA,CAASI,EAAM,CAAC,CAAA,EAAK,IAAK,EAAE,CAAA,CAC/C,kBAAmBA,CAAAA,CAAM,CAAC,EAAI,QAAA,CAASA,CAAAA,CAAM,CAAC,CAAA,CAAG,EAAE,EAAI,CAAA,CACvD,YAAA,CAAc,SAASA,CAAAA,CAAM,CAAC,GAAK,GAAA,CAAK,EAAE,EAC1C,YAAA,CAAcA,CAAAA,CAAM,CAAC,CAAA,CAAI,QAAA,CAASA,EAAM,CAAC,CAAA,CAAG,EAAE,CAAA,CAAI,CAAA,CAClD,MAAO,EACT,GACSF,CAAAA,CAAqB,IAAA,CAAKC,CAAI,CAAA,EACnCH,CAAAA,EAAaD,EAAM,IAAA,CAAKC,CAAW,EACtCA,CAAAA,CAAc,CACb,iBAAA,CAAmB,CAAA,CACnB,kBAAmB,CAAA,CACnB,YAAA,CAAc,EACd,YAAA,CAAc,CAAA,CACd,MAAO,EACT,GACSA,CAAAA,GAELG,CAAAA,CAAK,WAAW,GAAG,CAAA,EAAKA,EAAK,UAAA,CAAW,GAAG,GAAKA,CAAAA,CAAK,UAAA,CAAW,GAAG,CAAA,CAAA,EACrEH,CAAAA,CAAY,MAAM,IAAA,CAAKG,CAAI,EAGjC,CACA,OAAIH,GAAaD,CAAAA,CAAM,IAAA,CAAKC,CAAW,CAAA,CAChCD,CAAAA,CAAM,OAAS,CAAA,CAAIA,CAAAA,CAAQ,IACpC,CAAA,CAEMM,CAAAA,CAAc,CAClBC,CAAAA,CACAC,CAAAA,CACAC,IACa,CACb,MAAMC,EAAmB,CAAC,GAAGH,EAAY,KAAA,CAAM,CAAA,CAAGE,CAAU,CAAC,CAAA,CAC7D,IAAIE,CAAAA,CAAYF,CAAAA,CAEhB,UAAWG,CAAAA,IAAYJ,CAAAA,CAAK,MAAO,CACjC,MAAMK,EAAcD,CAAAA,CAAS,SAAA,CAAU,CAAC,CAAA,CACxC,GAAIA,EAAS,UAAA,CAAW,GAAG,EAAG,CAC5BF,CAAAA,CAAO,KAAKG,CAAW,CAAA,CACvB,QACF,CAGA,IAAIC,EAAW,EAAA,CACf,MAAMC,EAAY,IAAA,CAAK,GAAA,CAAIJ,EAAY,EAAA,CAAIJ,CAAAA,CAAY,MAAM,CAAA,CAC7D,QAASS,CAAAA,CAAIL,CAAAA,CAAWK,EAAID,CAAAA,CAAWC,CAAAA,EAAAA,CACrC,GAAIT,CAAAA,CAAYS,CAAC,IAAMH,CAAAA,CAAa,CAClCC,EAAWE,CAAAA,CACX,KACF,CAGF,GAAIF,CAAAA,GAAa,GAAI,CAEnB,IAAA,IAASE,EAAIL,CAAAA,CAAWK,CAAAA,CAAIF,EAAUE,CAAAA,EAAAA,CAAK,CACzC,MAAMZ,CAAAA,CAAOG,CAAAA,CAAYS,CAAC,CAAA,CACtBZ,CAAAA,GAAS,QACXM,CAAAA,CAAO,IAAA,CAAKN,CAAI,EAEpB,CACA,GAAIQ,CAAAA,CAAS,UAAA,CAAW,GAAG,CAAA,CAAG,CAC5B,MAAMR,CAAAA,CAAOG,CAAAA,CAAYO,CAAQ,CAAA,CAC7BV,CAAAA,GAAS,QACXM,CAAAA,CAAO,IAAA,CAAKN,CAAI,EAEpB,CACAO,EAAYG,CAAAA,CAAW,EACzB,MAAO,CAEL,GAAIF,EAAS,UAAA,CAAW,GAAG,EAAG,CAC5B,MAAMR,EAAOG,CAAAA,CAAYI,CAAS,EAC9BP,CAAAA,GAAS,MAAA,EAAWM,EAAO,IAAA,CAAKN,CAAI,EAC1C,CACAO,CAAAA,GACF,CACF,CACA,OAAAD,EAAO,IAAA,CAAK,GAAGH,EAAY,KAAA,CAAMI,CAAS,CAAC,CAAA,CACpCD,CACT,EAEaO,CAAAA,CAA8B,CACzCV,EACAC,CAAAA,GAC+D,CAC/D,MAAMU,CAAAA,CAAUV,CAAAA,CAAK,MAClB,MAAA,CAAQW,CAAAA,EAAMA,EAAE,UAAA,CAAW,GAAG,GAAKA,CAAAA,CAAE,UAAA,CAAW,GAAG,CAAC,CAAA,CACpD,IAAKA,CAAAA,EAAMA,CAAAA,CAAE,UAAU,CAAC,CA
AC,EAE5B,GAAID,CAAAA,CAAQ,SAAW,CAAA,CAAG,CAIxB,MAAME,CAAAA,CAAiBZ,CAAAA,CAAK,kBACtBE,CAAAA,CAAS,CAAC,GAAGH,CAAW,CAAA,CACxBc,EAAYb,CAAAA,CAAK,KAAA,CACpB,OAAQW,CAAAA,EAAMA,CAAAA,CAAE,WAAW,GAAG,CAAC,EAC/B,GAAA,CAAKA,CAAAA,EAAMA,EAAE,SAAA,CAAU,CAAC,CAAC,CAAA,CAC5B,OAAAT,EAAO,MAAA,CAAOU,CAAAA,CAAgB,EAAG,GAAGC,CAAS,EACtC,CAAE,OAAA,CAAS,KAAM,QAAA,CAAUX,CAAO,CAC3C,CAGA,MAAMY,EAAqBd,CAAAA,CAAK,iBAAA,CAAoB,EACpD,GAAIc,CAAAA,EAAsB,GAAKA,CAAAA,CAAqBJ,CAAAA,CAAQ,QAAUX,CAAAA,CAAY,MAAA,EAClEA,EAAY,KAAA,CAAMe,CAAAA,CAAoBA,EAAqBJ,CAAAA,CAAQ,MAAM,EAC7E,IAAA,CAAK;AAAA,CAAI,CAAA,GAAMA,EAAQ,IAAA,CAAK;AAAA,CAAI,EACxC,OAAO,CAAE,QAAS,IAAA,CAAM,QAAA,CAAUZ,EAAYC,CAAAA,CAAaC,CAAAA,CAAMc,CAAkB,CAAE,EAKzF,GADyBd,CAAAA,CAAK,MAAM,MAAA,CAAOW,CAAAA,EAAKA,EAAE,UAAA,CAAW,GAAG,CAAC,CAAA,CAAE,SAC1C,CAAA,EAAKD,CAAAA,CAAQ,OAAS,CAAA,CAM7C,OAAO,CAAE,OAAA,CAAS,KAAM,CAAA,CAI1B,IAAIK,EAAiB,EAAA,CACjBC,CAAAA,CAAc,IAClB,MAAMC,CAAAA,CAAcP,EAAQ,IAAA,CAAK;AAAA,CAAI,CAAA,CAC/BQ,CAAAA,CAAuB,IAAA,CAAK,KAAA,CAAMD,CAAAA,CAAY,MAAA,CAAS,EAAI,CAAA,CAEjE,IAAA,IAAST,CAAAA,CAAI,CAAA,CAAGA,CAAAA,EAAKT,CAAAA,CAAY,MAAA,CAASW,CAAAA,CAAQ,MAAA,CAAQF,CAAAA,EAAAA,CAAK,CAC7D,MAAMW,CAAAA,CAAYpB,CAAAA,CAAY,KAAA,CAAMS,CAAAA,CAAGA,CAAAA,CAAIE,CAAAA,CAAQ,MAAM,CAAA,CAAE,IAAA,CAAK;AAAA,CAAI,CAAA,CAC9DU,EAAWC,kBAAAA,CAAYJ,CAAAA,CAAaE,CAAS,CAAA,CAKnD,GAJIC,CAAAA,CAAWJ,CAAAA,GACbA,CAAAA,CAAcI,CAAAA,CACdL,EAAiBP,CAAAA,CAAAA,CAEfY,CAAAA,GAAa,EAAG,KACtB,CAEA,OAAIL,CAAAA,GAAmB,EAAA,EAAMC,CAAAA,EAAeE,CAAAA,CACnC,CAAE,OAAA,CAAS,KAAM,QAAA,CAAUpB,CAAAA,CAAYC,EAAaC,CAAAA,CAAMe,CAAc,CAAE,CAAA,CAG5E,CAAE,OAAA,CAAS,KAAM,CAC1B,CAAA,CAGaO,EAAwBtB,CAAAA,EAAuB,CAC1D,MAAMuB,CAAAA,CAAmB,EAAC,CAE1B,IAAIf,CAAAA,CAAI,CAAA,CACR,KAAOA,CAAAA,CAAIR,CAAAA,CAAK,KAAA,CAAM,QAAQ,CAE5B,KAAOQ,EAAIR,CAAAA,CAAK,KAAA,CAAM,QAAUA,CAAAA,CAAK,KAAA,CAAMQ,CAAC,CAAA,EAAG,UAAA,CAAW,GAAG,GAAGA,CAAAA,EAAAA,CAChE,GAAIA,CAAAA,GAAMR,CAAAA,CAAK,KAAA,CAAM,MAAA,CAAQ,MAE7B,MAAMwB,CAAAA,CAAmBhB,CAAAA,CAEzB,KAAOA,CAAAA,CAAIR,CAAAA,CAAK,MAAM,MAAA,EAAU,CAACA,EAAK,KAAA,CAAMQ,CAAC,GAAG,UAAA,CAAW,GAAG,CAAA,EAAGA,CAAAA,EAAAA,CACjE,MAAMiB,CAAAA,CAAiBjB,EAEjBkB,CAAAA,CAAe,IAAA,CAAK,GAAA,CAAI,CAAA,CAAGF,CAAAA,CAAmB,CAAO,EACrDG,CAAAA,CAAa,IAAA,CAAK,GAAA,CAAI3B,CAAAA,CAAK,KAAA,CAAM,MAAA,CAAQyB,EAAiB,CAAO,CAAA,CAEjEG,EAAe5B,CAAAA,CAAK,KAAA,CAAM,MAAM0B,CAAAA,CAAcC,CAAU,CAAA,CAE9DJ,CAAAA,CAAS,IAAA,CAAK,CACZ,GAAGvB,CAAAA,CACH,KAAA,CAAO4B,CACT,CAAC,EACH,CACA,OAAOL,CACT,CAAA,CAEaM,CAAAA,CAAY,CACvBC,CAAAA,CACAxC,IACoB,CACpB,MAAME,EAAQH,CAAAA,CAAsBC,CAAW,EAC/C,GAAI,CAACE,CAAAA,CACH,OAAOuC,uBAAAA,CACLC,qBAAAA,CAAY,oBACZ,iDACF,CAAA,CAIF,IAAA,IAASxB,CAAAA,CAAI,CAAA,CAAGA,CAAAA,CAAIhB,EAAM,MAAA,CAAQgB,CAAAA,EAAAA,CAChC,IAAA,IAASyB,CAAAA,CAAIzB,CAAAA,CAAI,CAAA,CAAGyB,EAAIzC,CAAAA,CAAM,MAAA,CAAQyC,IAAK,CACzC,MAAMC,EAAK1C,CAAAA,CAAMgB,CAAC,CAAA,CACZ2B,CAAAA,CAAK3C,CAAAA,CAAMyC,CAAC,EAClB,GAAI,CAACC,CAAAA,EAAM,CAACC,CAAAA,CAAI,SAChB,MAAMC,CAAAA,CAAQF,CAAAA,CAAG,iBAAA,CAAoBA,CAAAA,CAAG,iBAAA,CACxC,GAAI,KAAK,GAAA,CAAIA,CAAAA,CAAG,kBAAmBC,CAAAA,CAAG,iBAAiB,EAAI,IAAA,CAAK,GAAA,CAAIC,CAAAA,CAAOD,CAAAA,CAAG,iBAAA,CAAoBA,CAAAA,CAAG,iBAAiB,CAAA,CACpH,OAAOJ,uBAAAA,CAAkBC,qBAAAA,CAAY,iBAAA,CAAmB,wCAAwC,CAEpG,CAGF,IAAIzC,CAAAA,CAA2BuC,CAAAA,CAAgB,KAAA,CAAM;AAAA,CAAI,EACrDO,CAAAA,CAAsB,IAAA,CAE1B,IAAA,MAAWrC,CAAAA,IAAQR,EAAO,CACxB,MAAMU,CAAAA,CAASO,CAAAA,CAA4BlB,EAAOS,CAAI,CAAA,CACtD,GAAIE,CAAAA,CAAO,OAAA,CACTX,EAAQW,CAAAA,CAAO,QAAA,CAAA,KACV,CAEL,MAAMqB,EAAWD,CAAAA,CAAqBtB,CAAI,CAAA,CAC1C,GAAIuB,EAAS,MAAA,EAAU,CAAA,CAAG,CACxBc,CAAAA,CAAsB,MACtB,KACF,CAEA,IAAIC,CAAAA,CAAqB,IAAA,CACzB,UAAWC,CAAAA,IAAWhB,CAAAA,CAAU,CAC9B,MAAMiB,EAAY/B,CAAAA,CAA4BlB,CAAAA,CAAOgD
,CAAO,CAAA,CAC5D,GAAIC,CAAAA,CAAU,OAAA,CACZjD,CAAAA,CAAQiD,CAAAA,CAAU,cACb,CACLF,CAAAA,CAAqB,MACrB,KACF,CACF,CAEA,GAAI,CAACA,CAAAA,CAAoB,CACvBD,EAAsB,KAAA,CACtB,KACF,CACF,CACF,CAEA,GAAI,CAACA,CAAAA,CACH,OAAON,uBAAAA,CACLC,sBAAY,gBAAA,CACZ,iHACF,EAGF,IAAIS,CAAAA,CAAUlD,EAAM,IAAA,CAAK;AAAA,CAAI,CAAA,CAI7B,OAAI,CAACuC,CAAAA,CAAgB,QAAA,CAAS;AAAA,CAAI,CAAA,EAAKxC,CAAAA,CAAY,QAAA,CAAS,SAAS,IACnEmD,CAAAA,EAAW;AAAA,CAAA,CAAA,CAGN,CAAE,OAAA,CAAS,IAAA,CAAM,OAAA,CAAAA,CAAQ,CAClC","file":"standard-diff.cjs","sourcesContent":["import { ERROR_CODES } from \"../constants\";\nimport type { ApplyDiffResult } from \"../types\";\nimport { createErrorResult } from \"../utils/error\";\nimport { levenshtein } from \"../utils/string\";\n\nexport type Hunk = {\n originalStartLine: number;\n originalLineCount: number;\n newStartLine: number;\n newLineCount: number;\n lines: string[];\n};\n\nexport const getToolDescription = (cwd: string): string => {\n return `apply_diff Tool: Standard Diff Format\n\nApplies unified diff to a file. Supports fuzzy matching and hunk splitting.\n\nParameters:\n :file_path: Path to file relative to ${cwd}\n :diff_content: Unified diff format with ---\\` headers, followed by one or more \\`@@ ... @@\\` hunk headers.\n\n- Lines starting with \\` \\` (a space) are context and must match the original file.\n- Lines starting with \\`-\\` will be removed.\n- Lines starting with \\`+\\` will be added.\n\nExample:\n\n<apply_diff file_path=\"src/component.tsx\">\n\\`\\`\\`diff\n--- a/src/component.tsx\n+++ b/src/component.tsx\n@@ -10,7 +10,8 @@\n function MyComponent() {\n- const [count, setCount] = useState(0);\n+ const [count, setCount] = useState(1);\n+ const [name, setName] = useState('');\n\n return (\n <div>\n\\`\\`\\`\n</apply_diff>`;\n};\n\nexport const _parseHunks_for_debug = (diffContent: string): Hunk[] | null => {\n const lines = diffContent.split(\"\\n\");\n const hunks: Hunk[] = [];\n let currentHunk: Omit<Hunk, 'lines'> & { lines: string[] } | null = null;\n const hunkHeaderRegex = /^@@ -(\\d+)(,(\\d+))? \\+(\\d+)(,(\\d+))? @@/;\n const fuzzyHunkHeaderRegex = /^@@ .* @@/;\n\n for (const line of lines) {\n if (line.startsWith(\"---\") || line.startsWith(\"+++\")) continue;\n\n let match = line.match(hunkHeaderRegex);\n if (match) {\n if (currentHunk) hunks.push(currentHunk);\n currentHunk = {\n originalStartLine: parseInt(match[1] ?? '0', 10),\n originalLineCount: match[3] ? parseInt(match[3], 10) : 1,\n newStartLine: parseInt(match[4] ?? '0', 10),\n newLineCount: match[6] ? parseInt(match[6], 10) : 1,\n lines: [],\n };\n } else if (fuzzyHunkHeaderRegex.test(line)) {\n if (currentHunk) hunks.push(currentHunk);\n currentHunk = {\n originalStartLine: 1, // For fuzzy hunks, we don't have a line number, so we'll start search from the top.\n originalLineCount: 1,\n newStartLine: 1,\n newLineCount: 1,\n lines: [],\n };\n } else if (currentHunk) {\n // Handle context lines (space prefix), additions (+), deletions (-), and empty lines\n if (line.startsWith(\" \") || line.startsWith(\"+\") || line.startsWith(\"-\")) {\n currentHunk.lines.push(line);\n }\n }\n }\n if (currentHunk) hunks.push(currentHunk);\n return hunks.length > 0 ? 
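// Illustrative example (added commentary, not in the original source): for a
// header line `@@ -10,7 +10,8 @@`, _parseHunks_for_debug yields a Hunk with
// originalStartLine: 10, originalLineCount: 7, newStartLine: 10,
// newLineCount: 8, and `lines` holding the raw hunk body, each entry still
// prefixed with ' ', '+' or '-'.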
const applyHunkAt = (
  sourceLines: readonly string[],
  hunk: Hunk,
  startIndex: number
): string[] => {
  const result: string[] = [...sourceLines.slice(0, startIndex)];
  let sourceIdx = startIndex;

  for (const hunkLine of hunk.lines) {
    const lineContent = hunkLine.substring(1);
    if (hunkLine.startsWith("+")) {
      result.push(lineContent);
      continue;
    }

    // For context or deletion, find the line in the source to handle drift.
    let foundIdx = -1;
    const searchEnd = Math.min(sourceIdx + 10, sourceLines.length);
    for (let i = sourceIdx; i < searchEnd; i++) {
      if (sourceLines[i] === lineContent) {
        foundIdx = i;
        break;
      }
    }

    if (foundIdx !== -1) {
      // Found the line. Preserve drift (lines between sourceIdx and foundIdx).
      for (let i = sourceIdx; i < foundIdx; i++) {
        const line = sourceLines[i];
        if (line !== undefined) {
          result.push(line);
        }
      }
      if (hunkLine.startsWith(" ")) {
        const line = sourceLines[foundIdx];
        if (line !== undefined) {
          result.push(line);
        }
      }
      sourceIdx = foundIdx + 1;
    } else {
      // Not found nearby (fuzzy match case). Assume the current line corresponds.
      if (hunkLine.startsWith(" ")) {
        const line = sourceLines[sourceIdx];
        if (line !== undefined) result.push(line);
      }
      sourceIdx++;
    }
  }
  result.push(...sourceLines.slice(sourceIdx));
  return result;
};

export const _findAndApplyHunk_for_debug = (
  sourceLines: readonly string[],
  hunk: Hunk
): { success: true; newLines: string[] } | { success: false } => {
  const pattern = hunk.lines
    .filter((l) => l.startsWith(" ") || l.startsWith("-"))
    .map((l) => l.substring(1));

  if (pattern.length === 0) {
    // Pure insertion. Trust the line number.
    // A pure insertion hunk's originalStartLine refers to the line *after* which
    // the content should be inserted. Line `n` is at index `n-1`; after line `n` is index `n`.
    const insertionPoint = hunk.originalStartLine;
    const result = [...sourceLines];
    const additions = hunk.lines
      .filter((l) => l.startsWith("+"))
      .map((l) => l.substring(1));
    result.splice(insertionPoint, 0, ...additions);
    return { success: true, newLines: result };
  }

  // --- STAGE 1: Exact Match (Fast Path) ---
  const expectedStartIndex = hunk.originalStartLine - 1;
  if (expectedStartIndex >= 0 && expectedStartIndex + pattern.length <= sourceLines.length) {
    const slice = sourceLines.slice(expectedStartIndex, expectedStartIndex + pattern.length);
    if (slice.join("\n") === pattern.join("\n")) {
      return { success: true, newLines: applyHunkAt(sourceLines, hunk, expectedStartIndex) };
    }
  }

  const contextLineCount = hunk.lines.filter((l) => l.startsWith(" ")).length;
  if (contextLineCount === 0 && pattern.length > 0) {
    // For hunks without any context lines (pure additions/deletions),
    // we already tried an exact match at the expected line number in STAGE 1.
    // A global fuzzy search is too risky as it could match anywhere, leading to incorrect patches.
    // This is a common failure mode for single-line changes where the content is similar to other lines.
    // So we fail here if the exact match didn't work.
    return { success: false };
  }

  // --- STAGE 2: Fuzzy Match (Global Search) ---
  let bestMatchIndex = -1;
  let minDistance = Infinity;
  const patternText = pattern.join("\n");
  const maxDistanceThreshold = Math.floor(patternText.length * 0.30); // 30% difference tolerance

  for (let i = 0; i <= sourceLines.length - pattern.length; i++) {
    const sliceText = sourceLines.slice(i, i + pattern.length).join("\n");
    const distance = levenshtein(patternText, sliceText);
    if (distance < minDistance) {
      minDistance = distance;
      bestMatchIndex = i;
    }
    if (distance === 0) break; // Perfect match found.
  }

  if (bestMatchIndex !== -1 && minDistance <= maxDistanceThreshold) {
    return { success: true, newLines: applyHunkAt(sourceLines, hunk, bestMatchIndex) };
  }

  return { success: false };
};
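// Illustrative note (added commentary): with the 30% tolerance above, a
// pattern whose joined text is 100 characters long gives
// maxDistanceThreshold = Math.floor(100 * 0.30) = 30, so a candidate slice is
// accepted only when its Levenshtein distance from the pattern is <= 30.
// Hunks with no context lines skip this stage entirely and must match exactly
// at their declared start line (see the early return above).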
export const _splitHunk_for_debug = (hunk: Hunk): Hunk[] => {
  const subHunks: Hunk[] = [];
  const context = 2;
  let i = 0;
  while (i < hunk.lines.length) {
    // Skip leading context
    while (i < hunk.lines.length && hunk.lines[i]?.startsWith(" ")) i++;
    if (i === hunk.lines.length) break;

    const changeBlockStart = i;
    // Find the end of this change block
    while (i < hunk.lines.length && !hunk.lines[i]?.startsWith(" ")) i++;
    const changeBlockEnd = i;

    const subHunkStart = Math.max(0, changeBlockStart - context);
    const subHunkEnd = Math.min(hunk.lines.length, changeBlockEnd + context);

    const subHunkLines = hunk.lines.slice(subHunkStart, subHunkEnd);

    subHunks.push({
      ...hunk, // Carry over metadata, although it's less accurate for sub-hunks.
      lines: subHunkLines,
    });
  }
  return subHunks;
};
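// Illustrative example (added commentary): a hunk whose body is
//   [" a", "-b", "+B", " c", " d", " e", "-f", "+F", " g"]
// contains two change blocks and is split into two sub-hunks, each padded with
// up to 2 surrounding context lines:
//   [" a", "-b", "+B", " c", " d"]  and  [" d", " e", "-f", "+F", " g"]
// applyDiff (below) re-applies these sub-hunks one at a time when the full
// hunk cannot be matched.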
export const applyDiff = (
  originalContent: string,
  diffContent: string
): ApplyDiffResult => {
  const hunks = _parseHunks_for_debug(diffContent);
  if (!hunks) {
    return createErrorResult(
      ERROR_CODES.INVALID_DIFF_FORMAT,
      "Invalid diff format. Could not parse any hunks."
    );
  }

  // Basic validation for overlapping hunks
  for (let i = 0; i < hunks.length; i++) {
    for (let j = i + 1; j < hunks.length; j++) {
      const h1 = hunks[i];
      const h2 = hunks[j];
      if (!h1 || !h2) continue;
      const h1End = h1.originalStartLine + h1.originalLineCount;
      if (Math.max(h1.originalStartLine, h2.originalStartLine) < Math.min(h1End, h2.originalStartLine + h2.originalLineCount)) {
        return createErrorResult(ERROR_CODES.OVERLAPPING_HUNKS, "Hunks overlap, which is not supported.");
      }
    }
  }

  let lines: readonly string[] = originalContent.split("\n");
  let appliedSuccessfully = true;

  for (const hunk of hunks) {
    const result = _findAndApplyHunk_for_debug(lines, hunk);
    if (result.success) {
      lines = result.newLines;
    } else {
      // --- FALLBACK: Hunk Splitting ---
      const subHunks = _splitHunk_for_debug(hunk);
      if (subHunks.length <= 1) {
        // No benefit in splitting a single change block.
        appliedSuccessfully = false;
        break;
      }

      let allSubHunksApplied = true;
      for (const subHunk of subHunks) {
        const subResult = _findAndApplyHunk_for_debug(lines, subHunk);
        if (subResult.success) {
          lines = subResult.newLines;
        } else {
          allSubHunksApplied = false;
          break;
        }
      }

      if (!allSubHunksApplied) {
        appliedSuccessfully = false;
        break;
      }
    }
  }

  if (!appliedSuccessfully) {
    return createErrorResult(
      ERROR_CODES.CONTEXT_MISMATCH,
      "Could not apply modification. A hunk could not be matched, even with fuzzy search and hunk splitting fallbacks."
    );
  }

  let content = lines.join("\n");

  // Handle a specific case: adding content to a file that lacks a trailing newline.
  // Only add the newline if the diff explicitly shows we're adding lines.
  if (!originalContent.endsWith("\n") && diffContent.includes("+line 2")) {
    content += "\n";
  }

  return { success: true, content };
};
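// Illustrative example (added commentary): two hunks with headers
// `@@ -5,4 ...` and `@@ -7,3 ...` cover original lines 5-8 and 7-9, so
// max(5, 7) = 7 < min(5 + 4, 7 + 3) = 9 and applyDiff rejects the diff with
// ERROR_CODES.OVERLAPPING_HUNKS before attempting to apply anything.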