Files
openclaw/src/agents/pi-tools.read.ts
Peter Steinberger 65e13c7b6e fix(fs): honor unset tools.fs.workspaceOnly default (land #31128 by @SaucePackets)
Landed-from: #31128
Contributor: @SaucePackets
Co-authored-by: SaucePackets <33006469+SaucePackets@users.noreply.github.com>
2026-03-02 01:43:50 +00:00

904 lines
29 KiB
TypeScript
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

import fs from "node:fs/promises";
import path from "node:path";
import { fileURLToPath } from "node:url";
import type { AgentToolResult } from "@mariozechner/pi-agent-core";
import { createEditTool, createReadTool, createWriteTool } from "@mariozechner/pi-coding-agent";
import {
SafeOpenError,
openFileWithinRoot,
readFileWithinRoot,
writeFileWithinRoot,
} from "../infra/fs-safe.js";
import { detectMime } from "../media/mime.js";
import { sniffMimeFromBase64 } from "../media/sniff-mime-from-base64.js";
import type { ImageSanitizationLimits } from "./image-sanitization.js";
import type { AnyAgentTool } from "./pi-tools.types.js";
import { assertSandboxPath } from "./sandbox-paths.js";
import type { SandboxFsBridge } from "./sandbox/fs-bridge.js";
import { sanitizeToolResultImages } from "./tool-images.js";
// NOTE(steipete): Upstream read now does file-magic MIME detection; we keep the wrapper
// to normalize payloads and sanitize oversized images before they hit providers.
// Content-block helper types narrowed from pi-agent-core's tool-result union.
type ToolContentBlock = AgentToolResult<unknown>["content"][number];
type ImageContentBlock = Extract<ToolContentBlock, { type: "image" }>;
type TextContentBlock = Extract<ToolContentBlock, { type: "text" }>;
// Baseline byte budget for a single read call when no context window is known.
const DEFAULT_READ_PAGE_MAX_BYTES = 50 * 1024;
// Hard ceiling for adaptive paging regardless of how large the context window is.
const MAX_ADAPTIVE_READ_MAX_BYTES = 512 * 1024;
// Fraction of the model context window (converted to chars) one read may consume.
const ADAPTIVE_READ_CONTEXT_SHARE = 0.2;
// Rough chars-per-token heuristic used to convert token budgets into bytes.
const CHARS_PER_TOKEN_ESTIMATE = 4;
// Upper bound on chained upstream read calls within a single tool invocation.
const MAX_ADAPTIVE_READ_PAGES = 8;
type OpenClawReadToolOptions = {
  modelContextWindowTokens?: number;
  imageSanitization?: ImageSanitizationLimits;
};
// Shape of the truncation metadata extracted from an upstream read result.
type ReadTruncationDetails = {
  truncated: boolean;
  outputLines: number;
  firstLineExceedsLimit: boolean;
};
// Matches the trailing "[... Use offset=N to continue.]" notice the upstream
// read tool appends to truncated output, in either of its two phrasings.
const READ_CONTINUATION_NOTICE_RE =
  /\n\n\[(?:Showing lines [^\]]*?Use offset=\d+ to continue\.|\d+ more lines in file\. Use offset=\d+ to continue\.)\]\s*$/;
/** Constrain `value` to the inclusive [min, max] range. */
function clamp(value: number, min: number, max: number): number {
  const upperBounded = Math.min(max, value);
  return Math.max(min, upperBounded);
}
/**
 * Derive the per-call read byte budget from the model's context window:
 * a fixed share of the window (in estimated chars), clamped between the
 * default page size and the adaptive ceiling. Falls back to the default
 * when no usable window size is configured.
 */
function resolveAdaptiveReadMaxBytes(options?: OpenClawReadToolOptions): number {
  const tokens = options?.modelContextWindowTokens;
  if (typeof tokens !== "number" || !Number.isFinite(tokens) || tokens <= 0) {
    return DEFAULT_READ_PAGE_MAX_BYTES;
  }
  const estimatedBytes = Math.floor(
    tokens * CHARS_PER_TOKEN_ESTIMATE * ADAPTIVE_READ_CONTEXT_SHARE,
  );
  return clamp(estimatedBytes, DEFAULT_READ_PAGE_MAX_BYTES, MAX_ADAPTIVE_READ_MAX_BYTES);
}
/** Human-readable byte count: MB with one decimal, whole KB, or raw bytes. */
function formatBytes(bytes: number): string {
  const KIB = 1024;
  const MIB = KIB * KIB;
  if (bytes >= MIB) {
    return `${(bytes / MIB).toFixed(1)}MB`;
  }
  return bytes >= KIB ? `${Math.round(bytes / KIB)}KB` : `${bytes}B`;
}
/**
 * Collect the text of every text block in a tool result, joined with
 * newlines. Returns undefined when the result holds no text blocks.
 */
function getToolResultText(result: AgentToolResult<unknown>): string | undefined {
  const blocks = Array.isArray(result.content) ? result.content : [];
  const texts: string[] = [];
  for (const block of blocks) {
    if (!block || typeof block !== "object") {
      continue;
    }
    const candidate = block as { type?: unknown; text?: unknown };
    if (candidate.type === "text" && typeof candidate.text === "string") {
      texts.push(candidate.text);
    }
  }
  return texts.length > 0 ? texts.join("\n") : undefined;
}
/**
 * Return a copy of `result` whose FIRST text block carries `text`; later
 * text blocks are untouched. When the result has no text block at all, the
 * entire content is replaced by a single fresh text block.
 */
function withToolResultText(
  result: AgentToolResult<unknown>,
  text: string,
): AgentToolResult<unknown> {
  const blocks = Array.isArray(result.content) ? result.content : [];
  let didReplace = false;
  const rewritten: ToolContentBlock[] = [];
  for (const block of blocks) {
    const isFirstText =
      !didReplace &&
      !!block &&
      typeof block === "object" &&
      (block as { type?: unknown }).type === "text";
    if (isFirstText) {
      didReplace = true;
      rewritten.push({ ...(block as TextContentBlock), text });
    } else {
      rewritten.push(block);
    }
  }
  if (!didReplace) {
    const fallback = { type: "text", text } as unknown as TextContentBlock;
    return {
      ...result,
      content: [fallback] as unknown as AgentToolResult<unknown>["content"],
    };
  }
  return {
    ...result,
    content: rewritten as unknown as AgentToolResult<unknown>["content"],
  };
}
/**
 * Pull normalized truncation metadata out of an upstream read result.
 * Returns null unless `details.truncation.truncated === true`; outputLines
 * is floored and clamped to be a non-negative integer.
 */
function extractReadTruncationDetails(
  result: AgentToolResult<unknown>,
): ReadTruncationDetails | null {
  const details = (result as { details?: unknown }).details;
  if (typeof details !== "object" || details === null) {
    return null;
  }
  const truncation = (details as { truncation?: unknown }).truncation;
  if (typeof truncation !== "object" || truncation === null) {
    return null;
  }
  const record = truncation as Record<string, unknown>;
  if (record.truncated !== true) {
    return null;
  }
  const rawLines = record.outputLines;
  let outputLines = 0;
  if (typeof rawLines === "number" && Number.isFinite(rawLines)) {
    outputLines = Math.max(0, Math.floor(rawLines));
  }
  return {
    truncated: true,
    outputLines,
    firstLineExceedsLimit: record.firstLineExceedsLimit === true,
  };
}
// Drop the trailing continuation notice so concatenated pages read as one stream.
function stripReadContinuationNotice(text: string): string {
  return text.replace(READ_CONTINUATION_NOTICE_RE, "");
}
/**
 * Remove the bulky `details.truncation.content` payload (when present) from a
 * read result, leaving every other detail intact. Returns the input unchanged
 * when there is nothing to strip.
 */
function stripReadTruncationContentDetails(
  result: AgentToolResult<unknown>,
): AgentToolResult<unknown> {
  const detailsRaw = (result as { details?: unknown }).details;
  if (!detailsRaw || typeof detailsRaw !== "object") {
    return result;
  }
  const details = detailsRaw as Record<string, unknown>;
  const truncationRaw = details.truncation;
  if (!truncationRaw || typeof truncationRaw !== "object") {
    return result;
  }
  const truncation = truncationRaw as Record<string, unknown>;
  if (!Object.prototype.hasOwnProperty.call(truncation, "content")) {
    return result;
  }
  const cleaned: Record<string, unknown> = {};
  for (const [key, value] of Object.entries(truncation)) {
    if (key !== "content") {
      cleaned[key] = value;
    }
  }
  return {
    ...result,
    details: {
      ...details,
      truncation: cleaned,
    },
  };
}
/**
 * Run the upstream read tool, transparently chaining paged reads until the
 * aggregated text reaches `maxBytes`, the page cap is hit, or the file is
 * exhausted. An explicit user `limit` disables paging entirely. When output
 * is capped, a continuation notice with the next offset is appended.
 */
async function executeReadWithAdaptivePaging(params: {
  base: AnyAgentTool;
  toolCallId: string;
  args: Record<string, unknown>;
  signal?: AbortSignal;
  maxBytes: number;
}): Promise<AgentToolResult<unknown>> {
  // Respect an explicit caller-provided line limit: single pass, no paging.
  const userLimit = params.args.limit;
  const hasExplicitLimit =
    typeof userLimit === "number" && Number.isFinite(userLimit) && userLimit > 0;
  if (hasExplicitLimit) {
    return await params.base.execute(params.toolCallId, params.args, params.signal);
  }
  // Start at the caller's (1-based) offset, defaulting to the top of the file.
  const offsetRaw = params.args.offset;
  let nextOffset =
    typeof offsetRaw === "number" && Number.isFinite(offsetRaw) && offsetRaw > 0
      ? Math.floor(offsetRaw)
      : 1;
  let firstResult: AgentToolResult<unknown> | null = null;
  let aggregatedText = "";
  let aggregatedBytes = 0;
  let capped = false;
  let continuationOffset: number | undefined;
  for (let page = 0; page < MAX_ADAPTIVE_READ_PAGES; page += 1) {
    const pageArgs = { ...params.args, offset: nextOffset };
    const pageResult = await params.base.execute(params.toolCallId, pageArgs, params.signal);
    firstResult ??= pageResult;
    const rawText = getToolResultText(pageResult);
    if (typeof rawText !== "string") {
      // Non-text result (e.g. an image read) — paging does not apply.
      return pageResult;
    }
    const truncation = extractReadTruncationDetails(pageResult);
    // Continue only when upstream reports a resumable truncation (progress was
    // made, the first line fit) and we still have pages left in the budget.
    const canContinue =
      Boolean(truncation?.truncated) &&
      !truncation?.firstLineExceedsLimit &&
      (truncation?.outputLines ?? 0) > 0 &&
      page < MAX_ADAPTIVE_READ_PAGES - 1;
    const pageText = canContinue ? stripReadContinuationNotice(rawText) : rawText;
    const delimiter = aggregatedText ? "\n\n" : "";
    const nextBytes = Buffer.byteLength(`${delimiter}${pageText}`, "utf-8");
    // Never drop the first page; later pages are discarded once they would
    // push the aggregate past maxBytes, and the caller is told how to resume.
    if (aggregatedText && aggregatedBytes + nextBytes > params.maxBytes) {
      capped = true;
      continuationOffset = nextOffset;
      break;
    }
    aggregatedText += `${delimiter}${pageText}`;
    aggregatedBytes += nextBytes;
    if (!canContinue || !truncation) {
      // Final page: reuse its result envelope with the aggregated text.
      return withToolResultText(pageResult, aggregatedText);
    }
    nextOffset += truncation.outputLines;
    continuationOffset = nextOffset;
    if (aggregatedBytes >= params.maxBytes) {
      capped = true;
      break;
    }
  }
  if (!firstResult) {
    // Defensive fallback: unreachable while MAX_ADAPTIVE_READ_PAGES > 0.
    return await params.base.execute(params.toolCallId, params.args, params.signal);
  }
  let finalText = aggregatedText;
  if (capped && continuationOffset) {
    finalText += `\n\n[Read output capped at ${formatBytes(params.maxBytes)} for this call. Use offset=${continuationOffset} to continue.]`;
  }
  return withToolResultText(firstResult, finalText);
}
/**
 * If `text` is exactly an upstream image-read header, rebuild it with the
 * corrected MIME type; otherwise return the text untouched.
 */
function rewriteReadImageHeader(text: string, mimeType: string): string {
  // pi-coding-agent emits headers shaped like: "Read image file [image/png]"
  const looksLikeHeader = text.startsWith("Read image file [") && text.endsWith("]");
  return looksLikeHeader ? `Read image file [${mimeType}]` : text;
}
/**
 * Verify and correct the MIME type of an image read result by sniffing the
 * base64 payload. Throws when the payload is empty or is not actually an
 * image; rewrites both the image block's mimeType and the textual header
 * when the sniffed type disagrees with the declared one.
 */
async function normalizeReadImageResult(
  result: AgentToolResult<unknown>,
  filePath: string,
): Promise<AgentToolResult<unknown>> {
  const blocks = Array.isArray(result.content) ? result.content : [];
  const isImagePayload = (b: unknown): b is ImageContentBlock =>
    !!b &&
    typeof b === "object" &&
    (b as { type?: unknown }).type === "image" &&
    typeof (b as { data?: unknown }).data === "string" &&
    typeof (b as { mimeType?: unknown }).mimeType === "string";
  const image = blocks.find(isImagePayload);
  if (!image) {
    return result;
  }
  if (!image.data.trim()) {
    throw new Error(`read: image payload is empty (${filePath})`);
  }
  const sniffed = await sniffMimeFromBase64(image.data);
  if (!sniffed) {
    return result;
  }
  if (!sniffed.startsWith("image/")) {
    throw new Error(
      `read: file looks like ${sniffed} but was treated as ${image.mimeType} (${filePath})`,
    );
  }
  if (sniffed === image.mimeType) {
    return result;
  }
  // Declared and sniffed types disagree: rewrite every image block's MIME type
  // and fix up the textual header that mentions it.
  const rewritten = blocks.map((block) => {
    if (block && typeof block === "object" && (block as { type?: unknown }).type === "image") {
      return { ...(block as ImageContentBlock), mimeType: sniffed };
    }
    if (
      block &&
      typeof block === "object" &&
      (block as { type?: unknown }).type === "text" &&
      typeof (block as { text?: unknown }).text === "string"
    ) {
      const textBlock = block as TextContentBlock & { text: string };
      return {
        ...textBlock,
        text: rewriteReadImageHeader(textBlock.text, sniffed),
      };
    }
    return block;
  });
  return { ...result, content: rewritten };
}
// One required tool parameter, satisfiable through any of several alias keys.
type RequiredParamGroup = {
  keys: readonly string[]; // accepted key aliases (e.g. path / file_path)
  allowEmpty?: boolean; // when true, an empty string still satisfies the group
  label?: string; // human-readable label used in validation error messages
};
// Appended to validation errors so models retry with corrected arguments.
const RETRY_GUIDANCE_SUFFIX = " Supply correct parameters before retrying.";
/** Build the Error raised for missing/invalid tool parameters. */
function parameterValidationError(message: string): Error {
  const fullMessage = `${message}.` + RETRY_GUIDANCE_SUFFIX;
  return new Error(fullMessage);
}
// Required-parameter groups for the Claude-compatible file tools. Each group
// lists interchangeable key aliases (pi-coding-agent name first, Claude Code
// name second) so either spelling satisfies validation.
export const CLAUDE_PARAM_GROUPS = {
  read: [{ keys: ["path", "file_path"], label: "path (path or file_path)" }],
  write: [
    { keys: ["path", "file_path"], label: "path (path or file_path)" },
    { keys: ["content"], label: "content" },
  ],
  edit: [
    { keys: ["path", "file_path"], label: "path (path or file_path)" },
    {
      keys: ["oldText", "old_string"],
      label: "oldText (oldText or old_string)",
    },
    {
      // newText may legitimately be empty (deletion edits), hence allowEmpty.
      keys: ["newText", "new_string"],
      label: "newText (newText or new_string)",
      allowEmpty: true,
    },
  ],
} as const;
/**
 * Best-effort extraction of plain text from structured provider payloads
 * (strings, arrays of parts, {text}/{content}/{parts}/{value} records).
 * Recursion is bounded at depth 6; returns undefined when no text is found.
 */
function extractStructuredText(value: unknown, depth = 0): string | undefined {
  if (depth > 6) {
    return undefined;
  }
  if (typeof value === "string") {
    return value;
  }
  if (Array.isArray(value)) {
    const pieces: string[] = [];
    for (const entry of value) {
      const piece = extractStructuredText(entry, depth + 1);
      if (typeof piece === "string") {
        pieces.push(piece);
      }
    }
    return pieces.length > 0 ? pieces.join("") : undefined;
  }
  if (!value || typeof value !== "object") {
    return undefined;
  }
  const record = value as Record<string, unknown>;
  if (typeof record.text === "string") {
    return record.text;
  }
  if (typeof record.content === "string") {
    return record.content;
  }
  if (Array.isArray(record.content)) {
    return extractStructuredText(record.content, depth + 1);
  }
  if (Array.isArray(record.parts)) {
    return extractStructuredText(record.parts, depth + 1);
  }
  // Accept {type/kind: "...text...", value: "..."} shapes with non-empty text.
  if (typeof record.value === "string" && record.value.length > 0) {
    const typeTag = typeof record.type === "string" ? record.type.toLowerCase() : "";
    const kindTag = typeof record.kind === "string" ? record.kind.toLowerCase() : "";
    if (typeTag.includes("text") || kindTag === "text") {
      return record.value;
    }
  }
  return undefined;
}
/**
 * In-place: flatten a structured text payload at `record[key]` into a plain
 * string. Values that are already strings, or that yield no text, are left as-is.
 */
function normalizeTextLikeParam(record: Record<string, unknown>, key: string) {
  const current = record[key];
  if (typeof current === "string") {
    return; // already a plain string — nothing to normalize
  }
  const flattened = extractStructuredText(current);
  if (typeof flattened === "string") {
    record[key] = flattened;
  }
}
// Normalize tool parameters from Claude Code conventions to pi-coding-agent
// conventions. Claude Code uses file_path/old_string/new_string while
// pi-coding-agent uses path/oldText/newText. Mapping them here keeps models
// trained on Claude Code from looping on failed tool calls.
export function normalizeToolParams(params: unknown): Record<string, unknown> | undefined {
  if (!params || typeof params !== "object") {
    return undefined;
  }
  const normalized: Record<string, unknown> = { ...(params as Record<string, unknown>) };
  // Claude-style key → pi-style key; only applied when the target is absent.
  const renames: ReadonlyArray<readonly [string, string]> = [
    ["file_path", "path"],
    ["old_string", "oldText"],
    ["new_string", "newText"],
  ];
  for (const [claudeKey, piKey] of renames) {
    if (claudeKey in normalized && !(piKey in normalized)) {
      normalized[piKey] = normalized[claudeKey];
      delete normalized[claudeKey];
    }
  }
  // Some providers/models emit text payloads as structured blocks instead of
  // raw strings. Flatten these for write/edit so content matching stays
  // deterministic.
  for (const key of ["content", "oldText", "newText"]) {
    normalizeTextLikeParam(normalized, key);
  }
  return normalized;
}
/**
 * Make a tool's JSON schema tolerant of Claude Code argument spellings:
 * mirror path/oldText/newText properties under their Claude aliases and
 * drop them from `required` (either alias may be supplied). Returns the
 * tool unchanged when no patching is needed.
 */
export function patchToolSchemaForClaudeCompatibility(tool: AnyAgentTool): AnyAgentTool {
  const schema =
    tool.parameters && typeof tool.parameters === "object"
      ? (tool.parameters as Record<string, unknown>)
      : undefined;
  if (!schema || !schema.properties || typeof schema.properties !== "object") {
    return tool;
  }
  const properties = { ...(schema.properties as Record<string, unknown>) };
  const required = Array.isArray(schema.required)
    ? schema.required.filter((key): key is string => typeof key === "string")
    : [];
  let changed = false;
  const aliasPairs: ReadonlyArray<readonly [string, string]> = [
    ["path", "file_path"],
    ["oldText", "old_string"],
    ["newText", "new_string"],
  ];
  for (const [original, alias] of aliasPairs) {
    if (!(original in properties)) {
      continue;
    }
    if (!(alias in properties)) {
      properties[alias] = properties[original];
      changed = true;
    }
    const idx = required.indexOf(original);
    if (idx >= 0) {
      required.splice(idx, 1);
      changed = true;
    }
  }
  if (!changed) {
    return tool;
  }
  return {
    ...tool,
    parameters: {
      ...schema,
      properties,
      required,
    },
  };
}
/**
 * Throw a retry-guidance validation error unless every required group is
 * satisfied by at least one of its alias keys holding a string (non-blank,
 * unless the group opts into allowEmpty).
 */
export function assertRequiredParams(
  record: Record<string, unknown> | undefined,
  groups: readonly RequiredParamGroup[],
  toolName: string,
): void {
  if (!record || typeof record !== "object") {
    throw parameterValidationError(`Missing parameters for ${toolName}`);
  }
  const isSatisfied = (group: RequiredParamGroup): boolean =>
    group.keys.some((key) => {
      if (!(key in record)) {
        return false;
      }
      const value = record[key];
      if (typeof value !== "string") {
        return false;
      }
      return group.allowEmpty ? true : value.trim().length > 0;
    });
  const missingLabels = groups
    .filter((group) => !isSatisfied(group))
    .map((group) => group.label ?? group.keys.join(" or "));
  if (missingLabels.length > 0) {
    const noun = missingLabels.length === 1 ? "parameter" : "parameters";
    throw parameterValidationError(`Missing required ${noun}: ${missingLabels.join(", ")}`);
  }
}
// Wrap any tool so Claude-style argument names are normalized (and optionally
// validated against required-parameter groups) before execution.
export function wrapToolParamNormalization(
  tool: AnyAgentTool,
  requiredParamGroups?: readonly RequiredParamGroup[],
): AnyAgentTool {
  const patched = patchToolSchemaForClaudeCompatibility(tool);
  return {
    ...patched,
    execute: async (toolCallId, params, signal, onUpdate) => {
      const normalized = normalizeToolParams(params);
      let record: Record<string, unknown> | undefined = normalized;
      if (!record && params && typeof params === "object") {
        record = params as Record<string, unknown>;
      }
      if (requiredParamGroups && requiredParamGroups.length > 0) {
        assertRequiredParams(record, requiredParamGroups, tool.name);
      }
      return tool.execute(toolCallId, normalized ?? params, signal, onUpdate);
    },
  };
}
// Convenience overload: workspace-root guard with no container-workdir mapping.
export function wrapToolWorkspaceRootGuard(tool: AnyAgentTool, root: string): AnyAgentTool {
  return wrapToolWorkspaceRootGuardWithOptions(tool, root);
}
/**
 * Translate a container-internal path (e.g. "/workspace/src/a.ts") into the
 * corresponding host path under the workspace root. "@"-prefixed paths and
 * file:// URLs are unwrapped first. Paths that are not under the container
 * workdir — or calls with no usable workdir — pass through so the sandbox
 * guard can judge them as-is.
 */
function mapContainerPathToWorkspaceRoot(params: {
  filePath: string;
  root: string;
  containerWorkdir?: string;
}): string {
  const containerWorkdir = params.containerWorkdir?.trim();
  if (!containerWorkdir) {
    return params.filePath;
  }
  // Normalize to forward slashes with no trailing slash; the workdir must be
  // absolute (POSIX-style, as seen inside the container) to be usable.
  // Note: a non-empty trimmed workdir that fails startsWith("/") is rejected
  // here, so no separate empty-string check is needed past this point.
  const normalizedWorkdir = containerWorkdir.replace(/\\/g, "/").replace(/\/+$/, "");
  if (!normalizedWorkdir.startsWith("/")) {
    return params.filePath;
  }
  // Some models prefix paths with "@" (attachment syntax) — strip it.
  let candidate = params.filePath.startsWith("@") ? params.filePath.slice(1) : params.filePath;
  if (/^file:\/\//i.test(candidate)) {
    try {
      candidate = fileURLToPath(candidate);
    } catch {
      // fileURLToPath rejects some malformed-but-parsable URLs; fall back to
      // manual parsing and require an absolute decoded pathname.
      try {
        const parsed = new URL(candidate);
        if (parsed.protocol !== "file:") {
          return params.filePath;
        }
        candidate = decodeURIComponent(parsed.pathname || "");
        if (!candidate.startsWith("/")) {
          return params.filePath;
        }
      } catch {
        return params.filePath;
      }
    }
  }
  const normalizedCandidate = candidate.replace(/\\/g, "/");
  if (normalizedCandidate === normalizedWorkdir) {
    return path.resolve(params.root);
  }
  const prefix = `${normalizedWorkdir}/`;
  if (!normalizedCandidate.startsWith(prefix)) {
    // Not under the container workdir: return the unwrapped candidate as-is.
    return candidate;
  }
  const relative = normalizedCandidate.slice(prefix.length);
  if (!relative) {
    return path.resolve(params.root);
  }
  return path.resolve(params.root, ...relative.split("/").filter(Boolean));
}
/**
 * Wrap a tool so any string `path` argument is mapped out of the container
 * workdir (when configured) and validated against the workspace root before
 * the underlying tool runs.
 */
export function wrapToolWorkspaceRootGuardWithOptions(
  tool: AnyAgentTool,
  root: string,
  options?: {
    containerWorkdir?: string;
  },
): AnyAgentTool {
  return {
    ...tool,
    execute: async (toolCallId, args, signal, onUpdate) => {
      const normalized = normalizeToolParams(args);
      let record: Record<string, unknown> | undefined = normalized;
      if (!record && args && typeof args === "object") {
        record = args as Record<string, unknown>;
      }
      const candidate = record?.path;
      if (typeof candidate === "string" && candidate.trim()) {
        const mapped = mapContainerPathToWorkspaceRoot({
          filePath: candidate,
          root,
          containerWorkdir: options?.containerWorkdir,
        });
        await assertSandboxPath({ filePath: mapped, cwd: root, root });
      }
      return tool.execute(toolCallId, normalized ?? args, signal, onUpdate);
    },
  };
}
// Shared inputs for the sandboxed read/write/edit tool factories: workspace
// root, the sandbox FS bridge that proxies all file I/O, and read-tool tuning.
type SandboxToolParams = {
  root: string;
  bridge: SandboxFsBridge;
  modelContextWindowTokens?: number;
  imageSanitization?: ImageSanitizationLimits;
};
/** Read tool whose file access goes through the sandbox FS bridge. */
export function createSandboxedReadTool(params: SandboxToolParams) {
  const operations = createSandboxReadOperations(params);
  const base = createReadTool(params.root, { operations }) as unknown as AnyAgentTool;
  return createOpenClawReadTool(base, {
    modelContextWindowTokens: params.modelContextWindowTokens,
    imageSanitization: params.imageSanitization,
  });
}
/** Write tool whose file access goes through the sandbox FS bridge. */
export function createSandboxedWriteTool(params: SandboxToolParams) {
  const operations = createSandboxWriteOperations(params);
  const base = createWriteTool(params.root, { operations }) as unknown as AnyAgentTool;
  return wrapToolParamNormalization(base, CLAUDE_PARAM_GROUPS.write);
}
/** Edit tool whose file access goes through the sandbox FS bridge. */
export function createSandboxedEditTool(params: SandboxToolParams) {
  const operations = createSandboxEditOperations(params);
  const base = createEditTool(params.root, { operations }) as unknown as AnyAgentTool;
  return wrapToolParamNormalization(base, CLAUDE_PARAM_GROUPS.edit);
}
/** Host write tool; workspaceOnly (default false) restricts writes to `root`. */
export function createHostWorkspaceWriteTool(root: string, options?: { workspaceOnly?: boolean }) {
  const operations = createHostWriteOperations(root, options);
  const base = createWriteTool(root, { operations }) as unknown as AnyAgentTool;
  return wrapToolParamNormalization(base, CLAUDE_PARAM_GROUPS.write);
}
/** Host edit tool; workspaceOnly (default false) restricts edits to `root`. */
export function createHostWorkspaceEditTool(root: string, options?: { workspaceOnly?: boolean }) {
  const operations = createHostEditOperations(root, options);
  const base = createEditTool(root, { operations }) as unknown as AnyAgentTool;
  return wrapToolParamNormalization(base, CLAUDE_PARAM_GROUPS.edit);
}
export function createOpenClawReadTool(
base: AnyAgentTool,
options?: OpenClawReadToolOptions,
): AnyAgentTool {
const patched = patchToolSchemaForClaudeCompatibility(base);
return {
...patched,
execute: async (toolCallId, params, signal) => {
const normalized = normalizeToolParams(params);
const record =
normalized ??
(params && typeof params === "object" ? (params as Record<string, unknown>) : undefined);
assertRequiredParams(record, CLAUDE_PARAM_GROUPS.read, base.name);
const result = await executeReadWithAdaptivePaging({
base,
toolCallId,
args: (normalized ?? params ?? {}) as Record<string, unknown>,
signal,
maxBytes: resolveAdaptiveReadMaxBytes(options),
});
const filePath = typeof record?.path === "string" ? String(record.path) : "<unknown>";
const strippedDetailsResult = stripReadTruncationContentDetails(result);
const normalizedResult = await normalizeReadImageResult(strippedDetailsResult, filePath);
return sanitizeToolResultImages(
normalizedResult,
`read:${filePath}`,
options?.imageSanitization,
);
},
};
}
/** File operations for the sandboxed read tool, proxied through the bridge. */
function createSandboxReadOperations(params: SandboxToolParams) {
  const { bridge, root } = params;
  return {
    readFile: (absolutePath: string) => bridge.readFile({ filePath: absolutePath, cwd: root }),
    access: async (absolutePath: string) => {
      const stat = await bridge.stat({ filePath: absolutePath, cwd: root });
      if (!stat) {
        throw createFsAccessError("ENOENT", absolutePath);
      }
    },
    detectImageMimeType: async (absolutePath: string) => {
      const buffer = await bridge.readFile({ filePath: absolutePath, cwd: root });
      const mime = await detectMime({ buffer, filePath: absolutePath });
      // Only report image/* types; anything else is treated as non-image.
      return mime?.startsWith("image/") ? mime : undefined;
    },
  } as const;
}
/** File operations for the sandboxed write tool, proxied through the bridge. */
function createSandboxWriteOperations(params: SandboxToolParams) {
  const { bridge, root } = params;
  return {
    mkdir: async (dir: string) => {
      await bridge.mkdirp({ filePath: dir, cwd: root });
    },
    writeFile: async (absolutePath: string, content: string) => {
      await bridge.writeFile({ filePath: absolutePath, cwd: root, data: content });
    },
  } as const;
}
/** File operations for the sandboxed edit tool, proxied through the bridge. */
function createSandboxEditOperations(params: SandboxToolParams) {
  const { bridge, root } = params;
  return {
    readFile: (absolutePath: string) => bridge.readFile({ filePath: absolutePath, cwd: root }),
    writeFile: (absolutePath: string, content: string) =>
      bridge.writeFile({ filePath: absolutePath, cwd: root, data: content }),
    access: async (absolutePath: string) => {
      const stat = await bridge.stat({ filePath: absolutePath, cwd: root });
      if (!stat) {
        throw createFsAccessError("ENOENT", absolutePath);
      }
    },
  } as const;
}
/**
 * File operations for the host write tool. With workspaceOnly unset or false
 * (the default) writes may target any host path; with workspaceOnly true
 * every path is validated against the workspace root before touching disk.
 */
function createHostWriteOperations(root: string, options?: { workspaceOnly?: boolean }) {
  const restrictToWorkspace = options?.workspaceOnly ?? false;
  if (!restrictToWorkspace) {
    // Unrestricted: resolve and write anywhere on the host.
    return {
      mkdir: async (dir: string) => {
        await fs.mkdir(path.resolve(dir), { recursive: true });
      },
      writeFile: async (absolutePath: string, content: string) => {
        const target = path.resolve(absolutePath);
        await fs.mkdir(path.dirname(target), { recursive: true });
        await fs.writeFile(target, content, "utf-8");
      },
    } as const;
  }
  // Restricted: every path must stay inside the workspace root.
  return {
    mkdir: async (dir: string) => {
      const relative = toRelativePathInRoot(root, dir, { allowRoot: true });
      const target = relative ? path.resolve(root, relative) : path.resolve(root);
      await assertSandboxPath({ filePath: target, cwd: root, root });
      await fs.mkdir(target, { recursive: true });
    },
    writeFile: async (absolutePath: string, content: string) => {
      await writeFileWithinRoot({
        rootDir: root,
        relativePath: toRelativePathInRoot(root, absolutePath),
        data: content,
        mkdir: true,
      });
    },
  } as const;
}
/**
 * File operations for the host edit tool. With workspaceOnly unset or false
 * (the default) edits may target any host path; with workspaceOnly true every
 * path is validated against the workspace root via the fs-safe helpers.
 */
function createHostEditOperations(root: string, options?: { workspaceOnly?: boolean }) {
  // Unset workspaceOnly deliberately defaults to false (unrestricted access).
  const workspaceOnly = options?.workspaceOnly ?? false;
  if (!workspaceOnly) {
    // When workspaceOnly is false, allow edits anywhere on the host.
    return {
      readFile: async (absolutePath: string) => {
        const resolved = path.resolve(absolutePath);
        return await fs.readFile(resolved);
      },
      writeFile: async (absolutePath: string, content: string) => {
        const resolved = path.resolve(absolutePath);
        const dir = path.dirname(resolved);
        // Create parent directories so edits can land in new locations.
        await fs.mkdir(dir, { recursive: true });
        await fs.writeFile(resolved, content, "utf-8");
      },
      access: async (absolutePath: string) => {
        const resolved = path.resolve(absolutePath);
        await fs.access(resolved);
      },
    } as const;
  }
  // When workspaceOnly is true, enforce the workspace boundary on every op.
  return {
    readFile: async (absolutePath: string) => {
      const relative = toRelativePathInRoot(root, absolutePath);
      const safeRead = await readFileWithinRoot({
        rootDir: root,
        relativePath: relative,
      });
      return safeRead.buffer;
    },
    writeFile: async (absolutePath: string, content: string) => {
      const relative = toRelativePathInRoot(root, absolutePath);
      await writeFileWithinRoot({
        rootDir: root,
        relativePath: relative,
        data: content,
        mkdir: true,
      });
    },
    access: async (absolutePath: string) => {
      let relative: string;
      try {
        relative = toRelativePathInRoot(root, absolutePath);
      } catch {
        // Path escapes workspace root. Don't throw here: the upstream
        // library replaces any `access` error with a misleading "File not
        // found" message. By returning silently, the subsequent `readFile`
        // call will throw the same "Path escapes workspace root" error
        // through a code-path that propagates the original message.
        return;
      }
      try {
        // Open (and immediately close) rather than stat, so symlink and
        // permission handling matches what a subsequent read would see.
        const opened = await openFileWithinRoot({
          rootDir: root,
          relativePath: relative,
        });
        await opened.handle.close().catch(() => {});
      } catch (error) {
        if (error instanceof SafeOpenError && error.code === "not-found") {
          throw createFsAccessError("ENOENT", absolutePath);
        }
        if (error instanceof SafeOpenError && error.code === "outside-workspace") {
          // Don't throw here; see the comment above about the upstream
          // library swallowing access errors as "File not found".
          return;
        }
        throw error;
      }
    },
  } as const;
}
/**
 * Convert `candidate` into a path relative to `root`, throwing when it
 * resolves outside the root. The root itself is allowed only with
 * `options.allowRoot`, in which case the empty string is returned.
 */
function toRelativePathInRoot(
  root: string,
  candidate: string,
  options?: { allowRoot?: boolean },
): string {
  const rootResolved = path.resolve(root);
  const resolved = path.resolve(candidate);
  const relative = path.relative(rootResolved, resolved);
  if (relative === "" || relative === ".") {
    if (options?.allowRoot) {
      return "";
    }
    throw new Error(`Path escapes workspace root: ${candidate}`);
  }
  // Escape detection must check that the FIRST SEGMENT is exactly "..";
  // a bare startsWith("..") would wrongly reject in-root entries whose
  // names merely begin with two dots (e.g. "..config").
  const escapes =
    relative === ".." || relative.startsWith(`..${path.sep}`) || path.isAbsolute(relative);
  if (escapes) {
    throw new Error(`Path escapes workspace root: ${candidate}`);
  }
  return relative;
}
/** Build a Node-style errno error (with `.code`) for sandbox FS failures. */
function createFsAccessError(code: string, filePath: string): NodeJS.ErrnoException {
  const error: NodeJS.ErrnoException = new Error(`Sandbox FS error (${code}): ${filePath}`);
  error.code = code;
  return error;
}