// src/fsTools.ts
import * as path from "node:path";
import * as fs from "node:fs/promises";
import * as crypto from "node:crypto";
import { resolveSafe, BACKUP_DIR_NAME, isInsideBackupDir, PathError } from "./pathGuard";
// Directory names grep never descends into: dependency/build output, VCS
// metadata, and this plugin's own backup directory.
const SKIP_DIRS = new Set(["node_modules", "dist", "target", ".git", BACKUP_DIR_NAME]);
// Caps for the write_file preview shown to the user before confirmation:
// at most PREVIEW_LINES lines and roughly PREVIEW_BYTES bytes per side.
const PREVIEW_LINES = 8;
const PREVIEW_BYTES = 1024;
// Per-process secret for signing write-confirmation tokens. Regenerated on
// every restart, so tokens cannot outlive the process that issued them.
const TOKEN_SECRET = crypto.randomBytes(32);

/**
 * Derives a short HMAC token binding a path to specific content, so a
 * confirmed write can be verified against the exact previewed payload.
 * Returns the first 16 hex characters of HMAC-SHA256(path NUL content).
 */
function contentToken(absPath: string, content: string): string {
  const mac = crypto.createHmac("sha256", TOKEN_SECRET);
  // NUL separator prevents ambiguity between (path, content) pairs.
  mac.update(`${absPath}\0${content}`);
  return mac.digest("hex").slice(0, 16);
}
/**
 * Returns the first `n` lines of `s`, additionally capped at `byteCap`
 * UTF-8 bytes. Appends "…" when the byte cap forces truncation.
 */
function firstLines(s: string, n: number, byteCap: number): string {
  let out = s.split("\n").slice(0, n).join("\n");
  if (Buffer.byteLength(out, "utf-8") > byteCap) {
    // Truncate by UTF-8 *bytes*, not UTF-16 code units: the old
    // `out.slice(0, byteCap)` counted characters, so multi-byte content
    // could stay far above the byte cap. Start at byteCap characters
    // (an upper bound, since every character encodes to >= 1 byte) and
    // shrink until the encoded size fits.
    let end = Math.min(out.length, byteCap);
    while (end > 0 && Buffer.byteLength(out.slice(0, end), "utf-8") > byteCap) {
      end--;
    }
    // Don't cut through a surrogate pair at the boundary.
    const last = out.charCodeAt(end - 1);
    if (end > 0 && last >= 0xd800 && last <= 0xdbff) end--;
    out = out.slice(0, end) + "…";
  }
  return out;
}
/**
 * Reads a UTF-8 text file after validating the path against the allowed
 * roots and enforcing the configured size cap.
 *
 * Returns `{ path, size, content }` on success, or `{ error }` when the
 * target is not a regular file or exceeds `maxFileSizeKb`.
 */
export async function readFileImpl(
  args: { path: string },
  ctx: { allowedRoots: string[]; maxFileSizeKb: number },
) {
  const { abs } = await resolveSafe(args.path, ctx.allowedRoots);
  const stat = await fs.stat(abs);
  if (!stat.isFile()) {
    return { error: `"${abs}" is not a regular file.` };
  }
  const capBytes = ctx.maxFileSizeKb * 1024;
  if (stat.size > capBytes) {
    return {
      error: `File is ${stat.size} bytes, larger than the ${ctx.maxFileSizeKb} KB cap. Increase the cap in the plugin config or read a smaller file.`,
    };
  }
  const content = await fs.readFile(abs, "utf-8");
  return { path: abs, size: stat.size, content };
}
// Fixed ordering for entry types: directories first, then files, symlinks,
// and finally anything else.
const ENTRY_TYPE_RANK: Record<string, number> = { dir: 0, file: 1, symlink: 2, other: 3 };

/**
 * Comparator for directory listings: directories before everything else,
 * then by type rank, then locale-aware by name. The previous inline
 * comparator returned 1 for ANY differing non-dir type pair (file vs
 * symlink compared as "greater" in both directions), which is not
 * antisymmetric and produced engine-dependent ordering.
 */
function compareDirEntries(
  a: { name: string; type: string },
  b: { name: string; type: string },
): number {
  if (a.type !== b.type) {
    return (ENTRY_TYPE_RANK[a.type] ?? 99) - (ENTRY_TYPE_RANK[b.type] ?? 99);
  }
  return a.name.localeCompare(b.name);
}

/**
 * Lists a directory inside the allowed roots. With no `path` argument:
 * errors when no roots exist, lists the roots themselves when several are
 * configured, or descends into the single root. Backup-directory entries
 * are hidden from the listing.
 */
export async function readDirImpl(
  args: { path?: string },
  ctx: { allowedRoots: string[] },
) {
  let abs: string;
  if (!args.path) {
    if (ctx.allowedRoots.length === 0) {
      return { error: "No allowed roots are configured." };
    }
    if (ctx.allowedRoots.length > 1) {
      // Ambiguous starting point: present the roots and let the caller pick.
      return {
        path: null,
        note: "Multiple allowed roots are configured. Listing the roots themselves; pass `path` to descend into one.",
        entries: ctx.allowedRoots.map((r) => ({ name: r, type: "dir" as const })),
      };
    }
    abs = ctx.allowedRoots[0];
  } else {
    ({ abs } = await resolveSafe(args.path, ctx.allowedRoots));
  }
  const stat = await fs.stat(abs);
  if (!stat.isDirectory()) return { error: `"${abs}" is not a directory.` };
  const entries = await fs.readdir(abs, { withFileTypes: true });
  return {
    path: abs,
    entries: entries
      .filter((e) => e.name !== BACKUP_DIR_NAME)
      .map((e) => ({
        name: e.name,
        type: e.isDirectory()
          ? "dir"
          : e.isFile()
          ? "file"
          : e.isSymbolicLink()
          ? "symlink"
          : "other",
      }))
      .sort(compareDirEntries),
  };
}
/**
 * Two-phase file write with user confirmation.
 *
 * First call (confirm falsy): returns a preview of the change plus an HMAC
 * `confirm_token` bound to (path, content); nothing is written. Second call
 * (confirm=true with the matching token): backs up any existing file into
 * the backup directory, then writes the new content. The token ties the
 * confirmed write to the exact payload that was previewed.
 */
export async function writeFileImpl(
  args: { path: string; content: string; confirm?: boolean; confirm_token?: string },
  ctx: { allowedRoots: string[] },
) {
  const { abs, root } = await resolveSafe(args.path, ctx.allowedRoots);
  // Backups must never be clobbered through this tool.
  if (isInsideBackupDir(abs, root)) {
    return { error: `Refusing to write inside the backup directory (${BACKUP_DIR_NAME}).` };
  }
  // Token over (path, content); recomputed here so both the preview and the
  // confirmation check derive from the same inputs.
  const expectedToken = contentToken(abs, args.content);
  let exists = false;
  let oldSize = 0;
  let oldContent: string | null = null;
  try {
    const st = await fs.stat(abs);
    if (st.isFile()) {
      exists = true;
      oldSize = st.size;
      // Best effort: old content is only used for the preview diff below.
      oldContent = await fs.readFile(abs, "utf-8").catch(() => null);
    } else {
      // Refuse to replace a directory/socket/etc. with a regular file.
      return { error: `"${abs}" exists but is not a regular file.` };
    }
  } catch {
    /* doesn't exist yet */
  }
  // Phase 1: no confirm flag — return a preview and the token; do NOT write.
  if (!args.confirm) {
    return {
      status: "PREVIEW_ONLY — FILE NOT WRITTEN YET",
      written: false,
      preview: {
        path: abs,
        exists,
        old_size: oldSize,
        new_size: Buffer.byteLength(args.content, "utf-8"),
        old_first_lines: oldContent ? firstLines(oldContent, PREVIEW_LINES, PREVIEW_BYTES) : null,
        new_first_lines: firstLines(args.content, PREVIEW_LINES, PREVIEW_BYTES),
      },
      confirm_token: expectedToken,
      next_step:
        "The file has NOT been written. Show the user the path and a short summary of the new content above, then ask if they agree. " +
        "Only after the user explicitly agrees, call write_file AGAIN with the SAME path and content, plus confirm=true and confirm_token=<this token>. " +
        "Only the second call writes the file. Do NOT tell the user the file has been created until you receive a response with written=true.",
    };
  }
  // Phase 2: confirm=true — the token must match what phase 1 issued for
  // this exact (path, content) pair, otherwise the content drifted.
  if (args.confirm_token !== expectedToken) {
    return {
      error:
        "confirm_token does not match (path, content). The content you passed differs from what was previewed. " +
        "Call write_file with confirm=false to get a fresh preview and token.",
    };
  }
  // Snapshot the existing file into <root>/<backup dir>/<rel>.<timestamp>.bak
  // before overwriting, so the change is recoverable.
  let backupPath: string | null = null;
  if (exists) {
    const rel = path.relative(root, abs);
    const ts = new Date().toISOString().replace(/[:.]/g, "-");
    backupPath = path.join(root, BACKUP_DIR_NAME, `${rel}.${ts}.bak`);
    await fs.mkdir(path.dirname(backupPath), { recursive: true });
    await fs.copyFile(abs, backupPath);
  }
  await fs.mkdir(path.dirname(abs), { recursive: true });
  await fs.writeFile(abs, args.content, "utf-8");
  return {
    written: true,
    path: abs,
    bytes: Buffer.byteLength(args.content, "utf-8"),
    backup_path: backupPath,
  };
}
// One grep hit: absolute file path, 1-based line number, and the matching
// line truncated to 300 characters.
type GrepMatch = { file: string; line: number; content: string };

/** Reads a file as UTF-8 text; returns null for binary (NUL byte) or unreadable files. */
async function readTextForGrep(file: string): Promise<string | null> {
  try {
    const buf = await fs.readFile(file);
    if (buf.includes(0)) return null; // NUL byte => treat as binary, skip
    return buf.toString("utf-8");
  } catch {
    return null;
  }
}

/**
 * Appends matches of `regex` in `content` to `matches` (one per matching
 * line) until `limit` total matches are collected. Returns true when the
 * limit was hit, i.e. results are truncated.
 */
function scanLines(
  file: string,
  content: string,
  regex: RegExp,
  limit: number,
  matches: GrepMatch[],
): boolean {
  const lines = content.split("\n");
  for (let i = 0; i < lines.length; i++) {
    if (regex.test(lines[i])) {
      matches.push({ file, line: i + 1, content: lines[i].slice(0, 300) });
      if (matches.length >= limit) return true;
    }
  }
  return false;
}

/**
 * Recursive regex search over text files under the allowed roots (or a
 * single path). Skips SKIP_DIRS, hidden entries (except .env/.gitignore),
 * and binary files; stops at the result and file-count caps.
 *
 * The per-file read/scan logic previously appeared twice (directory walk
 * and file-root branch); it is now shared via the helpers above.
 */
export async function grepImpl(
  args: {
    pattern: string;
    path?: string;
    ignore_case?: boolean;
    max_results?: number;
  },
  ctx: { allowedRoots: string[]; maxGrepResults: number; maxGrepFiles: number },
) {
  let regex: RegExp;
  try {
    regex = new RegExp(args.pattern, args.ignore_case ? "i" : "");
  } catch (e) {
    return { error: `Invalid regex: ${e instanceof Error ? e.message : String(e)}` };
  }
  const startRoots = args.path
    ? [(await resolveSafe(args.path, ctx.allowedRoots)).abs]
    : ctx.allowedRoots;
  // Caller may request fewer results, never more than the configured cap.
  const limit = Math.min(args.max_results ?? ctx.maxGrepResults, ctx.maxGrepResults);
  const fileLimit = ctx.maxGrepFiles;
  const matches: GrepMatch[] = [];
  let filesScanned = 0;
  let truncated = false;

  async function walk(dir: string): Promise<void> {
    if (matches.length >= limit || filesScanned >= fileLimit) return;
    let entries;
    try {
      entries = await fs.readdir(dir, { withFileTypes: true });
    } catch {
      return; // unreadable directory: skip silently
    }
    for (const entry of entries) {
      if (matches.length >= limit || filesScanned >= fileLimit) {
        truncated = true;
        return;
      }
      if (SKIP_DIRS.has(entry.name)) continue;
      // Hidden entries are skipped except two commonly-grepped dotfiles.
      if (entry.name.startsWith(".") && entry.name !== ".env" && entry.name !== ".gitignore") continue;
      const full = path.join(dir, entry.name);
      if (entry.isDirectory()) {
        await walk(full);
      } else if (entry.isFile()) {
        filesScanned++;
        const content = await readTextForGrep(full);
        if (content === null) continue;
        if (scanLines(full, content, regex, limit, matches)) {
          truncated = true;
          return;
        }
      }
    }
  }

  for (const r of startRoots) {
    if (matches.length >= limit) break;
    const stat = await fs.stat(r).catch(() => null);
    if (!stat) continue;
    if (stat.isDirectory()) {
      await walk(r);
    } else if (stat.isFile()) {
      // A start root may itself be a file when `path` points at one.
      filesScanned++;
      const content = await readTextForGrep(r);
      if (content !== null && scanLines(r, content, regex, limit, matches)) {
        truncated = true;
      }
    }
  }

  return {
    pattern: args.pattern,
    matches,
    files_scanned: filesScanned,
    truncated,
  };
}
export { PathError };