// Project Files
// src/tools/fileTools.ts
/**
* @file fileTools.ts
* File system operations: read, write, list, search, move, copy, delete, etc.
*/
import { text, tool, type Tool } from "@lmstudio/sdk";
import { rm, writeFile, readdir, readFile, stat, mkdir, rename, copyFile } from "fs/promises";
import { join, dirname, resolve, basename } from "path";
import { z } from "zod";
import { validatePath, type ToolContext } from "./shared";
export function createFileTools(ctx: ToolContext): Tool[] {
const tools: Tool[] = [];
tools.push(tool({
  name: "save_file",
  description: text`
Save content to a specified file in the current working directory.
This tool returns the full path to the saved file. You should then
output this full path to the user.
`,
  parameters: {
    file_name: z.string(),
    content: z.string(),
  },
  // Writes `content` to `file_name` (relative to CWD), creating parent
  // directories as needed, and returns the absolute path written.
  implementation: async ({ file_name, content }) => {
    // Reject blank names and names containing characters invalid on common filesystems.
    if (!file_name || file_name.trim().length === 0) return { error: "Filename cannot be empty" };
    if (/["<>|*?]/.test(file_name)) return { error: "Filename contains invalid characters" };
    const filePath = validatePath(ctx.cwd, file_name);
    await mkdir(dirname(filePath), { recursive: true });
    await writeFile(filePath, content, "utf-8");
    // Remember big files (>10k chars) so replace_text_in_file can be hinted
    // later instead of rewriting the whole file.
    if (content.length > 10000) {
      const tracked = (ctx.fullState.largeFilesSaved ??= []);
      if (!tracked.includes(filePath)) {
        tracked.push(filePath);
        await ctx.saveState();
      }
    }
    return { success: true, path: filePath };
  },
}));
tools.push(tool({
  name: "replace_text_in_file",
  description: text`
Replace a specific string in a file with a new string.
Useful for making small edits without rewriting the entire file.
Ensure 'old_string' matches exactly (including whitespace) or the replace will fail.
`,
  parameters: {
    file_name: z.string(),
    old_string: z.string().describe("The exact text to replace. Must be unique in the file."),
    new_string: z.string().describe("The text to insert in place of old_string."),
  },
  // Single-occurrence literal replacement; refuses ambiguous (multi-match) anchors.
  implementation: async ({ file_name, old_string, new_string }) => {
    try {
      if (!old_string) return { error: "old_string cannot be empty" };
      const filePath = validatePath(ctx.cwd, file_name);
      const content = await readFile(filePath, "utf-8");
      // Count non-overlapping literal occurrences so we can demand a unique anchor.
      let occurrences = 0;
      for (let at = content.indexOf(old_string); at !== -1; at = content.indexOf(old_string, at + old_string.length)) {
        occurrences++;
      }
      if (occurrences === 0) return { error: "Could not find the exact 'old_string' in the file. Please check whitespace and indentation." };
      if (occurrences > 1) return { error: `Found ${occurrences} occurrences of 'old_string'. Please provide more context (surrounding lines) in 'old_string' to make it unique.` };
      const updated = content.replace(old_string, new_string);
      await writeFile(filePath, updated, "utf-8");
      return { success: true, message: `Successfully replaced text in ${file_name}` };
    } catch (e) {
      return { error: `Failed to replace text: ${e instanceof Error ? e.message : String(e)}` };
    }
  },
}));
tools.push(tool({
  name: "list_directory",
  description: "List files and directories with metadata (type, size, modified date). Use to understand project structure.",
  parameters: {
    path: z.string().optional().describe("Subdirectory path relative to CWD. Defaults to current working directory."),
  },
  // Returns one entry per directory member; entries that cannot be stat'ed
  // (broken symlinks, permission errors) are reported with type "unknown".
  implementation: async ({ path }) => {
    const targetPath = path ? validatePath(ctx.cwd, path) : ctx.cwd;
    const names = await readdir(targetPath);
    const items = [];
    for (const name of names) {
      try {
        const info = await stat(join(targetPath, name));
        const isDir = info.isDirectory();
        items.push({
          name,
          type: isDir ? "dir" : "file",
          size: isDir ? undefined : info.size,
          // ISO timestamp trimmed to minute precision (YYYY-MM-DDTHH:MM).
          modified: info.mtime.toISOString().slice(0, 16),
        });
      } catch {
        items.push({ name, type: "unknown" as const });
      }
    }
    return { cwd: ctx.cwd, path: targetPath, count: items.length, entries: items };
  },
}));
tools.push(tool({
  name: "read_file",
  description: "Read the content of a file. Supports optional line range for large files (start_line/end_line).",
  parameters: {
    file_name: z.string().describe("File path relative to CWD."),
    start_line: z.number().int().min(1).optional().describe("First line to read (1-based). Omit to start from beginning."),
    end_line: z.number().int().min(1).optional().describe("Last line to read (1-based, inclusive). Omit to read to end."),
  },
  // Reads a text file, optionally restricted to a 1-based inclusive line range.
  // Whole-file reads are capped at 10MB and truncated at 6000 chars in the response.
  implementation: async ({ file_name, start_line, end_line }) => {
    const filePath = validatePath(ctx.cwd, file_name);
    const stats = await stat(filePath);
    // BUG FIX: the size guard previously ran unconditionally, so the error's own
    // advice ("use start_line/end_line") could never work — the ranged retry hit
    // the same guard. Only whole-file reads are refused now.
    if (stats.size > 10_000_000 && !start_line && !end_line) {
      return { error: "File too large (>10MB). Use start_line/end_line to read a portion." };
    }
    const buffer = await readFile(filePath);
    // Cheap binary sniff: a NUL byte in the first KiB means "not text".
    const checkBuffer = buffer.subarray(0, Math.min(buffer.length, 1024));
    if (checkBuffer.includes(0)) return { error: "File appears to be binary and cannot be read as text." };
    const fullContent = buffer.toString("utf-8");
    const allLines = fullContent.split("\n");
    const totalLines = allLines.length;
    if (start_line || end_line) {
      const from = Math.max(1, start_line ?? 1);
      const to = Math.min(totalLines, end_line ?? totalLines);
      // Previously an out-of-range or inverted range silently returned empty
      // content; report it explicitly so the model can correct itself.
      if (from > totalLines) return { error: `start_line (${from}) is past the end of the file (${totalLines} lines).` };
      if (from > to) return { error: `start_line (${from}) is greater than end_line (${to}).` };
      return { content: allLines.slice(from - 1, to).join("\n"), lines: { from, to, total: totalLines } };
    }
    if (fullContent.length > 6000) {
      return { content: fullContent.substring(0, 6000), truncated: true, total_lines: totalLines, hint: "File truncated at 6000 chars. Use start_line/end_line to read specific sections." };
    }
    return { content: fullContent, total_lines: totalLines };
  },
}));
// Directories that are never walked by search/tree tools (build output, VCS, envs).
const SKIP_DIRS = new Set(["node_modules", ".git", "dist", "build", ".next", "__pycache__", ".venv", "venv", ".lmstudio"]);
tools.push(tool({
  name: "grep_files",
  description: "Search for a text pattern across files in the project. Returns matching lines with file paths and line numbers.",
  parameters: {
    pattern: z.string().describe("Text or regex pattern to search for."),
    path: z.string().optional().describe("Subdirectory to search in (relative to CWD). Defaults to entire project."),
    file_glob: z.string().optional().describe("File glob filter, e.g. '*.ts', '*.py'. Defaults to all files."),
    max_results: z.number().int().min(1).max(100).optional().describe("Maximum number of matching lines to return. Default: 30."),
  },
  // Recursive, case-insensitive line search. Skips SKIP_DIRS, binary files, and
  // files >= 1MB; stops as soon as `limit` matching lines are collected.
  implementation: async ({ pattern, path, file_glob, max_results }) => {
    const searchDir = path ? validatePath(ctx.cwd, path) : ctx.cwd;
    const limit = max_results ?? 30;
    // BUG FIX: compile the pattern ONCE (it was rebuilt for every file), and turn
    // an invalid regex into a tool error instead of an uncaught throw. "i" only:
    // the old "gi" flag made test() stateful and needed manual lastIndex resets.
    let regex: RegExp;
    try {
      regex = new RegExp(pattern, "i");
    } catch (e) {
      return { error: `Invalid regex pattern: ${e instanceof Error ? e.message : String(e)}` };
    }
    const matchingLines: Array<{ file: string; line: number; text: string }> = [];
    const MAX_FILE_SIZE = 1_000_000;
    // Tiny glob dialect: "*.ext" (extension match), single-"*" prefix/suffix, or exact name.
    function matchesGlob(entry: string): boolean {
      if (!file_glob) return true;
      if (file_glob.startsWith("*.")) {
        const ext = file_glob.slice(1);
        const dot = entry.lastIndexOf(".");
        return (dot >= 0 ? entry.slice(dot) : "") === ext;
      }
      const starIdx = file_glob.indexOf("*");
      if (starIdx >= 0) {
        return entry.startsWith(file_glob.slice(0, starIdx)) && entry.endsWith(file_glob.slice(starIdx + 1));
      }
      return entry === file_glob;
    }
    async function walkDir(dir: string) {
      if (matchingLines.length >= limit) return;
      let entries: string[];
      try { entries = await readdir(dir); } catch { return; }
      for (const entry of entries) {
        if (matchingLines.length >= limit) return;
        const fullPath = join(dir, entry);
        try {
          const s = await stat(fullPath);
          if (s.isDirectory()) {
            if (!SKIP_DIRS.has(entry)) await walkDir(fullPath);
          } else if (s.isFile() && s.size < MAX_FILE_SIZE) {
            if (!matchesGlob(entry)) continue;
            const buf = await readFile(fullPath);
            // Binary sniff: NUL byte in the first 512 bytes.
            if (buf.subarray(0, 512).includes(0)) continue;
            const lines = buf.toString("utf-8").split("\n");
            for (let i = 0; i < lines.length; i++) {
              if (regex.test(lines[i])) {
                matchingLines.push({ file: fullPath.replace(ctx.cwd + "/", ""), line: i + 1, text: lines[i].trim().substring(0, 200) });
                if (matchingLines.length >= limit) return;
              }
            }
          }
        } catch { /* unreadable entry — skip */ }
      }
    }
    await walkDir(searchDir);
    return {
      pattern, matches: matchingLines.length, results: matchingLines,
      ...(matchingLines.length >= limit ? { truncated: true, hint: `Showing first ${limit} matches. Use file_glob or path to narrow search.` } : {}),
    };
  },
}));
tools.push(tool({
  name: "get_project_context",
  description: "Get a quick snapshot of the project: directory tree (2 levels), package.json summary, and key config files.",
  parameters: {},
  // Builds a project snapshot: indented tree (capped at 100 lines), package.json
  // summary, well-known config files present, and the current git branch.
  implementation: async () => {
    const snapshot: Record<string, unknown> = { cwd: ctx.cwd };
    // Render entries at depth 0..2; deeper levels are cut off.
    const listTree = async (dir: string, depth: number): Promise<string[]> => {
      if (depth > 2) return [];
      let names: string[];
      try { names = await readdir(dir); } catch { return []; }
      const out: string[] = [];
      for (const name of names) {
        if (SKIP_DIRS.has(name)) continue;
        const full = join(dir, name);
        try {
          const s = await stat(full);
          const indent = " ".repeat(depth);
          if (s.isDirectory()) {
            out.push(`${indent}${name}/`, ...(await listTree(full, depth + 1)));
          } else {
            out.push(`${indent}${name}`);
          }
        } catch { /* unreadable entry — skip */ }
      }
      return out;
    };
    const tree = await listTree(ctx.cwd, 0);
    snapshot.tree = tree.slice(0, 100).join("\n") + (tree.length > 100 ? "\n... (truncated)" : "");
    try {
      const pkg = JSON.parse(await readFile(join(ctx.cwd, "package.json"), "utf-8"));
      snapshot.package = {
        name: pkg.name,
        version: pkg.version,
        scripts: pkg.scripts ? Object.keys(pkg.scripts) : [],
        deps: pkg.dependencies ? Object.keys(pkg.dependencies).length : 0,
        devDeps: pkg.devDependencies ? Object.keys(pkg.devDependencies).length : 0,
      };
    } catch { /* no package.json */ }
    const configFiles = ["tsconfig.json", ".eslintrc.js", ".eslintrc.json", "vite.config.ts", "webpack.config.js", "pyproject.toml", "requirements.txt", "Cargo.toml", "go.mod", "Makefile", "Dockerfile", ".env.example"];
    const foundConfigs: string[] = [];
    for (const cf of configFiles) {
      try { await stat(join(ctx.cwd, cf)); foundConfigs.push(cf); } catch { /* not found */ }
    }
    if (foundConfigs.length > 0) snapshot.config_files = foundConfigs;
    // Branch name is read straight from .git/HEAD (symbolic ref form only).
    try {
      const head = await readFile(join(ctx.cwd, ".git", "HEAD"), "utf-8");
      const branchMatch = head.match(/ref: refs\/heads\/(.+)/);
      if (branchMatch) snapshot.git_branch = branchMatch[1].trim();
    } catch { /* not a git repo */ }
    return snapshot;
  },
}));
tools.push(tool({
  name: "make_directory",
  description: "Create a new directory in the current working directory.",
  parameters: { directory_name: z.string() },
  // mkdir -p semantics: intermediate directories are created and an existing
  // directory is not an error.
  implementation: async ({ directory_name }) => {
    const target = validatePath(ctx.cwd, directory_name);
    await mkdir(target, { recursive: true });
    return { success: true, path: target };
  },
}));
tools.push(tool({
  name: "delete_path",
  description: "Delete a file or directory in the current working directory. Be careful!",
  parameters: { path: z.string() },
  // rm -rf semantics: directories are removed recursively and a missing path
  // is not an error.
  implementation: async ({ path }) => {
    const target = validatePath(ctx.cwd, path);
    await rm(target, { recursive: true, force: true });
    return { success: true, path: target };
  },
}));
tools.push(tool({
  name: "delete_files_by_pattern",
  description: "Delete multiple files in the current directory that match a regex pattern.",
  parameters: { pattern: z.string().describe("Regex pattern to match filenames (e.g., '^auto_gen_.*\\.txt$')") },
  // Deletes regular files (non-recursive, CWD only) whose names match `pattern`.
  // Matching directories are skipped and reported, never deleted.
  implementation: async ({ pattern }) => {
    try {
      if (pattern.length > 100) return { error: "Pattern too complex (max 100 characters)" };
      const regex = new RegExp(pattern);
      // Crude ReDoS heuristic: a benign probe string must match near-instantly.
      const start = Date.now();
      regex.test("safe_test_string_for_redos_check_1234567890_safe_test_string_for_redos_check_1234567890");
      if (Date.now() - start > 100) return { error: "Pattern is too complex or slow (ReDoS protection)." };
      const entries = await readdir(ctx.cwd);
      const deleted: string[] = [];
      const skippedDirs: string[] = [];
      for (const entry of entries) {
        if (!regex.test(entry)) continue;
        const full = join(ctx.cwd, entry);
        // BUG FIX: rm without `recursive` throws ERR_FS_EISDIR on directories,
        // which previously aborted the loop after a partial delete. Only
        // regular files are removed now.
        const s = await stat(full).catch(() => null);
        if (!s || s.isDirectory()) {
          if (s) skippedDirs.push(entry);
          continue;
        }
        await rm(full, { force: true });
        deleted.push(entry);
      }
      return {
        deleted_count: deleted.length,
        deleted_files: deleted,
        ...(skippedDirs.length > 0 ? { skipped_directories: skippedDirs } : {}),
      };
    } catch (e) {
      return { error: `Failed to delete files: ${e instanceof Error ? e.message : String(e)}` };
    }
  },
}));
tools.push(tool({
  name: "move_file",
  description: "Move or rename a file or directory.",
  parameters: { source: z.string(), destination: z.string() },
  // rename() covers both moving and renaming; both endpoints are validated
  // against the sandbox root first.
  implementation: async ({ source, destination }) => {
    const from = validatePath(ctx.cwd, source);
    const to = validatePath(ctx.cwd, destination);
    await rename(from, to);
    return { success: true, from, to };
  },
}));
tools.push(tool({
  name: "copy_file",
  description: "Copy a file to a new location.",
  parameters: { source: z.string(), destination: z.string() },
  // Single-file copy; an existing destination is overwritten (copyFile default).
  implementation: async ({ source, destination }) => {
    const from = validatePath(ctx.cwd, source);
    const to = validatePath(ctx.cwd, destination);
    await copyFile(from, to);
    return { success: true, from, to };
  },
}));
tools.push(tool({
  name: "find_files",
  description: "Find files recursively in the current directory matching a glob pattern (e.g., '*.ts', 'src/**/*.json', 'README*').",
  parameters: {
    pattern: z.string().describe("Glob pattern to match filenames (e.g., '*.ts', '**/*.json', 'config.*'). Supports *, ?, and ** for recursive matching."),
    max_depth: z.number().optional().describe("Maximum depth to search (default: 5)"),
  },
  // Recursive glob search from CWD. Patterns containing "/" are matched against
  // the CWD-relative path; otherwise only against the basename.
  implementation: async ({ pattern, max_depth }) => {
    const depthLimit = max_depth ?? 5;
    const foundFiles: string[] = [];
    const hasPathSep = pattern.includes("/");
    // Glob -> regex. BUG FIX: "**/" must match ZERO or more directory segments —
    // the old "**" -> ".*" translation left a mandatory "/" behind, so
    // "src/**/*.json" could never match "src/a.json". Placeholders (\0, \x01)
    // keep the ** expansions from being re-processed by the "*" rule.
    const regexStr = pattern
      .replace(/[.+^${}()|[\]\\]/g, '\\$&')
      .replace(/\*\*\//g, '\0')
      .replace(/\*\*/g, '\x01')
      .replace(/\*/g, '[^/]*')
      .replace(/\?/g, '[^/]')
      .replace(/\0/g, '(?:[^/]*/)*')
      .replace(/\x01/g, '.*');
    const globRegex = new RegExp(`^${regexStr}$`, 'i');
    async function scan(dir: string, currentDepth: number) {
      if (currentDepth > depthLimit) return;
      try {
        const entries = await readdir(dir, { withFileTypes: true });
        for (const entry of entries) {
          const fullPath = join(dir, entry.name);
          if (entry.isDirectory()) {
            if (!SKIP_DIRS.has(entry.name)) await scan(fullPath, currentDepth + 1);
          }
          else if (entry.isFile()) {
            const relativePath = fullPath.replace(ctx.cwd + "/", "");
            const matchTarget = hasPathSep ? relativePath : entry.name;
            if (globRegex.test(matchTarget)) foundFiles.push(fullPath);
          }
        }
      } catch { /* ignore access errors */ }
    }
    await scan(ctx.cwd, 0);
    // `count` reflects ALL matches; the returned list is capped at 100 entries.
    return { found_files: foundFiles.slice(0, 100), count: foundFiles.length };
  },
}));
tools.push(tool({
  name: "get_file_metadata",
  description: "Get metadata (size, dates) for a specific file.",
  parameters: { path: z.string() },
  // stat() summary for one path; errors (missing path, permissions) are returned
  // as { error } rather than thrown.
  implementation: async ({ path }) => {
    try {
      const target = validatePath(ctx.cwd, path);
      const info = await stat(target);
      return {
        path: target,
        size: info.size,
        created: info.birthtime,
        modified: info.mtime,
        is_directory: info.isDirectory(),
        is_file: info.isFile(),
      };
    } catch (error) {
      return { error: `Failed to get metadata: ${error instanceof Error ? error.message : String(error)}` };
    }
  },
}));
tools.push(tool({
  name: "audit_html_assets",
  description: text`
Analyze an HTML file and report all referenced assets (images, CSS, JS, fonts, videos, audio).
Detects: duplicate references, missing files, and unused files in the directory.
Use this BEFORE making changes to avoid repeating assets.
`,
  parameters: {
    file_path: z.string().describe("Path to the HTML file to audit"),
  },
  // Cross-references the HTML's asset URLs against the files on disk next to it.
  implementation: async ({ file_path }) => {
    const fullPath = resolve(ctx.cwd, file_path);
    const html = await readFile(fullPath, "utf-8");
    const baseDir = dirname(fullPath);
    // Extract asset references: common attributes plus CSS url(...) values.
    const assetPatterns = [
      /(?:src|href|poster|data-src|data-bg|data-image|background)\s*=\s*["']([^"']+?)["']/gi,
      /url\(\s*["']?([^"')]+?)["']?\s*\)/gi,
    ];
    const referencedAssets: string[] = [];
    for (const pattern of assetPatterns) {
      let match;
      while ((match = pattern.exec(html)) !== null) {
        const ref = match[1].trim();
        // Skip external URLs, data URIs, anchors, protocols
        if (/^(https?:|data:|mailto:|tel:|#|javascript:)/i.test(ref)) continue;
        referencedAssets.push(ref);
      }
    }
    // Count occurrences of each raw reference.
    const refCounts = new Map<string, number>();
    for (const ref of referencedAssets) {
      refCounts.set(ref, (refCounts.get(ref) || 0) + 1);
    }
    // BUG FIX: strip query string / fragment before touching the filesystem.
    // "style.css?v=2" and "sprite.svg#icon" were previously resolved verbatim
    // and therefore always reported missing (and never matched as "used").
    const toFsPath = (ref: string) => ref.split(/[?#]/)[0];
    // Check which referenced files exist on disk.
    const missing: string[] = [];
    const duplicates: Array<{ file: string; count: number }> = [];
    for (const [ref, count] of refCounts) {
      const assetPath = resolve(baseDir, toFsPath(ref));
      try {
        await stat(assetPath);
      } catch {
        missing.push(ref);
      }
      if (count > 1) {
        duplicates.push({ file: ref, count });
      }
    }
    // Normalized reference set (query/hash stripped, optional "./" removed)
    // used to decide whether a media file on disk is unused.
    const normalizedRefs = new Set<string>();
    for (const ref of refCounts.keys()) {
      const p = toFsPath(ref);
      normalizedRefs.add(p);
      normalizedRefs.add(p.replace(/^\.\//, ""));
    }
    // Find unused media files in the directory tree (images/media only).
    const mediaExts = new Set([".jpg", ".jpeg", ".png", ".gif", ".webp", ".svg", ".mp4", ".webm", ".mov", ".mp3", ".wav", ".ogg", ".avif", ".bmp", ".tiff"]);
    const unused: string[] = [];
    const scanDir = async (dir: string, rel: string) => {
      try {
        const entries = await readdir(dir, { withFileTypes: true });
        for (const entry of entries) {
          if (entry.isDirectory()) {
            await scanDir(join(dir, entry.name), rel ? `${rel}/${entry.name}` : entry.name);
          } else {
            const ext = entry.name.toLowerCase().replace(/.*(\.[^.]+)$/, "$1");
            if (mediaExts.has(ext)) {
              const relPath = rel ? `${rel}/${entry.name}` : entry.name;
              if (!normalizedRefs.has(relPath)) {
                unused.push(relPath);
              }
            }
          }
        }
      } catch { /* skip unreadable dirs */ }
    };
    await scanDir(baseDir, "");
    return {
      total_references: referencedAssets.length,
      unique_assets: refCounts.size,
      duplicates: duplicates.length > 0 ? duplicates.sort((a, b) => b.count - a.count) : "none",
      missing_files: missing.length > 0 ? missing : "none",
      unused_media_files: unused.length > 0 ? unused : "none",
      all_referenced: [...refCounts.keys()].sort(),
    };
  },
}));
tools.push(tool({
  name: "plan_image_layout",
  description: text`
Deterministically assign images from a directory to a given number of sections/slides.
Guarantees NO image is repeated. Returns a JSON mapping of section index to assigned images.
Use this BEFORE generating HTML to plan which images go where.
The model should follow this plan exactly when creating the HTML.
`,
  parameters: {
    image_directory: z.string().describe("Path to the directory containing images (relative to CWD or absolute)."),
    section_count: z.number().int().min(1).describe("Number of sections/slides to distribute images across."),
    images_per_section: z.number().int().min(1).optional().describe("Max images per section (default: 1). Extra images go to gallery."),
    shuffle: z.boolean().optional().describe("Randomize image order before assigning (default: false — keeps alphabetical)."),
  },
  // Scans image_directory recursively and distributes the images round-robin
  // across sections 1..section_count without ever repeating an image.
  implementation: async ({ image_directory, section_count, images_per_section, shuffle }) => {
    const imgDir = resolve(ctx.cwd, image_directory);
    const maxPerSection = images_per_section ?? 1;
    const mediaExts = new Set([".jpg", ".jpeg", ".png", ".gif", ".webp", ".svg", ".avif", ".bmp", ".tiff"]);
    const allImages: string[] = [];
    // Collect image files as paths relative to imgDir.
    const collect = async (dir: string, rel: string): Promise<void> => {
      let entries;
      try {
        entries = await readdir(dir, { withFileTypes: true });
      } catch {
        return; // unreadable directory — skip
      }
      for (const entry of entries) {
        const relPath = rel ? `${rel}/${entry.name}` : entry.name;
        if (entry.isDirectory()) {
          await collect(join(dir, entry.name), relPath);
        } else {
          const ext = entry.name.toLowerCase().replace(/.*(\.[^.]+)$/, "$1");
          if (mediaExts.has(ext)) allImages.push(relPath);
        }
      }
    };
    await collect(imgDir, "");
    if (allImages.length === 0) {
      return { error: "No image files found in the specified directory." };
    }
    // Alphabetical by default for determinism; optional Fisher-Yates shuffle.
    const images = [...allImages];
    if (shuffle) {
      for (let i = images.length - 1; i > 0; i--) {
        const j = Math.floor(Math.random() * (i + 1));
        [images[i], images[j]] = [images[j], images[i]];
      }
    } else {
      images.sort();
    }
    // Round-robin assignment: image k goes to section (k % section_count) + 1,
    // capped at maxPerSection images per section. No image is ever reused.
    const layout: Record<number, string[]> = {};
    for (let s = 1; s <= section_count; s++) layout[s] = [];
    const assignable = Math.min(images.length, section_count * maxPerSection);
    for (let k = 0; k < assignable; k++) {
      layout[(k % section_count) + 1].push(images[k]);
    }
    const unassigned = images.slice(assignable);
    return {
      total_images: images.length,
      sections: section_count,
      images_per_section: maxPerSection,
      layout,
      unassigned_images: unassigned.length > 0 ? unassigned : "none",
      instruction: "Follow this layout exactly when generating HTML. Do NOT reuse any image across sections.",
    };
  },
}));
// Tool: change_directory — mutates ctx.cwd and persists it so subsequent tool
// calls (which all resolve paths against ctx.cwd) operate in the new location.
tools.push(tool({
name: "change_directory",
description: text`
Change the current working directory.
Returns the new current working directory.
`,
parameters: { directory: z.string() },
implementation: async ({ directory }) => {
// NOTE(review): unlike every other tool in this file, this uses resolve()
// rather than validatePath(), so it can move the CWD outside the sandbox
// root the other tools are confined to — confirm whether that is intentional.
const newPath = resolve(ctx.cwd, directory);
// Throws (e.g. ENOENT) if the path does not exist; the tool framework
// surfaces that to the caller.
const stats = await stat(newPath);
if (!stats.isDirectory()) throw new Error(`Path is not a directory: ${newPath}`);
const previousDirectory = ctx.cwd;
// Update the in-memory CWD and persist it so the location survives restarts.
ctx.cwd = newPath;
ctx.fullState.currentWorkingDirectory = newPath;
await ctx.saveState();
return { previous_directory: previousDirectory, current_directory: ctx.cwd };
},
}));
return tools;
}