// Project Files
// src/tools/zoom-in.ts
import { tool, type Tool, type ToolsProviderController } from "@lmstudio/sdk";
import { z } from "zod";
import { ZoomInToolParamsShape, formatToolMetaBlock, setActiveChatContext, syncAttachmentsToState } from "../core-bundle.mjs";
import path from "path";
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { quality: _hiddenQuality, prompt: _hiddenPrompt, model: _hiddenModel, ...ZoomInToolParamsShapeAgent } = ZoomInToolParamsShape;
export function createZoomInTool(ctl: ToolsProviderController): Tool {
return tool({
name: "zoom-in",
description: `Re-render the cropped canvas region at full resolution via Draw Things image2image.
Workflow: First use 'crop' (or 'detect_object') to select the region, then call 'zoom-in' on the source canvas.
zoom-in reads the stored crop metadata automatically — no crop parameters needed.
Alternatively, use detectLabel to target a detected object directly without a prior crop step:
- detectLabel: label text to match (e.g. 'cat', 'a person') — requires a prior detect_object run.
- detectIndex: zero-based index when multiple matches exist (default: 0).
- frameAdjust: expand (+) or shrink (-) the bounding box before rendering. Number = % of bbox dimensions (preserves AR); string with 'px' suffix = absolute pixel margin. E.g. 5, -3, '20px'.
canvas may be the original source (e.g. 'a1') or the annotated detect_object result (e.g. 'i3').
Example: canvas='i3', detectLabel='cat', frameAdjust=8
Parameters:
- canvas: Source image. Notation: 'a1', 'v2', 'p1', 'i3'.
- width / height: Output dimensions. Default: derived from canvas aspect ratio.
- imageFormat: Target aspect ratio ('square', 'landscape', 'portrait', '16:9'). Expands the crop region to match the AR before re-rendering.
Returns: Inline preview, JSON with file URLs, crop info, model info, and clickable links.
${formatToolMetaBlock()}`,
parameters: ZoomInToolParamsShapeAgent as Record<string, z.ZodTypeAny>,
implementation: async (args: any, ctx: any) => {
const onProgress = (step: number, totalSteps: number | undefined, message?: string) => {
try {
if (step === -1 && message) { ctx.status(message); return; }
const prefix = message ? `${message} ` : "";
if (totalSteps && totalSteps > 0) {
ctx.status(`${prefix}Step ${step}/${totalSteps} (${Math.round((step / (totalSteps + 1)) * 100)}%)`);
} else {
ctx.status(`${prefix}Step ${step}...`);
}
} catch {}
};
try {
const workingDir = ctl.getWorkingDirectory();
if (typeof workingDir === "string" && workingDir.trim().length > 0) {
const chatId = path.basename(workingDir);
if (/^\d+$/.test(chatId)) {
setActiveChatContext({ chatId, workingDir, requestId: `tool-${Date.now()}` });
}
}
} catch {}
try {
const workingDir = ctl.getWorkingDirectory();
if (typeof workingDir === "string" && workingDir.trim().length > 0) {
await syncAttachmentsToState(workingDir, false, Number.MAX_SAFE_INTEGER);
}
} catch {}
const mod = await import("../core/tools.js");
const resp = await mod.handleZoomIn(args, onProgress);
if (resp && Array.isArray(resp.content)) return resp.content;
return typeof resp === "string" ? resp : JSON.stringify(resp);
},
});
}