// Project Files
// src/helpers/visionCapabilityPrimer.ts
/**
* Vision Capability Primer
*
* Workaround to enable vision capabilities in LM Studio chat UI.
*
* IMPORTANT:
* Newer LM Studio `lms` CLI versions may block during model discovery/authentication.
* Awaiting `lms load` during plugin startup can therefore cause the Plugin Loader to time out.
*
* Pattern used here:
* - `checkVisionPrimerStatus()` is fast (short timeouts) and safe to await during startup
* - `loadVisionPrimerModel()` is slow and should be fire-and-forget
*/
import { exec } from "child_process";
import { promisify } from "util";
import path from "path";
import os from "os";
// Promise-based exec so CLI invocations can be awaited (and given timeouts).
const execAsync = promisify(exec);
/** Configuration for the vision priming model */
export interface VisionPrimerConfig {
  /** Model key to load (e.g., "qwen/qwen3-vl-4b") */
  modelKey: string;
  /** Context length passed to `lms load --context-length` (default: 4096) */
  contextLength?: number;
  /** GPU mode passed to `--gpu`: "off" for CPU-only, "max" for full GPU, or a number 0-1 */
  gpuMode?: "off" | "max" | number;
  /** TTL in seconds passed to `--ttl` (default: 3600 = 1h) */
  ttlSeconds?: number;
  /** Unique identifier for API access, passed to `--identifier` (default: "vision-capability-priming") */
  identifier?: string;
}
/** Result of the priming operation */
export interface VisionPrimerResult {
  /** True when the primer model ended up loaded (or already was). */
  ok: boolean;
  /** True when the model was loaded before this call (no load performed). */
  alreadyLoaded?: boolean;
  /** Identifier the model instance is registered under. */
  identifier?: string;
  /** Human-readable model size parsed from `lms load` output, when present. */
  size?: string;
  /** Wall-clock load time in seconds, rounded to 2 decimal places. */
  loadTimeSec?: number;
  /** Raw error text when something failed. */
  error?: string;
  /** True when the model is not downloaded locally. */
  notInstalled?: boolean;
  /** True when `lms load` failed and the model did not end up loaded. */
  loadFailed?: boolean;
  /** True for environment problems (CLI missing, listing failed) — not user-actionable. */
  infrastructureError?: boolean;
  /** Markdown message suitable for showing directly to the user. */
  userFacingError?: string;
}
/**
 * Find the lms CLI path.
 *
 * Probes well-known install locations first, then falls back to a PATH
 * lookup. Every probe carries a short timeout so this stays safe to await
 * during plugin startup (see the header note about loader timeouts — the
 * original probes had no timeout and could hang indefinitely).
 *
 * @returns Absolute path to a runnable `lms` binary, or null if not found.
 */
export async function findLmsCli(): Promise<string | null> {
  const isWindows = process.platform === "win32";
  // NOTE(review): assumes the Windows install also lives under
  // ~/.lmstudio/bin as `lms.exe` — confirm against LM Studio docs.
  const binaryName = isWindows ? "lms.exe" : "lms";
  const candidates = [
    path.join(os.homedir(), ".lmstudio", "bin", binaryName),
    ...(isWindows ? [] : ["/usr/local/bin/lms", "/opt/homebrew/bin/lms"]),
  ];
  for (const candidate of candidates) {
    try {
      // `-h` is cheap and proves the binary is actually runnable.
      await execAsync(`"${candidate}" -h`, { timeout: 3000 });
      return candidate;
    } catch {
      // Not found or not runnable, try next
    }
  }
  try {
    // PATH lookup: `where` on Windows, `which` elsewhere.
    const { stdout } = await execAsync(isWindows ? "where lms" : "which lms", {
      timeout: 3000,
    });
    // `where` can print multiple matches; take the first non-empty line.
    const lmsPath = stdout
      .split(/\r?\n/)
      .map((line) => line.trim())
      .find(Boolean);
    if (lmsPath) return lmsPath;
  } catch {}
  return null;
}
/** Result of model installation check */
interface ModelInstallationResult {
  /** True when the model key was found in the local `lms ls` listing. */
  installed: boolean;
  /** Metadata from `lms ls --json`; the plain-text fallback fills modelKey only. */
  modelInfo?: {
    modelKey: string;
    displayName?: string;
    sizeBytes?: number;
    vision?: boolean;
  };
  /** Present when the check itself failed (exec error, unparseable output). */
  error?: string;
}
/**
 * Check if a model is installed locally using `lms ls --json`.
 *
 * NOTE: `lms ls --json` output size can be large; use a larger buffer.
 * Also fall back to plain `lms ls` if JSON is truncated/invalid.
 */
export async function isModelInstalled(
  lmsCli: string,
  modelKey: string,
  timeoutMs = 5000
): Promise<ModelInstallationResult> {
  const wanted = modelKey.toLowerCase().trim();
  try {
    const listing = await execAsync(`"${lmsCli}" ls --json`, {
      timeout: timeoutMs,
      maxBuffer: 10 * 1024 * 1024,
    });

    let parsed: any[];
    try {
      parsed = JSON.parse(listing.stdout);
    } catch (parseErr) {
      // JSON was truncated or malformed: fall back to plain-text `lms ls`
      // and do a best-effort substring search for the model key.
      try {
        const { stdout: textOut, stderr: textErr } = await execAsync(
          `"${lmsCli}" ls`,
          {
            timeout: Math.max(1000, timeoutMs),
            maxBuffer: 10 * 1024 * 1024,
          }
        );
        const combined = `${textOut}\n${textErr}`.toLowerCase();
        if (combined.includes(wanted)) {
          return { installed: true, modelInfo: { modelKey } };
        }
        return {
          installed: false,
          error: `Failed to parse lms ls --json output (fallback used): ${(parseErr as Error)?.message || parseErr}`,
        };
      } catch (fallbackErr: any) {
        return {
          installed: false,
          error: `Failed to parse lms ls --json output and fallback ls failed: ${(parseErr as Error)?.message || parseErr} / ${fallbackErr?.message || String(fallbackErr)}`,
        };
      }
    }

    if (!Array.isArray(parsed)) {
      return {
        installed: false,
        error: "lms ls --json returned non-array",
      };
    }

    // Exact (case-insensitive) match on the model key.
    for (const entry of parsed) {
      const key = String(entry?.modelKey || "").toLowerCase().trim();
      if (key === wanted) {
        return {
          installed: true,
          modelInfo: {
            modelKey: entry.modelKey,
            displayName: entry.displayName,
            sizeBytes: entry.sizeBytes,
            vision: entry.vision === true,
          },
        };
      }
    }
    return { installed: false };
  } catch (e: any) {
    return {
      installed: false,
      error: e?.message || String(e),
    };
  }
}
/**
 * Check if a model with the given identifier is already loaded using `lms ps --json`.
 */
export async function isModelLoaded(
  lmsCli: string,
  identifier: string,
  timeoutMs = 5000
): Promise<boolean> {
  const wanted = identifier.toLowerCase().trim();

  let raw: string;
  try {
    ({ stdout: raw } = await execAsync(`"${lmsCli}" ps --json`, {
      timeout: timeoutMs,
    }));
  } catch {
    // CLI failed or timed out — treat as "not loaded".
    return false;
  }

  let parsed: unknown;
  try {
    parsed = JSON.parse(raw);
  } catch {
    return false;
  }
  if (!Array.isArray(parsed)) {
    return false;
  }

  for (const inst of parsed) {
    const instId = String((inst as any)?.identifier || "").toLowerCase().trim();
    if (instId === wanted) {
      return true;
    }
  }
  return false;
}
/** Result of quick status check (fast, can be awaited during startup) */
export interface VisionPrimerQuickCheck {
  /** Resolved path to the `lms` CLI, or null when not found. */
  lmsCli: string | null;
  /** True when the model is downloaded locally. */
  installed: boolean;
  /** True when an instance with the configured identifier is already loaded. */
  alreadyLoaded: boolean;
  /** True when the caller should fire-and-forget loadVisionPrimerModel(). */
  needsLoad: boolean;
  /** True for environment problems (CLI missing, listing failed) — not user-actionable. */
  infrastructureError?: boolean;
  /** Markdown message suitable for showing directly to the user. */
  userFacingError?: string;
  /** Raw error text when something failed. */
  error?: string;
  /** True when the model is not downloaded locally. */
  notInstalled?: boolean;
}
/**
 * Quick status check for vision primer (FAST - safe to await during startup).
 * Checks CLI availability, installation status, and loaded status.
 * Does NOT attempt to load the model.
 */
export async function checkVisionPrimerStatus(
  config: VisionPrimerConfig
): Promise<VisionPrimerQuickCheck> {
  const { modelKey, identifier = "vision-capability-priming" } = config;
  console.debug("[VisionPrimer] Quick status check...");

  // Common shape for all "cannot load" outcomes below.
  const notUsable = { installed: false, alreadyLoaded: false, needsLoad: false };

  const lmsCli = await findLmsCli();
  if (!lmsCli) {
    console.warn("[VisionPrimer] lms CLI not found (infrastructure error, silent)");
    return {
      lmsCli: null,
      ...notUsable,
      infrastructureError: true,
      error: "lms CLI not found. Is LM Studio installed?",
    };
  }

  const install = await isModelInstalled(lmsCli, modelKey, 5000);
  if (install.error) {
    console.warn("[VisionPrimer] Installation check failed:", install.error);
    return {
      lmsCli,
      ...notUsable,
      infrastructureError: true,
      error: `Installation check failed: ${install.error}`,
    };
  }
  if (!install.installed) {
    console.warn(`[VisionPrimer] Model not installed: ${modelKey}`);
    return {
      lmsCli,
      ...notUsable,
      notInstalled: true,
      userFacingError: `**Vision Attachment Support:**\n\nThe vision-capability-priming model \`${modelKey}\` is not installed.\n\nThis model enables image attachments in the chat UI. To install it:\n1. Open LM Studio\n2. Search for \`${modelKey}\`\n3. Download the model\n\nWithout this model, you can still use text prompts.`,
      error: `Model '${modelKey}' is not installed locally.`,
    };
  }

  if (await isModelLoaded(lmsCli, identifier, 5000)) {
    console.debug(`[VisionPrimer] Model already loaded with identifier: ${identifier}`);
    return { lmsCli, installed: true, alreadyLoaded: true, needsLoad: false };
  }

  // Installed but not loaded: caller should fire-and-forget the load.
  return { lmsCli, installed: true, alreadyLoaded: false, needsLoad: true };
}
/**
 * Load the vision primer model to prime vision capabilities in the
 * LM Studio UI (SLOW - should be fire-and-forget).
 * Only call this after checkVisionPrimerStatus() returns needsLoad: true.
 *
 * @param lmsCli  Absolute path to the `lms` CLI (from findLmsCli()).
 * @param config  Model key plus optional load tuning (context, GPU, TTL, identifier).
 * @returns Result describing success, failure, or "already loaded".
 */
export async function loadVisionPrimerModel(
  lmsCli: string,
  config: VisionPrimerConfig
): Promise<VisionPrimerResult> {
  const startTs = Date.now();
  const {
    modelKey,
    contextLength = 4096,
    gpuMode = "off",
    ttlSeconds = 3600,
    identifier = "vision-capability-priming",
  } = config;
  // Translate gpuMode into CLI flag form ("off" | "max" | fractional 0-1).
  const gpuArg =
    gpuMode === "off"
      ? "--gpu off"
      : gpuMode === "max"
        ? "--gpu max"
        : `--gpu ${gpuMode}`;
  // Fix: quote modelKey — it is interpolated into a shell command like lmsCli
  // and identifier are, so an unquoted key containing spaces or shell
  // metacharacters would break or be misparsed.
  const cmd = [
    `"${lmsCli}"`,
    "load",
    `"${modelKey}"`,
    `--context-length ${contextLength}`,
    gpuArg,
    `--ttl ${ttlSeconds}`,
    `--identifier "${identifier}"`,
  ].join(" ");
  console.debug("[VisionPrimer] Running:", cmd);
  try {
    // 120s budget: model load is slow — this is why callers must not await
    // this during plugin startup (see file header).
    const { stdout, stderr } = await execAsync(cmd, {
      timeout: 120_000,
    });
    const output = stdout + stderr;
    const loadTimeSec = Math.round(((Date.now() - startTs) / 1000) * 100) / 100;
    // Best-effort extraction of a "(<number> <unit>)" size token from CLI output.
    const sizeMatch = output.match(/\(([0-9.]+ [A-Z]+)\)/i);
    const size = sizeMatch ? sizeMatch[1] : undefined;
    console.debug(`[VisionPrimer] ✓ Model loaded in ${loadTimeSec}s`);
    return { ok: true, alreadyLoaded: false, identifier, size, loadTimeSec };
  } catch (e: any) {
    const error = e?.stderr || e?.message || String(e);
    // A failed exec doesn't always mean the model isn't loaded — re-check
    // before reporting failure.
    console.debug(
      "[VisionPrimer] Load command failed, checking if model is now loaded anyway..."
    );
    const nowLoaded = await isModelLoaded(lmsCli, identifier, 5000);
    if (nowLoaded) {
      return { ok: true, alreadyLoaded: true, identifier };
    }
    console.warn("[VisionPrimer] Failed:", error);
    return { ok: false, loadFailed: true, error };
  }
}
/**
 * Back-compat convenience wrapper.
 * WARNING: Awaiting this during plugin startup may cause loader timeouts.
 * Prefer: checkVisionPrimerStatus() + loadVisionPrimerModel() (fire-and-forget).
 */
export async function primeVisionCapability(
  config: VisionPrimerConfig
): Promise<VisionPrimerResult> {
  const status = await checkVisionPrimerStatus(config);

  // CLI missing entirely — infrastructure problem.
  if (!status.lmsCli) {
    return {
      ok: false,
      infrastructureError: true,
      error: status.error || "lms CLI not found",
    };
  }

  // Something the user can act on (e.g. model not installed).
  if (status.userFacingError) {
    return {
      ok: false,
      notInstalled: status.notInstalled,
      userFacingError: status.userFacingError,
      error: status.error,
    };
  }

  if (status.alreadyLoaded) {
    return { ok: true, alreadyLoaded: true, identifier: config.identifier };
  }

  return status.needsLoad
    ? await loadVisionPrimerModel(status.lmsCli, config)
    : { ok: false, error: status.error || "Unknown primer state" };
}
/** Unload the vision priming model */
export async function unloadVisionPrimer(
  identifier = "vision-capability-priming"
): Promise<boolean> {
  const cli = await findLmsCli();
  if (cli === null) {
    return false;
  }
  try {
    await execAsync(`"${cli}" unload "${identifier}"`);
  } catch (e: any) {
    console.warn("[VisionPrimer] Failed to unload:", e.message || e);
    return false;
  }
  console.debug(`[VisionPrimer] ✓ Unloaded: ${identifier}`);
  return true;
}