// src/config.ts
import { createConfigSchematics } from "@lmstudio/sdk";
/** One entry in the "model" dropdown: SDK model key plus human-readable label. */
type ModelOption = { value: string; displayName: string };

// Static model list - update manually when models change.
// Typed via `satisfies` as a non-empty tuple so `modelOptions[0]` is
// statically safe (even under `noUncheckedIndexedAccess`) while the
// literal strings keep their narrow inferred types.
const modelOptions = [
  { value: "allenai/olmo-3-32b-think", displayName: "OLMo 3 32B Think (32B)" },
  { value: "essentialai/rnj-1", displayName: "RNJ-1 (8.3B)" },
  { value: "liquid/lfm2.5-1.2b", displayName: "LFM 2.5 1.2B (1.2B)" },
  { value: "zai-org/glm-4.7-flash", displayName: "GLM 4.7 Flash (30B)" },
] satisfies [ModelOption, ...ModelOption[]];
/**
 * Per-chat (instance) configuration: which model to use plus the sampling
 * parameters forwarded to the prediction call.
 */
export const configSchematics = createConfigSchematics()
  // === MODEL SELECTION ===
  .field("model", "select", {
    displayName: "Model",
    options: modelOptions,
  }, modelOptions[0].value)
  // === SAMPLING ===
  .field("temperature", "numeric", {
    displayName: "Temperature",
    min: 0,
    max: 2,
    step: 0.01,
    slider: { min: 0, max: 2, step: 0.01 },
    precision: 2,
  }, 0.7)
  .field("topP", "numeric", {
    displayName: "Top P",
    subtitle: "0 = disabled",
    min: 0,
    max: 1,
    step: 0.01,
    precision: 2,
  }, 0.9)
  .field("topK", "numeric", {
    displayName: "Top K",
    subtitle: "0 = disabled",
    min: 0,
    step: 1,
  }, 40)
  .field("maxTokens", "numeric", {
    displayName: "Max Tokens",
    subtitle: "0 = unlimited",
    min: 0,
    step: 100,
  }, 0)
  // Previously a free-text "string" field whose legal values were only
  // listed in the subtitle; a "select" (same pattern as "model" above)
  // rules out typos like "rolling-window". The stored value is still the
  // same string, so existing saved configs remain valid.
  .field("contextOverflowPolicy", "select", {
    displayName: "Context Overflow",
    options: [
      { value: "stopAtLimit", displayName: "Stop At Limit" },
      { value: "truncateMiddle", displayName: "Truncate Middle" },
      { value: "rollingWindow", displayName: "Rolling Window" },
    ],
  }, "rollingWindow")
  .build();
/**
 * App-wide configuration: how to reach the LM Studio server, how this
 * client identifies itself, and the GPU offload ratio used for loads.
 */
const remoteUrlOptions = {
  displayName: "Remote LM Studio URL",
  placeholder: "ws://192.168.1.212:1234",
};
const clientIdOptions = {
  displayName: "Client ID",
  subtitle: "Leave empty for auto",
};
const passkeyOptions = {
  displayName: "Client Passkey",
  isProtected: true,
};
const gpuOffloadOptions = {
  displayName: "GPU Offload",
  min: 0,
  max: 1,
  step: 0.1,
  precision: 1,
};

export const globalConfigSchematics = createConfigSchematics()
  .field("baseUrl", "string", remoteUrlOptions, "ws://127.0.0.1:1234")
  .field("clientIdentifier", "string", clientIdOptions, "")
  .field("clientPasskey", "string", passkeyOptions, "")
  .field("gpuOffloadRatio", "numeric", gpuOffloadOptions, 1.0)
  .build();