// Forked from tupik/openai-compat-endpoint
// src/config.ts
import { createConfigSchematics } from "@lmstudio/sdk";
/**
 * Global (plugin-level) configuration schema: the API credential and the
 * base URL of the OpenAI-compatible endpoint. Both fields default to
 * OpenRouter-flavored values; `apiKey` is marked protected so the UI
 * treats it as a secret.
 */
export const globalConfigSchematics = createConfigSchematics()
  .field(
    "apiKey",
    "string",
    {
      displayName: "API Key",
      isProtected: true, // secret value — do not display in plain text
      placeholder: "sk-or-v1-...",
    },
    "", // no key by default; user must supply one
  )
  .field(
    "baseUrl",
    "string",
    {
      displayName: "Base URL",
      subtitle: "Base URL for API calls.",
      placeholder: "https://openrouter.ai/api/v1",
    },
    "https://openrouter.ai/api/v1", // defaults to OpenRouter
  )
  .build();
/**
 * Per-instance configuration schema: which upstream model to use and
 * whether to emit verbose debug logging.
 */
export const configSchematics = createConfigSchematics()
  .field(
    "model",
    "string",
    {
      displayName: "Model",
      subtitle: "OpenAI/OpenRouter model ID (e.g., allenai/molmo-2-8b:free)",
      placeholder: "bytedance-seed/seed-1.6-flash",
    },
    "", // empty by default; user picks a model
  )
  .field(
    "debug",
    "boolean",
    {
      displayName: "Debug Logging",
      subtitle: "Log request payloads and internal debug details.",
    },
    false, // quiet by default
  )
  .build();