// Forked from lmstudio/openai-compat-endpoint
// src/config.ts
import { createConfigSchematics } from "@lmstudio/sdk";
// This file contains the definition of configuration schematics for your plugin.
// Selectable model presets. The sentinel value "custom" tells the plugin to
// read the model ID from the "customModelId" field instead.
const MODEL_OPTIONS = [
  { value: "gpt-4o", displayName: "GPT-4o (OpenAI)" },
  { value: "gpt-4o-mini", displayName: "GPT-4o Mini (OpenAI)" },
  { value: "claude-3-5-sonnet-20241022", displayName: "Claude 3.5 Sonnet (Anthropic)" },
  { value: "claude-3-5-haiku-20241022", displayName: "Claude 3.5 Haiku (Anthropic)" },
  { value: "llama-3.1-8b-instruct", displayName: "Llama 3.1 8B Instruct (Local)" },
  { value: "custom", displayName: "[Custom Model ID]" },
];

// Wire formats the upstream endpoint may speak.
const API_FORMAT_OPTIONS = [
  { value: "openai", displayName: "OpenAI Compatible" },
  { value: "anthropic", displayName: "Anthropic Compatible" },
];

/**
 * Per-chat configuration schematics for this plugin: model selection,
 * custom model ID override, API format, and sampling temperature.
 */
export const configSchematics = createConfigSchematics()
  .field(
    "modelList",
    "select",
    {
      displayName: "Model",
      subtitle: "Select a model or choose 'Custom Model ID' to enter your own below.",
      options: MODEL_OPTIONS,
    },
    // Defaults to the custom-entry sentinel; the customModelId default below
    // supplies the model actually used out of the box.
    "custom",
  )
  .field(
    "customModelId",
    "string",
    {
      displayName: "Custom Model ID",
      subtitle: "If you selected '[Custom Model ID]' above, enter the specific model ID here.",
      placeholder: "e.g. gpt-4 or local-model",
    },
    "gpt-4o",
  )
  .field(
    "apiFormat",
    "select",
    {
      displayName: "API Format",
      subtitle: "Select which API format to use.",
      options: API_FORMAT_OPTIONS,
    },
    "openai",
  )
  .field(
    "temperature",
    "numeric",
    {
      displayName: "Temperature",
      min: 0,
      step: 0.01,
      // Slider covers the common 0–2 range; the field itself has no upper bound.
      slider: { min: 0, max: 2, step: 0.01 },
      precision: 2,
      shortHand: "temp",
    },
    0.8,
  )
  .build();
/**
 * Global (per-installation) configuration schematics: the upstream endpoint
 * base URL and the provider API keys. Keys are marked `isProtected` so the
 * UI treats them as secrets.
 */
export const globalConfigSchematics = createConfigSchematics()
  .field(
    "endpointUrl",
    "string",
    {
      displayName: "Endpoint Base URL",
      subtitle: "The base URL for API calls. If you use a local LM Studio server, keep the default.",
      placeholder: "http://localhost:1234/v1/",
    },
    // Default targets a local LM Studio server's OpenAI-compatible endpoint.
    "http://localhost:1234/v1/",
  )
  .field(
    "openaiApiKey",
    "string",
    {
      displayName: "OpenAI API Key",
      isProtected: true,
      placeholder: "sk-...",
    },
    "",
  )
  .field(
    "anthropicApiKey",
    "string",
    {
      displayName: "Anthropic API Key",
      isProtected: true,
      // Fixed placeholder: Anthropic API keys are prefixed "sk-ant-", not "ant-".
      placeholder: "sk-ant-...",
    },
    "",
  )
  .build();