// openai-compat-endpoint-ionos (Public)
// Forked from lmstudio/openai-compat-endpoint
// File: src/config.ts

import { createConfigSchematics } from "@lmstudio/sdk";

// Default model served by the IONOS AI Model Hub endpoint.
const DEFAULT_MODEL = "openai/gpt-oss-120b";
// Default OpenAI-compatible base URL (IONOS AI Model Hub, Berlin/de-txl region).
const DEFAULT_API_BASE = "https://openai.inference.de-txl.ionos.com/v1";

/**
 * Per-chat configuration schema.
 *
 * Exposes two user-tunable fields:
 * - `model`: which IONOS-hosted model to use (defaults to gpt-oss-120b).
 * - `apiBaseUrl`: the OpenAI-compatible endpoint base URL (defaults to the
 *   IONOS AI Model Hub; only needs changing for a custom gateway).
 */
export const configSchematics = createConfigSchematics()
  .field(
    "model",
    "select",
    {
      displayName: "IONOS Modell",
      subtitle: "Standard ist gpt-oss-120b; passe optional an.",
      // NOTE(review): presumably the models offered by the IONOS AI Model
      // Hub — confirm this list against the provider's current catalog.
      options: [
        {
          value: DEFAULT_MODEL,
          displayName: "openai/gpt-oss-120b (IONOS)",
        },
        {
          value: "meta-llama/Llama-3.3-70B-Instruct",
          displayName: "meta-llama/Llama-3.3-70B-Instruct",
        },
        {
          value: "gpt-4.1-2025-04-14",
          displayName: "gpt-4.1-2025-04-14",
        },
        {
          value: "gpt-4.1-mini-2025-04-14",
          displayName: "gpt-4.1-mini-2025-04-14",
        },
      ],
    },
    DEFAULT_MODEL,
  )
  .field(
    "apiBaseUrl",
    "string",
    {
      displayName: "API Base URL",
      subtitle:
        "Standard: IONOS AI Model Hub. Nur aendern, wenn du ein eigenes Gateway nutzt.",
      placeholder: DEFAULT_API_BASE,
    },
    DEFAULT_API_BASE,
  )
  .build();

/**
 * Global (per-installation) configuration schema.
 *
 * Holds the IONOS API key. The field is marked protected so the value is
 * masked in the UI and, per the subtitle, not meant to be versioned.
 */
export const globalConfigSchematics = createConfigSchematics()
  .field(
    "openaiApiKey",
    "string",
    {
      isProtected: true,
      displayName: "IONOS API Key",
      subtitle: "IONOS API Key - bitte hier eintragen; wird nicht versioniert.",
      placeholder: "ionos-...",
    },
    // Default to an empty key; the user must supply their own.
    "",
  )
  .build();

export const CONFIG_DEFAULTS = {
  DEFAULT_MODEL,
  DEFAULT_API_BASE,
};