scripts/smoke.mjs
#!/usr/bin/env node
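// Smoke test for the IONOS OpenAI-compatible chat completions endpoint.
// Usage: IONOS_API_KEY=... node scripts/smoke.mjs
// Optional overrides via environment: IONOS_API_BASE, IONOS_MODEL.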
const DEFAULT_BASE_URL = process.env.IONOS_API_BASE || "https://openai.inference.de-txl.ionos.com/v1";
const DEFAULT_MODEL = process.env.IONOS_MODEL || "openai/gpt-oss-120b";
const apiKey = process.env.IONOS_API_KEY;
async function main() {
  // Without an API key the smoke test is skipped rather than failed,
  // so runs without credentials still exit cleanly.
  if (!apiKey) {
    console.warn("[smoke] No IONOS_API_KEY found. Skipping test.");
    process.exit(0);
  }

  // Abort the request if the endpoint does not answer within 15 seconds.
  const controller = new AbortController();
  const timeoutId = setTimeout(() => controller.abort(), 15_000);

  try {
    const response = await fetch(`${DEFAULT_BASE_URL.replace(/\/$/, "")}/chat/completions`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${apiKey}`,
      },
      body: JSON.stringify({
        model: DEFAULT_MODEL,
        stream: false,
        messages: [
          {
            role: "user",
            content: "Say in one sentence that the connection works.",
          },
        ],
      }),
      signal: controller.signal,
    });

    if (!response.ok) {
      // Include a short snippet of the error body to make failures easier to diagnose.
      const snippet = (await response.text()).slice(0, 400);
      throw new Error(`IONOS API responded with ${response.status}: ${snippet}`);
    }

    const data = await response.json();
    // Pull the assistant message out of the first choice of the OpenAI-compatible response.
    const text = data?.choices?.[0]?.message?.content ?? "<no response>";
    console.log("[smoke] Success:", text);
  } catch (error) {
    console.error("[smoke] Error:", error);
    process.exitCode = 1;
  } finally {
    clearTimeout(timeoutId);
  }
}
main();