// Project file: dist/generator.js
"use strict";
// TypeScript compiler helper: wraps a CommonJS module so it can be consumed
// via a `default` import (no-op when the module is already an ES module).
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.generate = generate;
const openai_1 = __importDefault(require("openai"));
const config_1 = require("./config");
const inventory_1 = require("./inventory");
const modelCache_1 = require("./modelCache");
// In-memory cache of the most recently fetched model inventory.
// `inFlight` holds the active refresh promise so concurrent callers can await
// the same request instead of issuing duplicate fetches.
const modelsCache = {
baseUrl: "",
inventoryMode: "health",
models: [],
lastFetchedAt: 0,
inFlight: null,
};
// Ensures the "Inventory refreshed" notice is only emitted once per process.
let refreshNotified = false;
// Cache freshness window: skip non-forced refreshes newer than this.
const modelsRefreshIntervalMs = 15_000;
/**
 * Compare two model lists element-wise by `id` and `displayName`.
 *
 * @param {Array} a - First model list.
 * @param {Array} b - Second model list.
 * @returns {boolean} True when both lists have the same length and matching
 *     id/displayName pairs at every position.
 */
function sameModelList(a, b) {
    if (a.length !== b.length) {
        return false;
    }
    return a.every((model, index) => model?.id === b[index]?.id && model?.displayName === b[index]?.displayName);
}
/**
 * Extract the `id` field from each model entry.
 *
 * @param {Array} models - Model descriptors with an `id` property.
 * @returns {Array<string>} The ids in the same order as the input.
 */
function modelIds(models) {
    const ids = [];
    for (const model of models) {
        ids.push(model.id);
    }
    return ids;
}
/**
 * Refresh the cached model inventory from the shim when it is stale or the
 * caller forces it. Concurrent callers are deduplicated via
 * `modelsCache.inFlight`; failures keep the previous cache intact.
 *
 * Fix: the generator-model, embed-model, and preferred-hosted-model requests
 * are independent, so they are now issued in parallel with `Promise.all`
 * instead of being awaited sequentially.
 *
 * @param {string} shimBaseUrl - Base URL of the shim server.
 * @param {string} inventoryMode - Inventory endpoint mode (e.g. "health").
 * @param {string|undefined} currentModel - Currently selected model id, if any.
 * @param {boolean} forceRefresh - Bypass the freshness window and list comparison.
 * @returns {Promise<{models: Array, didUpdate: boolean}>} The cached model list
 *     and whether this call updated the cache/options.
 */
async function refreshModelOptionsIfNeeded(shimBaseUrl, inventoryMode, currentModel, forceRefresh) {
    const inventoryUrl = (0, inventory_1.buildInventoryUrl)(shimBaseUrl, inventoryMode);
    if (!inventoryUrl) {
        console.info("Model refresh skipped: shimBaseUrl is empty.");
        return { models: modelsCache.models, didUpdate: false };
    }
    if (typeof fetch !== "function") {
        console.warn("Fetch is not available; skipping model list refresh.");
        return { models: modelsCache.models, didUpdate: false };
    }
    const now = Date.now();
    if (modelsCache.inFlight) {
        // Another caller is already refreshing; piggyback on its result.
        // NOTE(review): this path reports didUpdate=false even when the awaited
        // refresh did update the cache — confirm callers tolerate that.
        await modelsCache.inFlight;
        return { models: modelsCache.models, didUpdate: false };
    }
    if (!forceRefresh &&
        modelsCache.baseUrl === inventoryUrl &&
        modelsCache.inventoryMode === inventoryMode &&
        now - modelsCache.lastFetchedAt < modelsRefreshIntervalMs) {
        // Cache is still fresh for this exact inventory; skip the round trip.
        return { models: modelsCache.models, didUpdate: false };
    }
    let didUpdate = false;
    modelsCache.inFlight = (async () => {
        try {
            // The three shim requests are independent: fetch them in parallel.
            // The preferred-model lookup is best-effort and falls back to
            // undefined on failure, as before.
            const [{ models }, { models: embedModels }, preferredHostedModel] = await Promise.all([
                (0, inventory_1.fetchGeneratorModelIds)(shimBaseUrl, inventoryMode),
                (0, inventory_1.fetchEmbedModelIds)(shimBaseUrl, inventoryMode),
                (0, inventory_1.fetchPreferredHostedModelId)(shimBaseUrl).catch(() => undefined),
            ]);
            const ids = modelIds(models);
            if (currentModel && currentModel !== config_1.placeholderModelValue && !ids.includes(currentModel)) {
                console.info(`Selected model ${currentModel} is not present in shim inventory.`);
            }
            if (!models.length) {
                console.info(`Model refresh returned no models from ${inventoryUrl}`);
                return;
            }
            if (!forceRefresh &&
                modelsCache.baseUrl === inventoryUrl &&
                modelsCache.inventoryMode === inventoryMode &&
                sameModelList(modelsCache.models, models)) {
                // Same list as before: just bump the freshness timestamp.
                modelsCache.lastFetchedAt = now;
                return;
            }
            modelsCache.baseUrl = inventoryUrl;
            modelsCache.inventoryMode = inventoryMode;
            modelsCache.models = models;
            modelsCache.lastFetchedAt = now;
            // Keep the user's current selection when still available, otherwise
            // fall back to the shim's preferred hosted model, otherwise clear it.
            const lastSelected = currentModel && currentModel !== config_1.placeholderModelValue && ids.includes(currentModel)
                ? currentModel
                : preferredHostedModel && ids.includes(preferredHostedModel)
                    ? preferredHostedModel
                    : "";
            (0, config_1.updateModelOptions)(models, lastSelected ?? "");
            (0, config_1.updateEmbedModelOptions)(embedModels, "");
            await (0, modelCache_1.writeCachedState)(models, embedModels, lastSelected, undefined, inventoryMode, shimBaseUrl);
            didUpdate = true;
            console.info(`Model options refreshed from ${inventoryUrl}`);
        }
        catch (error) {
            // Keep the previous cache on failure; callers observe didUpdate=false.
            console.warn("Failed to refresh model list:", error);
        }
        finally {
            modelsCache.inFlight = null;
        }
    })();
    await modelsCache.inFlight;
    return { models: modelsCache.models, didUpdate };
}
/**
 * Build an OpenAI client pointed at the configured shim, defaulting to the
 * local shim address when no base URL is configured.
 *
 * @param globalConfig - Global plugin config providing "shimBaseUrl".
 * @returns {openai_1.default} A client with a placeholder local API key.
 */
function createOpenAI(globalConfig) {
    const configuredUrl = globalConfig.get("shimBaseUrl");
    const baseURL = (0, inventory_1.normalizeBaseUrl)(configuredUrl) || "http://127.0.0.1:9000/v1";
    return new openai_1.default({ apiKey: "local", baseURL });
}
/**
 * Convert the plugin's chat history into OpenAI chat-completion messages.
 * Assistant tool-call requests become `tool_calls` entries (omitted when
 * empty); each tool result expands into its own `tool` role message.
 *
 * @param history - Iterable of history messages exposing getRole/getText/
 *     getToolCallRequests/getToolCallResults.
 * @returns {Array} OpenAI-shaped message objects.
 */
function toOpenAIMessages(history) {
    const messages = [];
    for (const message of history) {
        const role = message.getRole();
        if (role === "system") {
            messages.push({ role: "system", content: message.getText() });
        }
        else if (role === "user") {
            messages.push({ role: "user", content: message.getText() });
        }
        else if (role === "assistant") {
            const toolCalls = message.getToolCallRequests().map(request => ({
                id: request.id ?? "",
                type: "function",
                function: {
                    name: request.name,
                    arguments: JSON.stringify(request.arguments ?? {}),
                },
            }));
            const assistantMessage = { role: "assistant", content: message.getText() };
            if (toolCalls.length) {
                assistantMessage.tool_calls = toolCalls;
            }
            messages.push(assistantMessage);
        }
        else if (role === "tool") {
            for (const toolCallResult of message.getToolCallResults()) {
                messages.push({
                    role: "tool",
                    tool_call_id: toolCallResult.toolCallId ?? "",
                    content: toolCallResult.content,
                });
            }
        }
    }
    return messages;
}
/**
 * Map the controller's tool definitions into OpenAI function-tool descriptors.
 *
 * @param ctl - Controller exposing getToolDefinitions().
 * @returns {Array|undefined} Tool descriptors, or undefined when there are none
 *     (so the "tools" field can be omitted from the request).
 */
function toOpenAITools(ctl) {
    const definitions = ctl.getToolDefinitions();
    if (definitions.length === 0) {
        return undefined;
    }
    return definitions.map(definition => ({
        type: "function",
        function: {
            name: definition.function.name,
            description: definition.function.description,
            parameters: definition.function.parameters ?? {},
        },
    }));
}
/**
 * Register an abort handler so a user-initiated abort on the controller
 * cancels the underlying OpenAI stream.
 *
 * @param ctl - Controller exposing onAborted(callback).
 * @param stream - OpenAI stream whose controller supports abort().
 */
function wireAbort(ctl, stream) {
    const handleAbort = () => {
        console.info("Generation aborted by user.");
        stream.controller.abort();
    };
    ctl.onAborted(handleAbort);
}
/**
 * Parse an accumulated tool-call argument string as JSON.
 * Empty input and malformed JSON both yield an empty object; parse failures
 * are logged rather than thrown so generation can continue.
 *
 * @param {string} raw - Concatenated argument fragments from the stream.
 * @returns {object} The parsed arguments, or {} on empty/invalid input.
 */
function parseToolArguments(raw) {
    if (!raw.length) {
        return {};
    }
    try {
        return JSON.parse(raw);
    }
    catch (error) {
        console.warn("Failed to parse tool call arguments as JSON; falling back to empty object.", error);
        return {};
    }
}
/**
 * Consume a streaming chat-completion response, forwarding text fragments and
 * tool-call events to the controller.
 *
 * Fix: a pending tool call is now flushed when the stream ends, even if the
 * final chunk's finish_reason is not "tool_calls". Previously such a call got
 * a toolCallGenerationStarted() with no matching toolCallGenerationEnded().
 *
 * @param stream - Async-iterable of OpenAI streaming chunks.
 * @param ctl - Controller receiving fragment and tool-call callbacks.
 */
async function consumeStream(stream, ctl) {
    // Accumulator for the tool call currently being streamed, or null.
    let current = null;
    // Emit the accumulated tool call once it has a name, then reset.
    function maybeFlushCurrentToolCall() {
        if (current === null || current.name === null) {
            return;
        }
        ctl.toolCallGenerationEnded({
            type: "function",
            name: current.name,
            arguments: parseToolArguments(current.arguments),
            id: current.id,
        });
        current = null;
    }
    for await (const chunk of stream) {
        const delta = chunk.choices?.[0]?.delta;
        if (!delta)
            continue;
        if (delta.content) {
            ctl.fragmentGenerated(delta.content);
        }
        for (const toolCall of delta.tool_calls ?? []) {
            if (toolCall.id !== undefined) {
                // A new id marks the start of a new tool call; flush any prior one.
                maybeFlushCurrentToolCall();
                current = { id: toolCall.id, name: null, index: toolCall.index, arguments: "" };
                ctl.toolCallGenerationStarted();
            }
            if (toolCall.function?.name && current) {
                current.name = toolCall.function.name;
                ctl.toolCallGenerationNameReceived(toolCall.function.name);
            }
            if (toolCall.function?.arguments && current) {
                current.arguments += toolCall.function.arguments;
                ctl.toolCallGenerationArgumentFragmentGenerated(toolCall.function.arguments);
            }
        }
        if (chunk.choices?.[0]?.finish_reason === "tool_calls" && current?.name) {
            maybeFlushCurrentToolCall();
        }
    }
    // Flush any tool call still pending when the stream ends without an
    // explicit "tool_calls" finish_reason.
    maybeFlushCurrentToolCall();
    console.info("Generation completed.");
}
/**
 * Entry point: generate a chat completion through the shim-backed OpenAI API.
 * Validates configuration, reconciles the cached model inventory with the
 * active shim, then streams the completion to the controller.
 *
 * @param ctl - Generation controller (config access, fragment/tool callbacks).
 * @param history - Chat history converted via toOpenAIMessages().
 * @throws {Error} When the selected model is absent from the shim inventory,
 *     or no model is selected after a refresh.
 */
async function generate(ctl, history) {
const config = ctl.getPluginConfig(config_1.configSchematics);
const globalConfig = ctl.getGlobalPluginConfig(config_1.globalConfigSchematics);
const shimBaseUrl = globalConfig.get("shimBaseUrl");
const inventoryMode = globalConfig.get("inventoryMode") || "health";
const selectedModel = config.get("model");
// Determine whether the cache was built against the same shim/mode, and
// whether the selected model exists in the cached inventory.
const currentInventoryUrl = (0, inventory_1.buildInventoryUrl)(shimBaseUrl, inventoryMode);
const cacheMatchesActiveInventory = modelsCache.baseUrl === currentInventoryUrl && modelsCache.inventoryMode === inventoryMode;
const selectedModelKnown = !selectedModel || selectedModel === config_1.placeholderModelValue
? false
: modelIds(modelsCache.models).includes(selectedModel);
console.info(`Generate: model=${selectedModel ?? ""} baseUrl=${shimBaseUrl ?? ""} inventoryMode=${inventoryMode} cached=${modelsCache.models.length}`);
// Without a shim URL there is nothing to call; tell the user and stop.
if (!shimBaseUrl) {
ctl.fragmentGenerated("Set the shim base URL, then retry.");
return;
}
// Refresh when no usable model is selected or the cache is empty; reconcile
// when the cache targets a different inventory or lacks the selected model.
const shouldRefreshModels = !selectedModel || selectedModel === config_1.placeholderModelValue || modelsCache.models.length === 0;
const shouldReconcileInventory = !cacheMatchesActiveInventory || !selectedModelKnown;
if (shouldRefreshModels || shouldReconcileInventory) {
const refreshResult = await refreshModelOptionsIfNeeded(shimBaseUrl, inventoryMode, selectedModel, true);
if (!selectedModel || selectedModel === config_1.placeholderModelValue) {
if (refreshResult.didUpdate) {
// Only notify about a successful refresh once per process
// (refreshNotified is module-level state).
if (!refreshNotified) {
ctl.fragmentGenerated("Inventory refreshed. Select a model and retry.");
refreshNotified = true;
}
}
else {
ctl.fragmentGenerated("Inventory not updated. Verify the shim and try again.");
}
return;
}
// A model was selected: fail loudly if the refreshed inventory still
// does not contain it rather than sending an invalid model id.
if (selectedModel && selectedModel !== config_1.placeholderModelValue) {
const modelStillMissing = !modelIds(refreshResult.models).includes(selectedModel);
if (modelStillMissing) {
throw new Error(`Selected model "${selectedModel}" is not present in the shim inventory for ${inventoryMode} mode.`);
}
}
}
if (!selectedModel || selectedModel === config_1.placeholderModelValue) {
throw new Error("Model inventory not loaded yet. Refresh the shim inventory and retry.");
}
// Stream the completion, wiring user aborts through to the HTTP stream.
const openai = createOpenAI(globalConfig);
const messages = toOpenAIMessages(history);
const tools = toOpenAITools(ctl);
const stream = await openai.chat.completions.create({
model: selectedModel,
messages,
tools,
stream: true,
});
wireAbort(ctl, stream);
await consumeStream(stream, ctl);
}
//# sourceMappingURL=generator.js.map