// Forked from tupik/openai-compat-endpoint
// src/index.ts
// src/index.ts
// Main plugin file
import { generate } from './generator';
import { loadCache, saveCache, createConfigSchema, globalConfigSchematics, type ModelCache, generateDisplayName } from './schema';
import { fetchModels, filterFreeModels } from './api';
/**
 * Builds the dropdown option list for the model selector.
 *
 * The sentinel "auto" entry is always first, followed by one entry per
 * free model id, labeled via generateDisplayName().
 *
 * @param freeModels - Model ids considered free; may be empty.
 * @returns Options in dropdown order: "auto" first, then the models.
 */
function buildModelOptions(freeModels: string[]): Array<{ value: string; displayName: string }> {
  const autoOption = { value: 'auto', displayName: '— Auto —' };
  const modelEntries = freeModels.map((modelId) => ({
    value: modelId,
    displayName: generateDisplayName(modelId)
  }));
  return [autoOption, ...modelEntries];
}
/**
 * Minimal structural contract for the host plugin context, narrowed from
 * `any` so the registration calls below are type-checked.
 * NOTE(review): method signatures are inferred from usage in this file —
 * confirm against the host SDK's real context type.
 */
interface PluginContext {
  withConfigSchematics(schematics: unknown): unknown;
  withGlobalConfigSchematics(schematics: unknown): unknown;
  withGenerator(generator: unknown): unknown;
}

/**
 * Plugin entry point: builds the model dropdown from the persisted cache,
 * registers config schematics and the generator with the host context, then
 * kicks off a background refresh of the model cache.
 *
 * @param context - Host plugin context used to register schematics and the generator.
 */
export async function main(context: PluginContext): Promise<void> {
  console.log('[Index] main() started');

  // Populate the dropdown from the last persisted cache; on a cold start
  // (no cache yet) only the "auto" option is available until a refresh runs.
  const cache = loadCache();
  console.log('[Index] Cache loaded:', cache ? 'yes' : 'no');
  const freeModels = cache?.freeModels ?? [];
  console.log('[Index] Free models count:', freeModels.length);
  if (freeModels.length > 0) {
    console.log('[Index] First 5 models:', freeModels.slice(0, 5));
  } else {
    console.warn('[Index] ⚠️ No models in cache — only "Auto" option available. Restart plugin after first generation to load models.');
  }

  const modelOptions = buildModelOptions(freeModels);
  console.log('[Index] Model options count:', modelOptions.length);
  const configSchematics = createConfigSchema(modelOptions);
  console.log('[Index] Schema created');
  context.withConfigSchematics(configSchematics);
  console.log('[Index] Config schematics registered');
  context.withGlobalConfigSchematics(globalConfigSchematics);
  context.withGenerator(generate);
  console.log('[Index] Generator registered');

  // Fire-and-forget: the refresh catches and logs its own errors, so we
  // deliberately don't await it; `void` marks the floating promise as
  // intentional.
  void refreshCacheInBackground();
}
/**
 * Re-fetches the model list from OpenRouter and persists it via saveCache().
 *
 * Runs fire-and-forget from main(); every failure is caught and logged so a
 * network error can never take down plugin startup. An empty model list from
 * the API is treated as a bad response and the existing cache is left alone.
 */
async function refreshCacheInBackground(): Promise<void> {
  try {
    console.log('[Index] Refreshing models in background...');
    const baseUrl = "https://openrouter.ai/api/v1";
    // NOTE(review): the API key is passed as "" — presumably the models
    // endpoint is public; confirm fetchModels tolerates an empty key.
    const allModels = await fetchModels(baseUrl, "");
    const freeModels = filterFreeModels(allModels);
    if (freeModels.length > 0) {
      // Fixed: the previous version also built a ModelCache literal here
      // that was never used (dead code) — saveCache() is what persists.
      saveCache(allModels, freeModels);
      console.log('[Index] Cache refreshed:', freeModels.length, 'free models');
    } else {
      console.warn('[Index] API returned no models');
    }
  } catch (error) {
    console.error('[Index] Failed to refresh cache:', error);
  }
}