// Forked from tupik/openai-compat-endpoint
// src/index.ts
// Main plugin file (plugin entry point)
import { generate } from './generator';
import { loadCache, saveCache, createConfigSchema, globalConfigSchematics, type ModelCache, generateDisplayName } from './schema';
import { fetchModels, filterFreeModels } from './api';
/**
 * Builds the dropdown option list for the model selector.
 * The sentinel "auto" entry always comes first, followed by one
 * option per free model (display name derived via generateDisplayName).
 */
function buildModelOptions(freeModels: string[]): Array<{ value: string; displayName: string }> {
  const autoOption = { value: 'auto', displayName: 'β Auto β' };
  const modelEntries = freeModels.map((modelId) => ({
    value: modelId,
    displayName: generateDisplayName(modelId)
  }));
  return [autoOption, ...modelEntries];
}
export async function main(context: any) {
console.log('[Index] main() started');
// ΠΠ°Π³ΡΡΠΆΠ°Π΅ΠΌ ΠΊΡΡ ΠΌΠΎΠ΄Π΅Π»Π΅ΠΉ
const cache = loadCache();
console.log('[Index] Cache loaded:', cache ? 'yes' : 'no');
const freeModels = cache?.freeModels ?? [];
console.log('[Index] Free models count:', freeModels.length);
if (freeModels.length > 0) {
console.log('[Index] First 5 models:', freeModels.slice(0, 5));
} else {
console.warn('[Index] β οΈ No models in cache β only "Auto" option available. Restart plugin after first generation to load models.');
}
// Π‘ΠΎΠ·Π΄Π°ΡΠΌ ΡΡ
Π΅ΠΌΡ ΠΊΠΎΠ½ΡΠΈΠ³ΡΡΠ°ΡΠΈΠΈ Ρ ΠΎΠΏΡΠΈΡΠΌΠΈ ΠΈΠ· ΠΊΡΡΠ°
const modelOptions = buildModelOptions(freeModels);
console.log('[Index] Model options count:', modelOptions.length);
const configSchematics = createConfigSchema(modelOptions);
console.log('[Index] Schema created');
context.withConfigSchematics(configSchematics);
console.log('[Index] Config schematics registered');
context.withGlobalConfigSchematics(globalConfigSchematics);
context.withGenerator(generate);
console.log('[Index] Generator registered');
// ΠΡΠΈΠ½Ρ
ΡΠΎΠ½Π½ΠΎ ΠΎΠ±Π½ΠΎΠ²Π»ΡΠ΅ΠΌ ΠΊΡΡ Π² ΡΠΎΠ½Π΅ (Π½Π΅ Π±Π»ΠΎΠΊΠΈΡΡΠ΅Ρ UI)
refreshCacheInBackground();
}
/**
 * Fetches the model list from OpenRouter and persists the free subset to the
 * local cache. Invoked fire-and-forget from main(); every error is caught and
 * logged here so a network failure never propagates to the caller.
 */
async function refreshCacheInBackground(): Promise<void> {
  try {
    console.log('[Index] Refreshing models in background...');
    const baseUrl = "https://openrouter.ai/api/v1";
    // NOTE(review): called with an empty API key — presumably the /models
    // endpoint is public; confirm against fetchModels' contract.
    const allModels = await fetchModels(baseUrl, "");
    const freeModels = filterFreeModels(allModels);
    if (freeModels.length > 0) {
      saveCache(allModels, freeModels);
      console.log('[Index] Cache refreshed:', freeModels.length, 'free models');
    } else {
      // Keep the existing cache rather than overwriting it with an empty list.
      console.warn('[Index] API returned no models');
    }
  } catch (error) {
    console.error('[Index] Failed to refresh cache:', error);
  }
}