// Forked from tupik/openai-compat-endpoint
// src/file-cache.ts
// src/file-cache.ts
// Module for caching model list to file
import * as fs from 'fs';
import * as path from 'path';
// File name of the on-disk cache; resolved relative to the plugin
// root directory by getCachePath().
const CACHE_FILE = 'models-cache.json';
/**
 * Shape of the model cache as persisted to disk (JSON).
 */
export interface ModelCache {
  // Unix epoch in milliseconds when the cache was written (Date.now()).
  // Kept for diagnostics even though TTL-based expiry has been removed.
  timestamp: number;
  // Identifiers of all cached models.
  allModels: string[];
  // Subset of model identifiers that are free — presumably the
  // provider's free tier; confirm against the code that populates it.
  freeModels: string[];
}
/**
* Gets path to cache file in plugin directory
*/
/**
 * Resolves the absolute path of the cache file, which lives one
 * directory above the compiled module (the plugin root).
 */
function getCachePath(): string {
  const pluginRoot = path.join(__dirname, '..');
  return path.join(pluginRoot, CACHE_FILE);
}
/**
* Loads cache from file
* Cache is stored indefinitely — TTL removed for manual user loading
*/
/**
 * Loads the model cache from disk.
 * The cache is stored indefinitely — TTL was removed in favor of
 * manual reloading by the user.
 *
 * @returns the parsed cache, or null when the file is missing,
 *          unreadable, or does not match the expected shape.
 */
export function loadCache(): ModelCache | null {
  try {
    const cachePath = getCachePath();
    if (!fs.existsSync(cachePath)) {
      return null;
    }
    const data = fs.readFileSync(cachePath, 'utf-8');
    // JSON.parse yields untyped data — validate the shape at runtime
    // instead of trusting a type annotation, so a corrupted or
    // hand-edited cache file cannot leak malformed values to callers.
    const parsed: unknown = JSON.parse(data);
    if (!isModelCache(parsed)) {
      console.error('[Cache] Failed to load cache:', new Error('unexpected cache file format'));
      return null;
    }
    return parsed;
  } catch (error) {
    console.error('[Cache] Failed to load cache:', error);
    return null;
  }
}

/**
 * Runtime type guard for ModelCache — checks every field the
 * interface declares, including element types of both arrays.
 */
function isModelCache(value: unknown): value is ModelCache {
  if (typeof value !== 'object' || value === null) {
    return false;
  }
  const v = value as Record<string, unknown>;
  return (
    typeof v.timestamp === 'number' &&
    Array.isArray(v.allModels) &&
    v.allModels.every((m) => typeof m === 'string') &&
    Array.isArray(v.freeModels) &&
    v.freeModels.every((m) => typeof m === 'string')
  );
}
/**
* Saves cache to file
*/
export function saveCache(allModels: string[], freeModels: string[]): void {
try {
const cache: ModelCache = {
timestamp: Date.now(),
allModels,
freeModels
};
const cachePath = getCachePath();
fs.writeFileSync(cachePath, JSON.stringify(cache, null, 2), 'utf-8');
} catch (error) {
console.error('[Cache] Failed to save cache:', error);
}
}