init
This commit is contained in:
parent
5088bf8dcb
commit
afa907065b
4
.gitignore
vendored
4
.gitignore
vendored
@ -2,3 +2,7 @@
|
||||
/coverage
|
||||
*.log
|
||||
.DS_Store
|
||||
*.env*
|
||||
scripts
|
||||
tests
|
||||
|
||||
|
||||
101
dist-in/cache.js
Normal file
101
dist-in/cache.js
Normal file
@ -0,0 +1,101 @@
|
||||
import { getCache } from './commons/cache/index.js';
|
||||
import { appEvents } from './events.js';
|
||||
import pino from 'pino';
|
||||
import path from 'path';
|
||||
// Dedicated structured logger for cache operations.
// Writes NDJSON lines to <cwd>/logs/cache.json (mkdir: true creates the
// directory on first write). Every record carries { product: 'cache' }.
const logFile = path.join(process.cwd(), 'logs', 'cache.json');
const fileTransport = pino.transport({
    target: 'pino/file',
    options: { destination: logFile, mkdir: true }
});
// Level is env-tunable via PINO_LOG_LEVEL; timestamps are ISO-8601 strings.
// multistream fans out to the single file stream at level >= 'info'.
const logger = pino({
    level: process.env.PINO_LOG_LEVEL || 'info',
    base: { product: 'cache' },
    timestamp: pino.stdTimeFunctions.isoTime,
}, pino.multistream([
    { stream: fileTransport, level: 'info' }
]));
|
||||
/**
 * Application-level cache facade: typed get/set, dependency-aware
 * invalidation, pattern flush, and SSE notification. Singleton.
 */
export class AppCache {
    static instance;

    // Dependencies: key -> [dependencies]
    // Defines what each type DEPENDS ON.
    // If 'categories' changes, any type that has 'categories' in its dependency list must be invalidated.
    static DEPENDENCIES = {
        'posts': ['categories', 'pictures'], // posts depend on categories and pictures
        'pages': ['categories', 'pictures', 'translations'],
        'categories': ['types'],
        'translations': [], // widget/category translations (wt:* keys)
        'feed': ['posts', 'pages', 'categories'],
        'auth': [] // No dependencies, standalone
    };

    constructor() { }

    /** Lazily create and return the process-wide singleton. */
    static getInstance() {
        if (!AppCache.instance) {
            AppCache.instance = new AppCache();
        }
        return AppCache.instance;
    }

    /** Read the value cached under `type` from the active cache provider. */
    async get(type) {
        const cache = getCache();
        const val = await cache.get(type);
        return val;
    }

    /** Store `data` under `type`; `ttl` is forwarded to the provider unchanged. */
    async set(type, data, ttl) {
        const cache = getCache();
        await cache.set(type, data, ttl);
    }

    /**
     * Silent cache invalidation — clears cache for the given type and
     * cascades to dependents. Does NOT emit SSE events.
     * Use `notify()` in route handlers for explicit SSE.
     *
     * FIX: `seen` (new optional parameter, defaults to a fresh Set) ensures
     * each type is invalidated at most once per cascade. The dependency
     * graph fans in (e.g. 'feed' depends on both 'posts' and 'categories'),
     * so the previous version re-invalidated 'feed' several times per
     * cascade, and any accidental cycle added to DEPENDENCIES would have
     * recursed forever. Callers are unaffected — existing call sites pass
     * only `type`.
     */
    async invalidate(type, seen = new Set()) {
        if (seen.has(type)) {
            return; // already handled in this cascade
        }
        seen.add(type);
        const cache = getCache();
        if (type === 'feed') {
            await cache.flush('*-feed*');
            await cache.flush('home-feed*');
        }
        else if (type === 'translations') {
            await cache.flush('wt:*');
            await cache.flush('page-details-*');
        }
        else {
            await cache.del(type);
        }
        // Find types that depend on this type
        const dependents = Object.keys(AppCache.DEPENDENCIES).filter(key => AppCache.DEPENDENCIES[key].includes(type));
        logger.info({ type, dependents }, 'Cache invalidated');
        if (dependents.length > 0) {
            await Promise.all(dependents.map(dep => this.invalidate(dep, seen)));
        }
    }

    /**
     * Flush cache entries by pattern. Silent — no SSE.
     */
    async flush(pattern) {
        const cache = getCache();
        await cache.flush(pattern);
        logger.info({ pattern: pattern || 'all' }, 'Cache flushed');
    }

    /**
     * Emit exactly 1 SSE event to notify clients of a change.
     * Call this in route handlers AFTER cache invalidation.
     *
     * @param type - Entity type (e.g. 'post', 'page', 'category', 'picture')
     * @param id - Entity ID (null for list-level / system changes)
     * @param action - The mutation that occurred
     */
    notify(type, id, action) {
        logger.info({ type, id, action }, 'Cache notify');
        appEvents.emitUpdate(type, action, { id }, 'cache');
    }

    /** Debug snapshot: provider info, the dependency graph, and current entries. */
    inspect() {
        const cache = getCache();
        return {
            info: cache.info(),
            dependencies: AppCache.DEPENDENCIES,
            entries: cache.entries(),
        };
    }
}
|
||||
// Shared module-level singleton; import this rather than calling getInstance().
export const appCache = AppCache.getInstance();
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiY2FjaGUuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi9zcmMvY2FjaGUudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsT0FBTyxFQUFFLFFBQVEsRUFBRSxNQUFNLDBCQUEwQixDQUFDO0FBQ3BELE9BQU8sRUFBRSxTQUFTLEVBQUUsTUFBTSxhQUFhLENBQUM7QUFFeEMsT0FBTyxJQUFJLE1BQU0sTUFBTSxDQUFDO0FBQ3hCLE9BQU8sSUFBSSxNQUFNLE1BQU0sQ0FBQztBQUV4QixNQUFNLE9BQU8sR0FBRyxJQUFJLENBQUMsSUFBSSxDQUFDLE9BQU8sQ0FBQyxHQUFHLEVBQUUsRUFBRSxNQUFNLEVBQUUsWUFBWSxDQUFDLENBQUM7QUFFL0QsTUFBTSxhQUFhLEdBQUcsSUFBSSxDQUFDLFNBQVMsQ0FBQztJQUNqQyxNQUFNLEVBQUUsV0FBVztJQUNuQixPQUFPLEVBQUUsRUFBRSxXQUFXLEVBQUUsT0FBTyxFQUFFLEtBQUssRUFBRSxJQUFJLEVBQUU7Q0FDakQsQ0FBQyxDQUFDO0FBRUgsTUFBTSxNQUFNLEdBQUcsSUFBSSxDQUNmO0lBQ0ksS0FBSyxFQUFFLE9BQU8sQ0FBQyxHQUFHLENBQUMsY0FBYyxJQUFJLE1BQU07SUFDM0MsSUFBSSxFQUFFLEVBQUUsT0FBTyxFQUFFLE9BQU8sRUFBRTtJQUMxQixTQUFTLEVBQUUsSUFBSSxDQUFDLGdCQUFnQixDQUFDLE9BQU87Q0FDM0MsRUFDRCxJQUFJLENBQUMsV0FBVyxDQUFDO0lBQ2IsRUFBRSxNQUFNLEVBQUUsYUFBYSxFQUFFLEtBQUssRUFBRSxNQUFNLEVBQUU7Q0FDM0MsQ0FBQyxDQUNMLENBQUM7QUFHRixNQUFNLE9BQU8sUUFBUTtJQUNULE1BQU0sQ0FBQyxRQUFRLENBQVc7SUFFbEMsc0NBQXNDO0lBQ3RDLHFDQUFxQztJQUNyQyxzR0FBc0c7SUFDOUYsTUFBTSxDQUFDLFlBQVksR0FBNkI7UUFDcEQsT0FBTyxFQUFFLENBQUMsWUFBWSxFQUFFLFVBQVUsQ0FBQyxFQUFFLDBDQUEwQztRQUMvRSxPQUFPLEVBQUUsQ0FBQyxZQUFZLEVBQUUsVUFBVSxFQUFFLGNBQWMsQ0FBQztRQUNuRCxZQUFZLEVBQUUsQ0FBQyxPQUFPLENBQUM7UUFDdkIsY0FBYyxFQUFFLEVBQUUsRUFBRSwyQ0FBMkM7UUFDL0QsTUFBTSxFQUFFLENBQUMsT0FBTyxFQUFFLE9BQU8sRUFBRSxZQUFZLENBQUM7UUFDeEMsTUFBTSxFQUFFLEVBQUUsQ0FBQyw4QkFBOEI7S0FDNUMsQ0FBQztJQUVGLGdCQUF3QixDQUFDO0lBRWxCLE1BQU0sQ0FBQyxXQUFXO1FBQ3JCLElBQUksQ0FBQyxRQUFRLENBQUMsUUFBUSxFQUFFLENBQUM7WUFDckIsUUFBUSxDQUFDLFFBQVEsR0FBRyxJQUFJLFFBQVEsRUFBRSxDQUFDO1FBQ3ZDLENBQUM7UUFDRCxPQUFPLFFBQVEsQ0FBQyxRQUFRLENBQUM7SUFDN0IsQ0FBQztJQUVNLEtBQUssQ0FBQyxHQUFHLENBQUksSUFBWTtRQUM1QixNQUFNLEtBQUssR0FBRyxRQUFRLEVBQUUsQ0FBQztRQUN6QixNQUFNLEdBQUcsR0FBRyxNQUFNLEtBQUssQ0FBQyxHQUFHLENBQUksSUFBSSxDQUFDLENBQUM7UUFDckMsT0FBTyxHQUFHLENBQUM7SUFDZixDQUFDO0lBRU0sS0FBSyxDQU
FDLEdBQUcsQ0FBSSxJQUFZLEVBQUUsSUFBTyxFQUFFLEdBQVk7UUFDbkQsTUFBTSxLQUFLLEdBQUcsUUFBUSxFQUFFLENBQUM7UUFDekIsTUFBTSxLQUFLLENBQUMsR0FBRyxDQUFDLElBQUksRUFBRSxJQUFJLEVBQUUsR0FBRyxDQUFDLENBQUM7SUFDckMsQ0FBQztJQUVEOzs7O09BSUc7SUFDSSxLQUFLLENBQUMsVUFBVSxDQUFDLElBQVk7UUFDaEMsTUFBTSxLQUFLLEdBQUcsUUFBUSxFQUFFLENBQUM7UUFFekIsSUFBSSxJQUFJLEtBQUssTUFBTSxFQUFFLENBQUM7WUFDbEIsTUFBTSxLQUFLLENBQUMsS0FBSyxDQUFDLFNBQVMsQ0FBQyxDQUFDO1lBQzdCLE1BQU0sS0FBSyxDQUFDLEtBQUssQ0FBQyxZQUFZLENBQUMsQ0FBQztRQUNwQyxDQUFDO2FBQU0sSUFBSSxJQUFJLEtBQUssY0FBYyxFQUFFLENBQUM7WUFDakMsTUFBTSxLQUFLLENBQUMsS0FBSyxDQUFDLE1BQU0sQ0FBQyxDQUFDO1lBQzFCLE1BQU0sS0FBSyxDQUFDLEtBQUssQ0FBQyxnQkFBZ0IsQ0FBQyxDQUFDO1FBQ3hDLENBQUM7YUFBTSxDQUFDO1lBQ0osTUFBTSxLQUFLLENBQUMsR0FBRyxDQUFDLElBQUksQ0FBQyxDQUFDO1FBQzFCLENBQUM7UUFFRCxzQ0FBc0M7UUFDdEMsTUFBTSxVQUFVLEdBQUcsTUFBTSxDQUFDLElBQUksQ0FBQyxRQUFRLENBQUMsWUFBWSxDQUFDLENBQUMsTUFBTSxDQUFDLEdBQUcsQ0FBQyxFQUFFLENBQy9ELFFBQVEsQ0FBQyxZQUFZLENBQUMsR0FBRyxDQUFDLENBQUMsUUFBUSxDQUFDLElBQUksQ0FBQyxDQUM1QyxDQUFDO1FBRUYsTUFBTSxDQUFDLElBQUksQ0FBQyxFQUFFLElBQUksRUFBRSxVQUFVLEVBQUUsRUFBRSxtQkFBbUIsQ0FBQyxDQUFDO1FBRXZELElBQUksVUFBVSxDQUFDLE1BQU0sR0FBRyxDQUFDLEVBQUUsQ0FBQztZQUN4QixNQUFNLE9BQU8sQ0FBQyxHQUFHLENBQUMsVUFBVSxDQUFDLEdBQUcsQ0FBQyxHQUFHLENBQUMsRUFBRSxDQUFDLElBQUksQ0FBQyxVQUFVLENBQUMsR0FBRyxDQUFDLENBQUMsQ0FBQyxDQUFDO1FBQ25FLENBQUM7SUFDTCxDQUFDO0lBRUQ7O09BRUc7SUFDSSxLQUFLLENBQUMsS0FBSyxDQUFDLE9BQWdCO1FBQy9CLE1BQU0sS0FBSyxHQUFHLFFBQVEsRUFBRSxDQUFDO1FBQ3pCLE1BQU0sS0FBSyxDQUFDLEtBQUssQ0FBQyxPQUFPLENBQUMsQ0FBQztRQUMzQixNQUFNLENBQUMsSUFBSSxDQUFDLEVBQUUsT0FBTyxFQUFFLE9BQU8sSUFBSSxLQUFLLEVBQUUsRUFBRSxlQUFlLENBQUMsQ0FBQztJQUNoRSxDQUFDO0lBRUQ7Ozs7Ozs7T0FPRztJQUNJLE1BQU0sQ0FBQyxJQUFZLEVBQUUsRUFBaUIsRUFBRSxNQUFzQztRQUNqRixNQUFNLENBQUMsSUFBSSxDQUFDLEVBQUUsSUFBSSxFQUFFLEVBQUUsRUFBRSxNQUFNLEVBQUUsRUFBRSxjQUFjLENBQUMsQ0FBQztRQUNsRCxTQUFTLENBQUMsVUFBVSxDQUFDLElBQUksRUFBRSxNQUFNLEVBQUUsRUFBRSxFQUFFLEVBQUUsRUFBRSxPQUFPLENBQUMsQ0FBQztJQUN4RCxDQUFDO0lBRU0sT0FBTztRQUNWLE1BQU0sS0FBSyxHQUFHLFFBQVEsRUFBRSxDQUFDO1FBQ3
pCLE9BQU87WUFDSCxJQUFJLEVBQUUsS0FBSyxDQUFDLElBQUksRUFBRTtZQUNsQixZQUFZLEVBQUUsUUFBUSxDQUFDLFlBQVk7WUFDbkMsT0FBTyxFQUFFLEtBQUssQ0FBQyxPQUFPLEVBQUU7U0FDM0IsQ0FBQztJQUNOLENBQUM7O0FBR0wsTUFBTSxDQUFDLE1BQU0sUUFBUSxHQUFHLFFBQVEsQ0FBQyxXQUFXLEVBQUUsQ0FBQyJ9
|
||||
57
dist-in/commons/cache/MemoryCache.js
vendored
Normal file
57
dist-in/commons/cache/MemoryCache.js
vendored
Normal file
@ -0,0 +1,57 @@
|
||||
import { LRUCache } from 'lru-cache';
|
||||
/**
 * In-process cache provider backed by lru-cache.
 * Async signatures mirror the provider interface so a networked backend
 * (e.g. Redis) can be swapped in without changing callers.
 */
export class MemoryCache {
    cache;

    constructor() {
        // CACHE_DEFAULT_TTL is read as milliseconds; default 5 minutes.
        // NOTE(review): set() below multiplies its ttl by 1000, implying
        // callers pass seconds there — confirm the intended unit is consistent.
        const defaultTtl = process.env.CACHE_DEFAULT_TTL ? parseInt(process.env.CACHE_DEFAULT_TTL, 10) : 1000 * 60 * 5; // 5 mins default
        this.cache = new LRUCache({
            max: 1000,
            ttl: defaultTtl,
            updateAgeOnGet: false,
        });
    }

    /**
     * Read a value; resolves to null on a miss.
     * FIX: the original returned `value || null`, which reported cached
     * falsy values (0, '', false) as misses. `??` maps only absent
     * (undefined/null) results to null.
     */
    async get(key) {
        const value = this.cache.get(key);
        return value ?? null;
    }

    /**
     * Store a value. `ttl` (presumably seconds — it is multiplied by 1000)
     * overrides the default; when omitted, the cache-wide default applies.
     */
    async set(key, value, ttl) {
        this.cache.set(key, value, { ttl: ttl ? ttl * 1000 : undefined });
    }

    /** Remove a single key. */
    async del(key) {
        this.cache.delete(key);
    }

    /**
     * Delete every entry, or only those matching a simple wildcard pattern
     * (e.g. "home-feed*", "*-feed*") when `pattern` is given.
     */
    async flush(pattern) {
        if (pattern) {
            // Support simple wildcard patterns (e.g. "home-feed*", "*-feed*")
            // Escape special regex chars except *, then replace * with .*
            // FIX: '?' added to the escape class — previously it was left
            // unescaped and acted as a regex quantifier inside patterns.
            const regexPattern = pattern
                .replace(/[.+?^${}()|[\]\\]/g, '\\$&') // Escape regex chars
                .replace(/\*/g, '.*'); // Convert * to .*
            const regex = new RegExp(`^${regexPattern}$`);
            for (const key of this.cache.keys()) {
                if (typeof key === 'string' && regex.test(key)) {
                    this.cache.delete(key);
                }
            }
        }
        else {
            this.cache.clear();
        }
    }

    /** All current keys, materialized from the LRU iterator. */
    keys() {
        return [...this.cache.keys()];
    }

    /** Basic provider stats for inspect/debug endpoints. */
    info() {
        return {
            size: this.cache.size,
            max: this.cache.max,
            provider: 'memory-lru',
        };
    }

    /** Per-key remaining-TTL snapshot for inspect/debug endpoints. */
    entries() {
        return [...this.cache.keys()].map(key => ({
            key,
            remainingTTL: this.cache.getRemainingTTL(key),
        }));
    }
}
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiTWVtb3J5Q2FjaGUuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi8uLi9zcmMvY29tbW9ucy9jYWNoZS9NZW1vcnlDYWNoZS50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxPQUFPLEVBQUUsUUFBUSxFQUFFLE1BQU0sV0FBVyxDQUFDO0FBR3JDLE1BQU0sT0FBTyxXQUFXO0lBQ1osS0FBSyxDQUF3QjtJQUVyQztRQUNJLE1BQU0sVUFBVSxHQUFHLE9BQU8sQ0FBQyxHQUFHLENBQUMsaUJBQWlCLENBQUMsQ0FBQyxDQUFDLFFBQVEsQ0FBQyxPQUFPLENBQUMsR0FBRyxDQUFDLGlCQUFpQixDQUFDLENBQUMsQ0FBQyxDQUFDLElBQUksR0FBRyxFQUFFLEdBQUcsQ0FBQyxDQUFDLENBQUMsaUJBQWlCO1FBQzdILElBQUksQ0FBQyxLQUFLLEdBQUcsSUFBSSxRQUFRLENBQUM7WUFDdEIsR0FBRyxFQUFFLElBQUk7WUFDVCxHQUFHLEVBQUUsVUFBVTtZQUNmLGNBQWMsRUFBRSxLQUFLO1NBQ3hCLENBQUMsQ0FBQztJQUNQLENBQUM7SUFFRCxLQUFLLENBQUMsR0FBRyxDQUFJLEdBQVc7UUFDcEIsTUFBTSxLQUFLLEdBQUcsSUFBSSxDQUFDLEtBQUssQ0FBQyxHQUFHLENBQUMsR0FBRyxDQUFDLENBQUM7UUFDbEMsT0FBUSxLQUFXLElBQUksSUFBSSxDQUFDO0lBQ2hDLENBQUM7SUFFRCxLQUFLLENBQUMsR0FBRyxDQUFJLEdBQVcsRUFBRSxLQUFRLEVBQUUsR0FBWTtRQUM1QyxJQUFJLENBQUMsS0FBSyxDQUFDLEdBQUcsQ0FBQyxHQUFHLEVBQUUsS0FBSyxFQUFFLEVBQUUsR0FBRyxFQUFFLEdBQUcsQ0FBQyxDQUFDLENBQUMsR0FBRyxHQUFHLElBQUksQ0FBQyxDQUFDLENBQUMsU0FBUyxFQUFFLENBQUMsQ0FBQztJQUN0RSxDQUFDO0lBRUQsS0FBSyxDQUFDLEdBQUcsQ0FBQyxHQUFXO1FBQ2pCLElBQUksQ0FBQyxLQUFLLENBQUMsTUFBTSxDQUFDLEdBQUcsQ0FBQyxDQUFDO0lBQzNCLENBQUM7SUFFRCxLQUFLLENBQUMsS0FBSyxDQUFDLE9BQWdCO1FBQ3hCLElBQUksT0FBTyxFQUFFLENBQUM7WUFDVixrRUFBa0U7WUFDbEUsOERBQThEO1lBQzlELE1BQU0sWUFBWSxHQUFHLE9BQU87aUJBQ3ZCLE9BQU8sQ0FBQyxtQkFBbUIsRUFBRSxNQUFNLENBQUMsQ0FBQyxxQkFBcUI7aUJBQzFELE9BQU8sQ0FBQyxLQUFLLEVBQUUsSUFBSSxDQUFDLENBQUMsQ0FBaUIsa0JBQWtCO1lBRTdELE1BQU0sS0FBSyxHQUFHLElBQUksTUFBTSxDQUFDLElBQUksWUFBWSxHQUFHLENBQUMsQ0FBQztZQUU5QyxLQUFLLE1BQU0sR0FBRyxJQUFJLElBQUksQ0FBQyxLQUFLLENBQUMsSUFBSSxFQUFFLEVBQUUsQ0FBQztnQkFDbEMsSUFBSSxPQUFPLEdBQUcsS0FBSyxRQUFRLElBQUksS0FBSyxDQUFDLElBQUksQ0FBQyxHQUFHLENBQUMsRUFBRSxDQUFDO29CQUM3QyxJQUFJLENBQUMsS0FBSyxDQUFDLE1BQU0sQ0FBQyxHQUFHLENBQUMsQ0FBQztnQkFDM0IsQ0FBQztZQUNMLENBQUM7UUFDTCxDQUFDO2FBQU0sQ0FBQztZQUNKLElBQUksQ0FBQy
xLQUFLLENBQUMsS0FBSyxFQUFFLENBQUM7UUFDdkIsQ0FBQztJQUNMLENBQUM7SUFFRCxJQUFJO1FBQ0EsT0FBTyxDQUFDLEdBQUcsSUFBSSxDQUFDLEtBQUssQ0FBQyxJQUFJLEVBQUUsQ0FBQyxDQUFDO0lBQ2xDLENBQUM7SUFFRCxJQUFJO1FBQ0EsT0FBTztZQUNILElBQUksRUFBRSxJQUFJLENBQUMsS0FBSyxDQUFDLElBQUk7WUFDckIsR0FBRyxFQUFFLElBQUksQ0FBQyxLQUFLLENBQUMsR0FBRztZQUNuQixRQUFRLEVBQUUsWUFBWTtTQUN6QixDQUFDO0lBQ04sQ0FBQztJQUVELE9BQU87UUFDSCxPQUFPLENBQUMsR0FBRyxJQUFJLENBQUMsS0FBSyxDQUFDLElBQUksRUFBRSxDQUFDLENBQUMsR0FBRyxDQUFDLEdBQUcsQ0FBQyxFQUFFLENBQUMsQ0FBQztZQUN0QyxHQUFHO1lBQ0gsWUFBWSxFQUFFLElBQUksQ0FBQyxLQUFLLENBQUMsZUFBZSxDQUFDLEdBQUcsQ0FBQztTQUNoRCxDQUFDLENBQUMsQ0FBQztJQUNSLENBQUM7Q0FDSiJ9
|
||||
14
dist-in/commons/cache/index.js
vendored
Normal file
14
dist-in/commons/cache/index.js
vendored
Normal file
@ -0,0 +1,14 @@
|
||||
import { MemoryCache } from './MemoryCache.js';
|
||||
// Design Pattern: Singleton or Factory
|
||||
// For now, we export a singleton instance based on ENV or default to Memory
|
||||
// Future: Read process.env.CACHE_PROVIDER == 'redis'
|
||||
// Design Pattern: Singleton (Factory-ready).
// One cache provider is built lazily and shared by every importer.
// Future: inspect process.env.CACHE_PROVIDER === 'redis' to pick a backend.
let instance = null;

/** Return the shared cache provider, constructing it on first call. */
export const getCache = () => {
    if (instance === null) {
        instance = new MemoryCache();
    }
    return instance;
};

export * from './types.js';
export * from './MemoryCache.js';
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi8uLi9zcmMvY29tbW9ucy9jYWNoZS9pbmRleC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFDQSxPQUFPLEVBQUUsV0FBVyxFQUFFLE1BQU0sa0JBQWtCLENBQUM7QUFFL0MsdUNBQXVDO0FBQ3ZDLDRFQUE0RTtBQUM1RSxxREFBcUQ7QUFFckQsSUFBSSxRQUFRLEdBQXdCLElBQUksQ0FBQztBQUV6QyxNQUFNLENBQUMsTUFBTSxRQUFRLEdBQUcsR0FBaUIsRUFBRTtJQUN2QyxJQUFJLENBQUMsUUFBUSxFQUFFLENBQUM7UUFDWixRQUFRLEdBQUcsSUFBSSxXQUFXLEVBQUUsQ0FBQztJQUNqQyxDQUFDO0lBQ0QsT0FBTyxRQUFRLENBQUM7QUFDcEIsQ0FBQyxDQUFDO0FBRUYsY0FBYyxZQUFZLENBQUM7QUFDM0IsY0FBYyxrQkFBa0IsQ0FBQyJ9
|
||||
2
dist-in/commons/cache/types.js
vendored
Normal file
2
dist-in/commons/cache/types.js
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
export {};
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoidHlwZXMuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi8uLi9zcmMvY29tbW9ucy9jYWNoZS90eXBlcy50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiIn0=
|
||||
195
dist-in/commons/decorators.js
Normal file
195
dist-in/commons/decorators.js
Normal file
File diff suppressed because one or more lines are too long
169
dist-in/commons/log-routes-factory.js
Normal file
169
dist-in/commons/log-routes-factory.js
Normal file
File diff suppressed because one or more lines are too long
67
dist-in/commons/logger.js
Normal file
67
dist-in/commons/logger.js
Normal file
@ -0,0 +1,67 @@
|
||||
import pino from 'pino';
|
||||
import path from 'path';
|
||||
import { mkdirSync } from 'fs';
|
||||
// Ensure logs directory exists
// With { recursive: true } an existing directory is not an error; the catch
// swallows any other fs failure (e.g. permissions) so logger setup never
// aborts process startup.
try {
    mkdirSync(path.join(process.cwd(), 'logs'), { recursive: true });
}
catch (err) {
    // Directory already exists
}
|
||||
// Append NDJSON records to <cwd>/app.log.
const fileTransport = pino.transport({
    target: 'pino/file',
    options: { destination: path.join(process.cwd(), 'app.log') },
});
// Human-readable console output; destination 1 is stdout.
const consoleTransport = pino.transport({
    target: 'pino-pretty',
    options: {
        colorize: true,
        ignore: 'pid,hostname',
        destination: 1,
    },
});
// Application-wide logger: level from PINO_LOG_LEVEL (default 'info'),
// uppercased level labels, ISO-8601 timestamps, fanned out to both the
// file and console streams at level >= 'info'.
export const logger = pino({
    level: process.env.PINO_LOG_LEVEL || 'info',
    formatters: {
        level: (label) => {
            return { level: label.toUpperCase() };
        },
    },
    timestamp: pino.stdTimeFunctions.isoTime,
}, pino.multistream([
    { stream: fileTransport, level: 'info' },
    { stream: consoleTransport, level: 'info' },
]));
|
||||
// Security logger - writes to logs/security.json
// Separate pino instance so security events land in their own file and
// every record is tagged with { logger: 'security' }.
const securityFileTransport = pino.transport({
    target: 'pino/file',
    options: {
        destination: path.join(process.cwd(), 'logs', 'security.json'),
        mkdir: true
    },
});
// Mirrors security events to stdout in pretty form.
const securityConsoleTransport = pino.transport({
    target: 'pino-pretty',
    options: {
        colorize: true,
        ignore: 'pid,hostname',
        destination: 1,
    },
});
// Same level/formatting conventions as the main logger above.
export const securityLogger = pino({
    level: process.env.PINO_LOG_LEVEL || 'info',
    formatters: {
        level: (label) => {
            return { level: label.toUpperCase() };
        },
    },
    timestamp: pino.stdTimeFunctions.isoTime,
    base: {
        logger: 'security'
    }
}, pino.multistream([
    { stream: securityFileTransport, level: 'info' },
    { stream: securityConsoleTransport, level: 'info' },
]));
export default logger;
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoibG9nZ2VyLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vLi4vc3JjL2NvbW1vbnMvbG9nZ2VyLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLE9BQU8sSUFBSSxNQUFNLE1BQU0sQ0FBQztBQUN4QixPQUFPLElBQUksTUFBTSxNQUFNLENBQUM7QUFDeEIsT0FBTyxFQUFFLFNBQVMsRUFBRSxNQUFNLElBQUksQ0FBQztBQUUvQiwrQkFBK0I7QUFDL0IsSUFBSSxDQUFDO0lBQ0QsU0FBUyxDQUFDLElBQUksQ0FBQyxJQUFJLENBQUMsT0FBTyxDQUFDLEdBQUcsRUFBRSxFQUFFLE1BQU0sQ0FBQyxFQUFFLEVBQUUsU0FBUyxFQUFFLElBQUksRUFBRSxDQUFDLENBQUM7QUFDckUsQ0FBQztBQUFDLE9BQU8sR0FBRyxFQUFFLENBQUM7SUFDWCwyQkFBMkI7QUFDL0IsQ0FBQztBQUVELE1BQU0sYUFBYSxHQUFHLElBQUksQ0FBQyxTQUFTLENBQUM7SUFDakMsTUFBTSxFQUFFLFdBQVc7SUFDbkIsT0FBTyxFQUFFLEVBQUUsV0FBVyxFQUFFLElBQUksQ0FBQyxJQUFJLENBQUMsT0FBTyxDQUFDLEdBQUcsRUFBRSxFQUFFLFNBQVMsQ0FBQyxFQUFFO0NBQ2hFLENBQUMsQ0FBQztBQUVILE1BQU0sZ0JBQWdCLEdBQUcsSUFBSSxDQUFDLFNBQVMsQ0FBQztJQUNwQyxNQUFNLEVBQUUsYUFBYTtJQUNyQixPQUFPLEVBQUU7UUFDTCxRQUFRLEVBQUUsSUFBSTtRQUNkLE1BQU0sRUFBRSxjQUFjO1FBQ3RCLFdBQVcsRUFBRSxDQUFDO0tBQ2pCO0NBQ0osQ0FBQyxDQUFDO0FBRUgsTUFBTSxDQUFDLE1BQU0sTUFBTSxHQUFHLElBQUksQ0FDdEI7SUFDSSxLQUFLLEVBQUUsT0FBTyxDQUFDLEdBQUcsQ0FBQyxjQUFjLElBQUksTUFBTTtJQUMzQyxVQUFVLEVBQUU7UUFDUixLQUFLLEVBQUUsQ0FBQyxLQUFLLEVBQUUsRUFBRTtZQUNiLE9BQU8sRUFBRSxLQUFLLEVBQUUsS0FBSyxDQUFDLFdBQVcsRUFBRSxFQUFFLENBQUM7UUFDMUMsQ0FBQztLQUNKO0lBQ0QsU0FBUyxFQUFFLElBQUksQ0FBQyxnQkFBZ0IsQ0FBQyxPQUFPO0NBQzNDLEVBQ0QsSUFBSSxDQUFDLFdBQVcsQ0FBQztJQUNiLEVBQUUsTUFBTSxFQUFFLGFBQWEsRUFBRSxLQUFLLEVBQUUsTUFBTSxFQUFFO0lBQ3hDLEVBQUUsTUFBTSxFQUFFLGdCQUFnQixFQUFFLEtBQUssRUFBRSxNQUFNLEVBQUU7Q0FDOUMsQ0FBQyxDQUNMLENBQUM7QUFFRixpREFBaUQ7QUFDakQsTUFBTSxxQkFBcUIsR0FBRyxJQUFJLENBQUMsU0FBUyxDQUFDO0lBQ3pDLE1BQU0sRUFBRSxXQUFXO0lBQ25CLE9BQU8sRUFBRTtRQUNMLFdBQVcsRUFBRSxJQUFJLENBQUMsSUFBSSxDQUFDLE9BQU8sQ0FBQyxHQUFHLEVBQUUsRUFBRSxNQUFNLEVBQUUsZUFBZSxDQUFDO1FBQzlELEtBQUssRUFBRSxJQUFJO0tBQ2Q7Q0FDSixDQUFDLENBQUM7QUFFSCxNQUFNLHdCQUF3QixHQUFHLElBQUksQ0FBQyxTQUFTLENBQUM7SUFDNUMsTUFBTSxFQUFFLGFBQWE7SUFDckIsT0FBTyxFQUFFO1FBQ0wsUUFBUSxFQUFFLElBQU
k7UUFDZCxNQUFNLEVBQUUsY0FBYztRQUN0QixXQUFXLEVBQUUsQ0FBQztLQUNqQjtDQUNKLENBQUMsQ0FBQztBQUVILE1BQU0sQ0FBQyxNQUFNLGNBQWMsR0FBRyxJQUFJLENBQzlCO0lBQ0ksS0FBSyxFQUFFLE9BQU8sQ0FBQyxHQUFHLENBQUMsY0FBYyxJQUFJLE1BQU07SUFDM0MsVUFBVSxFQUFFO1FBQ1IsS0FBSyxFQUFFLENBQUMsS0FBSyxFQUFFLEVBQUU7WUFDYixPQUFPLEVBQUUsS0FBSyxFQUFFLEtBQUssQ0FBQyxXQUFXLEVBQUUsRUFBRSxDQUFDO1FBQzFDLENBQUM7S0FDSjtJQUNELFNBQVMsRUFBRSxJQUFJLENBQUMsZ0JBQWdCLENBQUMsT0FBTztJQUN4QyxJQUFJLEVBQUU7UUFDRixNQUFNLEVBQUUsVUFBVTtLQUNyQjtDQUNKLEVBQ0QsSUFBSSxDQUFDLFdBQVcsQ0FBQztJQUNiLEVBQUUsTUFBTSxFQUFFLHFCQUFxQixFQUFFLEtBQUssRUFBRSxNQUFNLEVBQUU7SUFDaEQsRUFBRSxNQUFNLEVBQUUsd0JBQXdCLEVBQUUsS0FBSyxFQUFFLE1BQU0sRUFBRTtDQUN0RCxDQUFDLENBQ0wsQ0FBQztBQUVGLGVBQWUsTUFBTSxDQUFDIn0=
|
||||
165
dist-in/commons/registry.js
Normal file
165
dist-in/commons/registry.js
Normal file
File diff suppressed because one or more lines are too long
67
dist-in/commons/supabase.js
Normal file
67
dist-in/commons/supabase.js
Normal file
@ -0,0 +1,67 @@
|
||||
import { createClient } from '@supabase/supabase-js';
import 'dotenv/config';
const supabaseUrl = process.env.SUPABASE_URL;
const supabaseKey = process.env.SUPABASE_SERVICE_KEY;
import { logger } from './logger.js';
// Fail fast on missing credentials: log which value is absent, then exit —
// except under NODE_ENV === 'test', where we throw so the test runner can
// surface the failure instead of killing the process.
if (!supabaseUrl || !supabaseKey) {
    logger.error({
        hasUrl: !!supabaseUrl,
        hasKey: !!supabaseKey,
        env: process.env.NODE_ENV
    }, 'Missing Supabase environment variables');
    // process.exit(1) // Don't exit in test mode, throw instead
    if (process.env.NODE_ENV !== 'test')
        process.exit(1);
    throw new Error('Missing Supabase environment variables: URL or Key is undefined');
}
// Client built with SUPABASE_SERVICE_KEY — server-side use only.
export const supabase = createClient(supabaseUrl, supabaseKey);
// --- Auth Cache (in-process Map for speed) ---
// TTL is in milliseconds (env override via AUTH_CACHE_TTL).
const AUTH_CACHE_TTL = process.env.AUTH_CACHE_TTL ? parseInt(process.env.AUTH_CACHE_TTL) : 1000 * 60 * 1; // Default 1 minute
// token -> { user, timestamp }. Entries are only overwritten or removed via
// flushAuthCache — NOTE(review): unbounded growth under many distinct tokens.
const authMap = new Map();
|
||||
/**
 * Resolve a Supabase user from a bearer token, memoized in-process for
 * AUTH_CACHE_TTL milliseconds. Failed lookups are negatively cached (null)
 * for the same window so bad tokens don't hammer the auth endpoint.
 * Resolves to null for a missing or invalid token, or on any auth error.
 */
export const getUserCached = async (token) => {
    if (!token)
        return null;
    const now = Date.now();
    const hit = authMap.get(token);
    const isFresh = hit !== undefined && (now - hit.timestamp < AUTH_CACHE_TTL);
    if (isFresh) {
        return hit.user;
    }
    try {
        const { data: { user }, error } = await supabase.auth.getUser(token);
        // Cache the outcome either way: a real user or a negative (null) entry.
        const resolved = (error || !user) ? null : user;
        authMap.set(token, { user: resolved, timestamp: now });
        return resolved;
    }
    catch (err) {
        logger.error({ err }, 'Auth Cache Error');
        return null;
    }
};
|
||||
/** Clear in-process auth cache (call after admin user ops, role changes, etc.) */
export const flushAuthCache = (userId) => {
    if (!userId) {
        authMap.clear();
        return;
    }
    // Targeted flush: drop only the tokens that resolved to this user.
    // Snapshot first so we never mutate the Map mid-iteration.
    const staleTokens = [...authMap]
        .filter(([, entry]) => entry.user?.id === userId)
        .map(([token]) => token);
    staleTokens.forEach(token => authMap.delete(token));
};
|
||||
/**
 * Test Supabase connection by attempting a simple query
 * Resolves true only when the probe query returns no error.
 */
export async function testSupabaseConnection() {
    try {
        const { error } = await supabase.from('products').select('id').limit(1);
        return error ? false : true;
    }
    catch {
        return false;
    }
}
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoic3VwYWJhc2UuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvY29tbW9ucy9zdXBhYmFzZS50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxPQUFPLEVBQUUsWUFBWSxFQUFRLE1BQU0sdUJBQXVCLENBQUE7QUFDMUQsT0FBTyxlQUFlLENBQUE7QUFFdEIsTUFBTSxXQUFXLEdBQUcsT0FBTyxDQUFDLEdBQUcsQ0FBQyxZQUFZLENBQUE7QUFDNUMsTUFBTSxXQUFXLEdBQUcsT0FBTyxDQUFDLEdBQUcsQ0FBQyxvQkFBb0IsQ0FBQTtBQUVwRCxPQUFPLEVBQUUsTUFBTSxFQUFFLE1BQU0sYUFBYSxDQUFBO0FBRXBDLElBQUksQ0FBQyxXQUFXLElBQUksQ0FBQyxXQUFXLEVBQUUsQ0FBQztJQUMvQixNQUFNLENBQUMsS0FBSyxDQUFDO1FBQ1QsTUFBTSxFQUFFLENBQUMsQ0FBQyxXQUFXO1FBQ3JCLE1BQU0sRUFBRSxDQUFDLENBQUMsV0FBVztRQUNyQixHQUFHLEVBQUUsT0FBTyxDQUFDLEdBQUcsQ0FBQyxRQUFRO0tBQzVCLEVBQUUsd0NBQXdDLENBQUMsQ0FBQztJQUM3Qyw0REFBNEQ7SUFDNUQsSUFBSSxPQUFPLENBQUMsR0FBRyxDQUFDLFFBQVEsS0FBSyxNQUFNO1FBQUUsT0FBTyxDQUFDLElBQUksQ0FBQyxDQUFDLENBQUMsQ0FBQztJQUNyRCxNQUFNLElBQUksS0FBSyxDQUFDLGlFQUFpRSxDQUFDLENBQUM7QUFDdkYsQ0FBQztBQUVELE1BQU0sQ0FBQyxNQUFNLFFBQVEsR0FBRyxZQUFZLENBQUMsV0FBVyxFQUFFLFdBQVcsQ0FBQyxDQUFBO0FBRTlELGdEQUFnRDtBQUVoRCxNQUFNLGNBQWMsR0FBRyxPQUFPLENBQUMsR0FBRyxDQUFDLGNBQWMsQ0FBQyxDQUFDLENBQUMsUUFBUSxDQUFDLE9BQU8sQ0FBQyxHQUFHLENBQUMsY0FBYyxDQUFDLENBQUMsQ0FBQyxDQUFDLElBQUksR0FBRyxFQUFFLEdBQUcsQ0FBQyxDQUFDLENBQUMsbUJBQW1CO0FBRzdILE1BQU0sT0FBTyxHQUFHLElBQUksR0FBRyxFQUEwQixDQUFDO0FBRWxELE1BQU0sQ0FBQyxNQUFNLGFBQWEsR0FBRyxLQUFLLEVBQUUsS0FBYSxFQUF3QixFQUFFO0lBQ3ZFLElBQUksQ0FBQyxLQUFLO1FBQUUsT0FBTyxJQUFJLENBQUM7SUFFeEIsTUFBTSxHQUFHLEdBQUcsSUFBSSxDQUFDLEdBQUcsRUFBRSxDQUFDO0lBQ3ZCLE1BQU0sTUFBTSxHQUFHLE9BQU8sQ0FBQyxHQUFHLENBQUMsS0FBSyxDQUFDLENBQUM7SUFDbEMsSUFBSSxNQUFNLElBQUksQ0FBQyxHQUFHLEdBQUcsTUFBTSxDQUFDLFNBQVMsR0FBRyxjQUFjLENBQUMsRUFBRSxDQUFDO1FBQ3RELE9BQU8sTUFBTSxDQUFDLElBQUksQ0FBQztJQUN2QixDQUFDO0lBRUQsSUFBSSxDQUFDO1FBQ0QsTUFBTSxFQUFFLElBQUksRUFBRSxFQUFFLElBQUksRUFBRSxFQUFFLEtBQUssRUFBRSxHQUFHLE1BQU0sUUFBUSxDQUFDLElBQUksQ0FBQyxPQUFPLENBQUMsS0FBSyxDQUFDLENBQUM7UUFFckUsSUFBSSxLQUFLLElBQUksQ0FBQyxJQUFJLEVBQUUsQ0FBQztZQUNqQixPQUFPLENBQUMsR0FBRy
xDQUFDLEtBQUssRUFBRSxFQUFFLElBQUksRUFBRSxJQUFJLEVBQUUsU0FBUyxFQUFFLEdBQUcsRUFBRSxDQUFDLENBQUM7WUFDbkQsT0FBTyxJQUFJLENBQUM7UUFDaEIsQ0FBQztRQUVELE9BQU8sQ0FBQyxHQUFHLENBQUMsS0FBSyxFQUFFLEVBQUUsSUFBSSxFQUFFLFNBQVMsRUFBRSxHQUFHLEVBQUUsQ0FBQyxDQUFDO1FBQzdDLE9BQU8sSUFBSSxDQUFDO0lBQ2hCLENBQUM7SUFBQyxPQUFPLEdBQUcsRUFBRSxDQUFDO1FBQ1gsTUFBTSxDQUFDLEtBQUssQ0FBQyxFQUFFLEdBQUcsRUFBRSxFQUFFLGtCQUFrQixDQUFDLENBQUM7UUFDMUMsT0FBTyxJQUFJLENBQUM7SUFDaEIsQ0FBQztBQUNMLENBQUMsQ0FBQztBQUVGLGtGQUFrRjtBQUNsRixNQUFNLENBQUMsTUFBTSxjQUFjLEdBQUcsQ0FBQyxNQUFlLEVBQUUsRUFBRTtJQUM5QyxJQUFJLENBQUMsTUFBTSxFQUFFLENBQUM7UUFDVixPQUFPLENBQUMsS0FBSyxFQUFFLENBQUM7UUFDaEIsT0FBTztJQUNYLENBQUM7SUFDRCxxQ0FBcUM7SUFDckMsS0FBSyxNQUFNLENBQUMsS0FBSyxFQUFFLEtBQUssQ0FBQyxJQUFJLE9BQU8sRUFBRSxDQUFDO1FBQ25DLElBQUksS0FBSyxDQUFDLElBQUksRUFBRSxFQUFFLEtBQUssTUFBTTtZQUFFLE9BQU8sQ0FBQyxNQUFNLENBQUMsS0FBSyxDQUFDLENBQUM7SUFDekQsQ0FBQztBQUNMLENBQUMsQ0FBQztBQUVGOztHQUVHO0FBQ0gsTUFBTSxDQUFDLEtBQUssVUFBVSxzQkFBc0I7SUFDeEMsSUFBSSxDQUFDO1FBQ0QsTUFBTSxFQUFFLEtBQUssRUFBRSxHQUFHLE1BQU0sUUFBUSxDQUFDLElBQUksQ0FBQyxVQUFVLENBQUMsQ0FBQyxNQUFNLENBQUMsSUFBSSxDQUFDLENBQUMsS0FBSyxDQUFDLENBQUMsQ0FBQyxDQUFBO1FBQ3ZFLE9BQU8sQ0FBQyxLQUFLLENBQUE7SUFDakIsQ0FBQztJQUFDLE1BQU0sQ0FBQztRQUNMLE9BQU8sS0FBSyxDQUFBO0lBQ2hCLENBQUM7QUFDTCxDQUFDIn0=
|
||||
2
dist-in/commons/types.js
Normal file
2
dist-in/commons/types.js
Normal file
@ -0,0 +1,2 @@
|
||||
export {};
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoidHlwZXMuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvY29tbW9ucy90eXBlcy50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiIn0=
|
||||
228
dist-in/commons/websocket.js
Normal file
228
dist-in/commons/websocket.js
Normal file
File diff suppressed because one or more lines are too long
115
dist-in/config/products.js
Normal file
115
dist-in/config/products.js
Normal file
@ -0,0 +1,115 @@
|
||||
/**
 * Product and Action Mapping Configuration
 * Defines all trackable products, their actions, and associated metadata
 *
 * Each entry: product -> action -> { endpoint, method, costUnits,
 * cancellable, description }. `endpoint` uses either ':param' or '{param}'
 * placeholder syntax (both are understood by matchesRoute below).
 * `costUnits` are billing units charged per call; `cancellable` marks
 * actions whose in-flight work may be aborted.
 */
export const PRODUCT_ACTIONS = {
    competitors: {
        search: {
            endpoint: '/api/competitors',
            method: 'GET',
            costUnits: 1.0,
            cancellable: true, // Search can be cancelled
            description: 'Search for competitors in a location',
        },
        get_details: {
            endpoint: '/api/competitors/:place_id',
            method: 'GET',
            costUnits: 0.0,
            cancellable: false, // Quick lookup, not cancellable
            description: 'Get details for a specific competitor',
        },
        stream: {
            endpoint: '/api/competitors/stream',
            method: 'GET',
            costUnits: 1.0, // Same cost as regular search
            cancellable: true,
            description: 'Stream competitors in real-time',
        },
        find_email: {
            endpoint: '/api/find/email/{place_id}',
            method: 'GET',
            costUnits: 2.0, // Higher cost due to Puppeteer usage
            cancellable: true, // Long-running, can be cancelled
            description: 'Find email addresses for a business using Puppeteer',
        },
    },
    images: {
        upload: {
            endpoint: '/api/images',
            method: 'POST',
            costUnits: 2.0,
            cancellable: true,
            description: 'Upload an image',
        },
        get: {
            endpoint: '/api/images/:id',
            method: 'GET',
            costUnits: 0.05,
            cancellable: false,
            description: 'Retrieve an image',
        },
        update: {
            endpoint: '/api/images/:id',
            method: 'PUT',
            costUnits: 1.5,
            cancellable: false,
            description: 'Update image metadata',
        },
    },
    mock: {
        job: {
            endpoint: '/api/mock/job',
            method: 'POST',
            costUnits: 0.0,
            cancellable: true,
            description: 'Mock job for testing',
        },
    },
    // Add more products here as they are developed
};
|
||||
/**
 * Match a request path and method to a product and action.
 * Scans PRODUCT_ACTIONS in declaration order and returns the first action
 * whose endpoint pattern and HTTP method both match; all-null fields when
 * nothing matches.
 */
export function identifyProductAction(path, method) {
    for (const [product, actions] of Object.entries(PRODUCT_ACTIONS)) {
        const match = Object.entries(actions).find(
            ([, config]) => method === config.method && matchesRoute(path, config.endpoint)
        );
        if (match) {
            const [action, config] = match;
            return { product, action, config };
        }
    }
    return { product: null, action: null, config: null };
}
|
||||
/**
 * Check if a path matches a route pattern (supports :param and {param}
 * placeholder syntax), allowing an optional trailing slash.
 *
 * FIX: literal characters in the pattern are now regex-escaped. The
 * original only rewrote placeholders and slashes, so regex metacharacters
 * such as '.', '*', '+', '?' in an endpoint were interpreted as regex
 * operators (e.g. '.' matched any character).
 */
function matchesRoute(path, pattern) {
    // Build the regex segment by segment: placeholder segments become
    // '[^/]+'; literal segments are escaped verbatim.
    const regexPattern = pattern
        .split('/')
        .map(segment =>
            segment.startsWith(':') || /^\{.+\}$/.test(segment)
                ? '[^/]+'
                : segment.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'))
        .join('\\/');
    // Allow optional trailing slash
    const regex = new RegExp(`^${regexPattern}\\/?$`);
    return regex.test(path);
}
|
||||
/**
 * Get all products
 * Returns the product keys declared in PRODUCT_ACTIONS, in declaration order.
 */
export function getAllProducts() {
    const products = Object.keys(PRODUCT_ACTIONS);
    return products;
}
|
||||
/**
 * Get all actions for a product
 * Unknown products yield an empty list rather than throwing.
 */
export function getProductActions(product) {
    const actions = PRODUCT_ACTIONS[product] || {};
    return Object.keys(actions);
}
|
||||
/**
 * Get configuration for a specific product action
 * Returns null when either the product or the action is unknown.
 */
export function getActionConfig(product, action) {
    const productActions = PRODUCT_ACTIONS[product];
    if (!productActions) {
        return null;
    }
    return productActions[action] || null;
}
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoicHJvZHVjdHMuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvY29uZmlnL3Byb2R1Y3RzLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBOzs7R0FHRztBQWNILE1BQU0sQ0FBQyxNQUFNLGVBQWUsR0FBa0M7SUFDMUQsV0FBVyxFQUFFO1FBQ1QsTUFBTSxFQUFFO1lBQ0osUUFBUSxFQUFFLGtCQUFrQjtZQUM1QixNQUFNLEVBQUUsS0FBSztZQUNiLFNBQVMsRUFBRSxHQUFHO1lBQ2QsV0FBVyxFQUFFLElBQUksRUFBRywwQkFBMEI7WUFDOUMsV0FBVyxFQUFFLHNDQUFzQztTQUN0RDtRQUNELFdBQVcsRUFBRTtZQUNULFFBQVEsRUFBRSw0QkFBNEI7WUFDdEMsTUFBTSxFQUFFLEtBQUs7WUFDYixTQUFTLEVBQUUsR0FBRztZQUNkLFdBQVcsRUFBRSxLQUFLLEVBQUcsZ0NBQWdDO1lBQ3JELFdBQVcsRUFBRSx1Q0FBdUM7U0FDdkQ7UUFDRCxNQUFNLEVBQUU7WUFDSixRQUFRLEVBQUUseUJBQXlCO1lBQ25DLE1BQU0sRUFBRSxLQUFLO1lBQ2IsU0FBUyxFQUFFLEdBQUcsRUFBRyw4QkFBOEI7WUFDL0MsV0FBVyxFQUFFLElBQUk7WUFDakIsV0FBVyxFQUFFLGlDQUFpQztTQUNqRDtRQUNELFVBQVUsRUFBRTtZQUNSLFFBQVEsRUFBRSw0QkFBNEI7WUFDdEMsTUFBTSxFQUFFLEtBQUs7WUFDYixTQUFTLEVBQUUsR0FBRyxFQUFHLHFDQUFxQztZQUN0RCxXQUFXLEVBQUUsSUFBSSxFQUFHLGlDQUFpQztZQUNyRCxXQUFXLEVBQUUscURBQXFEO1NBQ3JFO0tBQ0o7SUFDRCxNQUFNLEVBQUU7UUFDSixNQUFNLEVBQUU7WUFDSixRQUFRLEVBQUUsYUFBYTtZQUN2QixNQUFNLEVBQUUsTUFBTTtZQUNkLFNBQVMsRUFBRSxHQUFHO1lBQ2QsV0FBVyxFQUFFLElBQUk7WUFDakIsV0FBVyxFQUFFLGlCQUFpQjtTQUNqQztRQUNELEdBQUcsRUFBRTtZQUNELFFBQVEsRUFBRSxpQkFBaUI7WUFDM0IsTUFBTSxFQUFFLEtBQUs7WUFDYixTQUFTLEVBQUUsSUFBSTtZQUNmLFdBQVcsRUFBRSxLQUFLO1lBQ2xCLFdBQVcsRUFBRSxtQkFBbUI7U0FDbkM7UUFDRCxNQUFNLEVBQUU7WUFDSixRQUFRLEVBQUUsaUJBQWlCO1lBQzNCLE1BQU0sRUFBRSxLQUFLO1lBQ2IsU0FBUyxFQUFFLEdBQUc7WUFDZCxXQUFXLEVBQUUsS0FBSztZQUNsQixXQUFXLEVBQUUsdUJBQXVCO1NBQ3ZDO0tBQ0o7SUFDRCxJQUFJLEVBQUU7UUFDRixHQUFHLEVBQUU7WUFDRCxRQUFRLEVBQUUsZUFBZTtZQUN6QixNQUFNLEVBQUUsTUFBTTtZQUNkLFNBQVMsRUFBRSxHQUFHO1lBQ2QsV0FBVyxFQUFFLElBQUk7WUFDakIsV0FBVyxFQUFFLHNCQUFzQjtTQUN0QztLQUNKO0lBQ0QsK0NBQStDO0NBQ3pDLENBQUM7QUFFWDs7R0FFRztBQUNILE1BQU0sVUFBVSxxQkFBcUIsQ0FBQyxJQUFZLEVBQUUsTUFBYztJQUs5RCxLQUFLLE1BQU0sQ0FBQyxPQUFPLEVBQUUsT0FBTyxDQUFDLElBQUksTUFBTSxDQUFDLE9BQU8sQ0FBQyxlQUFlLENBQUMsRUFBRSxDQUFDO1FBQy9ELE
tBQUssTUFBTSxDQUFDLE1BQU0sRUFBRSxNQUFNLENBQUMsSUFBSSxNQUFNLENBQUMsT0FBTyxDQUFDLE9BQU8sQ0FBQyxFQUFFLENBQUM7WUFDckQsSUFBSSxZQUFZLENBQUMsSUFBSSxFQUFFLE1BQU0sQ0FBQyxRQUFRLENBQUMsSUFBSSxNQUFNLEtBQUssTUFBTSxDQUFDLE1BQU0sRUFBRSxDQUFDO2dCQUNsRSxPQUFPLEVBQUUsT0FBTyxFQUFFLE1BQU0sRUFBRSxNQUFNLEVBQUUsQ0FBQztZQUN2QyxDQUFDO1FBQ0wsQ0FBQztJQUNMLENBQUM7SUFDRCxPQUFPLEVBQUUsT0FBTyxFQUFFLElBQUksRUFBRSxNQUFNLEVBQUUsSUFBSSxFQUFFLE1BQU0sRUFBRSxJQUFJLEVBQUUsQ0FBQztBQUN6RCxDQUFDO0FBRUQ7O0dBRUc7QUFDSCxTQUFTLFlBQVksQ0FBQyxJQUFZLEVBQUUsT0FBZTtJQUMvQywyQkFBMkI7SUFDM0Isd0dBQXdHO0lBQ3hHLE1BQU0sWUFBWSxHQUFHLE9BQU87U0FDdkIsT0FBTyxDQUFDLFVBQVUsRUFBRSxPQUFPLENBQUMsQ0FBTSw0QkFBNEI7U0FDOUQsT0FBTyxDQUFDLFlBQVksRUFBRSxPQUFPLENBQUMsQ0FBSyw2QkFBNkI7U0FDaEUsT0FBTyxDQUFDLEtBQUssRUFBRSxLQUFLLENBQUMsQ0FBQyxDQUFhLGlCQUFpQjtJQUV6RCxnQ0FBZ0M7SUFDaEMsTUFBTSxLQUFLLEdBQUcsSUFBSSxNQUFNLENBQUMsSUFBSSxZQUFZLE9BQU8sQ0FBQyxDQUFDO0lBQ2xELE9BQU8sS0FBSyxDQUFDLElBQUksQ0FBQyxJQUFJLENBQUMsQ0FBQztBQUM1QixDQUFDO0FBRUQ7O0dBRUc7QUFDSCxNQUFNLFVBQVUsY0FBYztJQUMxQixPQUFPLE1BQU0sQ0FBQyxJQUFJLENBQUMsZUFBZSxDQUFDLENBQUM7QUFDeEMsQ0FBQztBQUVEOztHQUVHO0FBQ0gsTUFBTSxVQUFVLGlCQUFpQixDQUFDLE9BQWU7SUFDN0MsT0FBTyxNQUFNLENBQUMsSUFBSSxDQUFDLGVBQWUsQ0FBQyxPQUFPLENBQUMsSUFBSSxFQUFFLENBQUMsQ0FBQztBQUN2RCxDQUFDO0FBRUQ7O0dBRUc7QUFDSCxNQUFNLFVBQVUsZUFBZSxDQUFDLE9BQWUsRUFBRSxNQUFjO0lBQzNELE9BQU8sZUFBZSxDQUFDLE9BQU8sQ0FBQyxFQUFFLENBQUMsTUFBTSxDQUFDLElBQUksSUFBSSxDQUFDO0FBQ3RELENBQUMifQ==
|
||||
2
dist-in/constants.js
Normal file
2
dist-in/constants.js
Normal file
@ -0,0 +1,2 @@
|
||||
// Fixed post UUID used as a well-known identifier — presumably referenced
// by tests/scripts; TODO confirm it exists in the seed data.
export const TEST_POST_ID = '8c1d567a-6909-4e43-b432-bd359bb10fc5';
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiY29uc3RhbnRzLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vc3JjL2NvbnN0YW50cy50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxNQUFNLENBQUMsTUFBTSxZQUFZLEdBQUcsc0NBQXNDLENBQUMifQ==
|
||||
179
dist-in/endpoints/admin.js
Normal file
179
dist-in/endpoints/admin.js
Normal file
File diff suppressed because one or more lines are too long
296
dist-in/endpoints/boss.js
Normal file
296
dist-in/endpoints/boss.js
Normal file
File diff suppressed because one or more lines are too long
69
dist-in/endpoints/stream.js
Normal file
69
dist-in/endpoints/stream.js
Normal file
@ -0,0 +1,69 @@
|
||||
import { createRouteBody } from '../products/serving/routes.js';
|
||||
import { streamSSE } from 'hono/streaming';
|
||||
import { z } from '@hono/zod-openapi';
|
||||
import { appEvents } from '../events.js';
|
||||
import { logger } from '../commons/logger.js';
|
||||
// OpenAPI route definition for the SSE stream endpoint (GET /api/stream).
// Declares a text/event-stream response; the trailing `true` argument marks
// the route as public — NOTE(review): confirm against createRouteBody's signature.
export const getStreamRoute = createRouteBody('get', '/api/stream', ['System'], 'Stream System Events', 'Subscribe to real-time updates for categories, posts, and pages.', undefined, {
    200: {
        description: 'Event Stream',
        content: {
            'text/event-stream': {
                schema: z.string()
            }
        }
    }
}, true // public
);
|
||||
// Track active connections; each entry is { id, stream } (see streamHandler).
const connectedClients = new Set();
// Single listener for the entire application: serializes the event once and
// fans it out sequentially to every connected SSE client.
const broadcastAppUpdate = async (event) => {
    const payload = JSON.stringify(event);
    for (const client of connectedClients) {
        try {
            await client.stream.writeSSE({
                event: event.kind,
                data: payload
            });
        }
        catch (err) {
            // A write failure usually means the client went away; log and
            // continue with the remaining clients.
            logger.error({ err, clientId: client.id }, 'Error broadcasting to stream');
            // Client will be removed by the onAbort handler in the stream handler
        }
    }
};
// Subscribe once at module load, so there is exactly one broadcaster no
// matter how many clients connect.
appEvents.on('app-update', broadcastAppUpdate);
|
||||
/**
 * SSE endpoint handler: registers the client in connectedClients, sends an
 * initial 'connected' event, emits periodic 'ping' heartbeats, and cleans
 * up (unregister + stop heartbeat) when the client disconnects.
 */
export const streamHandler = async (c) => {
    return streamSSE(c, async (stream) => {
        const id = crypto.randomUUID();
        const client = { id, stream };
        connectedClients.add(client);
        // Send initial connection message
        await stream.writeSSE({
            event: 'connected',
            data: JSON.stringify({ message: 'Connected to event stream', clientId: id })
        });
        // Heartbeat period from env, falling back to 30s when unset or invalid.
        // Fix: parseInt can return NaN (or a non-positive number) for a bad
        // env value, and setInterval treats NaN as ~1ms — which would spam
        // pings; validate before use.
        const parsed = parseInt(process.env.STREAM_HEARTBEAT_INTERVAL_MS || '30000', 10);
        const heartbeatInterval = Number.isFinite(parsed) && parsed > 0 ? parsed : 30000;
        // Send heartbeat to prevent idle-connection timeouts
        const interval = setInterval(async () => {
            try {
                await stream.writeSSE({ event: 'ping', data: '' });
            }
            catch (e) {
                // connection likely closed; onAbort below performs the cleanup
            }
        }, heartbeatInterval);
        // Block until the stream is aborted, then unregister and stop pinging
        await new Promise((resolve) => {
            stream.onAbort(() => {
                connectedClients.delete(client);
                clearInterval(interval);
                resolve();
            });
        });
    });
};
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoic3RyZWFtLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vLi4vc3JjL2VuZHBvaW50cy9zdHJlYW0udHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsT0FBTyxFQUFFLGVBQWUsRUFBRSxNQUFNLCtCQUErQixDQUFDO0FBRWhFLE9BQU8sRUFBRSxTQUFTLEVBQUUsTUFBTSxnQkFBZ0IsQ0FBQztBQUMzQyxPQUFPLEVBQUUsQ0FBQyxFQUFFLE1BQU0sbUJBQW1CLENBQUM7QUFDdEMsT0FBTyxFQUFFLFNBQVMsRUFBWSxNQUFNLGNBQWMsQ0FBQztBQUNuRCxPQUFPLEVBQUUsTUFBTSxFQUFFLE1BQU0sc0JBQXNCLENBQUM7QUFFOUMsTUFBTSxDQUFDLE1BQU0sY0FBYyxHQUFHLGVBQWUsQ0FDekMsS0FBSyxFQUNMLGFBQWEsRUFDYixDQUFDLFFBQVEsQ0FBQyxFQUNWLHNCQUFzQixFQUN0QixrRUFBa0UsRUFDbEUsU0FBUyxFQUNUO0lBQ0ksR0FBRyxFQUFFO1FBQ0QsV0FBVyxFQUFFLGNBQWM7UUFDM0IsT0FBTyxFQUFFO1lBQ0wsbUJBQW1CLEVBQUU7Z0JBQ2pCLE1BQU0sRUFBRSxDQUFDLENBQUMsTUFBTSxFQUFFO2FBQ3JCO1NBQ0o7S0FDSjtDQUNKLEVBQ0QsSUFBSSxDQUFDLFNBQVM7Q0FDakIsQ0FBQztBQUVGLDJCQUEyQjtBQUMzQixNQUFNLGdCQUFnQixHQUFHLElBQUksR0FBRyxFQUc1QixDQUFDO0FBRUwsNkNBQTZDO0FBQzdDLE1BQU0sa0JBQWtCLEdBQUcsS0FBSyxFQUFFLEtBQWUsRUFBRSxFQUFFO0lBQ2pELE1BQU0sT0FBTyxHQUFHLElBQUksQ0FBQyxTQUFTLENBQUMsS0FBSyxDQUFDLENBQUM7SUFDdEMsS0FBSyxNQUFNLE1BQU0sSUFBSSxnQkFBZ0IsRUFBRSxDQUFDO1FBQ3BDLElBQUksQ0FBQztZQUNELE1BQU0sTUFBTSxDQUFDLE1BQU0sQ0FBQyxRQUFRLENBQUM7Z0JBQ3pCLEtBQUssRUFBRSxLQUFLLENBQUMsSUFBSTtnQkFDakIsSUFBSSxFQUFFLE9BQU87YUFDaEIsQ0FBQyxDQUFDO1FBQ1AsQ0FBQztRQUFDLE9BQU8sR0FBRyxFQUFFLENBQUM7WUFDWCxNQUFNLENBQUMsS0FBSyxDQUFDLEVBQUUsR0FBRyxFQUFFLFFBQVEsRUFBRSxNQUFNLENBQUMsRUFBRSxFQUFFLEVBQUUsOEJBQThCLENBQUMsQ0FBQztZQUMzRSxzRUFBc0U7UUFDMUUsQ0FBQztJQUNMLENBQUM7QUFDTCxDQUFDLENBQUM7QUFFRixpQkFBaUI7QUFDakIsU0FBUyxDQUFDLEVBQUUsQ0FBQyxZQUFZLEVBQUUsa0JBQWtCLENBQUMsQ0FBQztBQUUvQyxNQUFNLENBQUMsTUFBTSxhQUFhLEdBQUcsS0FBSyxFQUFFLENBQVUsRUFBRSxFQUFFO0lBQzlDLE9BQU8sU0FBUyxDQUFDLENBQUMsRUFBRSxLQUFLLEVBQUUsTUFBTSxFQUFFLEVBQUU7UUFDakMsTUFBTSxFQUFFLEdBQUcsTUFBTSxDQUFDLFVBQVUsRUFBRSxDQUFDO1FBQy9CLE1BQU0sTUFBTSxHQUFHLEVBQUUsRUFBRSxFQUFFLE1BQU0sRUFBRSxDQUFDO1FBRTlCLGdCQUFnQixDQUFDLEdBQUcsQ0FBQyxNQUFNLENBQUMsQ0FBQztRQUM3QixrQ0FBa0M7UUFDbEMsTUFBTSxNQU
FNLENBQUMsUUFBUSxDQUFDO1lBQ2xCLEtBQUssRUFBRSxXQUFXO1lBQ2xCLElBQUksRUFBRSxJQUFJLENBQUMsU0FBUyxDQUFDLEVBQUUsT0FBTyxFQUFFLDJCQUEyQixFQUFFLFFBQVEsRUFBRSxFQUFFLEVBQUUsQ0FBQztTQUMvRSxDQUFDLENBQUM7UUFFSCx5Q0FBeUM7UUFDekMsSUFBSSxRQUF3QixDQUFDO1FBQzdCLE1BQU0saUJBQWlCLEdBQUcsUUFBUSxDQUFDLE9BQU8sQ0FBQyxHQUFHLENBQUMsNEJBQTRCLElBQUksT0FBTyxFQUFFLEVBQUUsQ0FBQyxDQUFDO1FBRTVGLHFDQUFxQztRQUNyQyxRQUFRLEdBQUcsV0FBVyxDQUFDLEtBQUssSUFBSSxFQUFFO1lBQzlCLElBQUksQ0FBQztnQkFDRCxNQUFNLE1BQU0sQ0FBQyxRQUFRLENBQUMsRUFBRSxLQUFLLEVBQUUsTUFBTSxFQUFFLElBQUksRUFBRSxFQUFFLEVBQUUsQ0FBQyxDQUFDO1lBQ3ZELENBQUM7WUFBQyxPQUFPLENBQUMsRUFBRSxDQUFDO2dCQUNULDJCQUEyQjtZQUMvQixDQUFDO1FBQ0wsQ0FBQyxFQUFFLGlCQUFpQixDQUFDLENBQUM7UUFFdEIsbUNBQW1DO1FBQ25DLE1BQU0sSUFBSSxPQUFPLENBQU8sQ0FBQyxPQUFPLEVBQUUsRUFBRTtZQUNoQyxNQUFNLENBQUMsT0FBTyxDQUFDLEdBQUcsRUFBRTtnQkFDaEIsZ0JBQWdCLENBQUMsTUFBTSxDQUFDLE1BQU0sQ0FBQyxDQUFDO2dCQUNoQyxhQUFhLENBQUMsUUFBUSxDQUFDLENBQUM7Z0JBQ3hCLE9BQU8sRUFBRSxDQUFDO1lBQ2QsQ0FBQyxDQUFDLENBQUM7UUFDUCxDQUFDLENBQUMsQ0FBQztJQUNQLENBQUMsQ0FBQyxDQUFDO0FBQ1AsQ0FBQyxDQUFDIn0=
|
||||
27
dist-in/events.js
Normal file
27
dist-in/events.js
Normal file
@ -0,0 +1,27 @@
|
||||
import { EventEmitter } from 'events';
|
||||
class AppEvents extends EventEmitter {
|
||||
static instance;
|
||||
constructor() {
|
||||
super();
|
||||
// this.setMaxListeners(10); // Default is fine now
|
||||
}
|
||||
static getInstance() {
|
||||
if (!AppEvents.instance) {
|
||||
AppEvents.instance = new AppEvents();
|
||||
}
|
||||
return AppEvents.instance;
|
||||
}
|
||||
emitUpdate(type, action, data, kind = 'cache') {
|
||||
const event = {
|
||||
kind,
|
||||
type,
|
||||
action,
|
||||
id: data?.id ?? null,
|
||||
data,
|
||||
timestamp: Date.now()
|
||||
};
|
||||
this.emit('app-update', event);
|
||||
}
|
||||
}
|
||||
export const appEvents = AppEvents.getInstance();
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiZXZlbnRzLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vc3JjL2V2ZW50cy50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxPQUFPLEVBQUUsWUFBWSxFQUFFLE1BQU0sUUFBUSxDQUFDO0FBY3RDLE1BQU0sU0FBVSxTQUFRLFlBQVk7SUFDeEIsTUFBTSxDQUFDLFFBQVEsQ0FBWTtJQUVuQztRQUNJLEtBQUssRUFBRSxDQUFDO1FBQ1IsbURBQW1EO0lBQ3ZELENBQUM7SUFFTSxNQUFNLENBQUMsV0FBVztRQUNyQixJQUFJLENBQUMsU0FBUyxDQUFDLFFBQVEsRUFBRSxDQUFDO1lBQ3RCLFNBQVMsQ0FBQyxRQUFRLEdBQUcsSUFBSSxTQUFTLEVBQUUsQ0FBQztRQUN6QyxDQUFDO1FBQ0QsT0FBTyxTQUFTLENBQUMsUUFBUSxDQUFDO0lBQzlCLENBQUM7SUFFTSxVQUFVLENBQUMsSUFBZSxFQUFFLE1BQXNDLEVBQUUsSUFBUyxFQUFFLE9BQXlCLE9BQU87UUFDbEgsTUFBTSxLQUFLLEdBQWE7WUFDcEIsSUFBSTtZQUNKLElBQUk7WUFDSixNQUFNO1lBQ04sRUFBRSxFQUFFLElBQUksRUFBRSxFQUFFLElBQUksSUFBSTtZQUNwQixJQUFJO1lBQ0osU0FBUyxFQUFFLElBQUksQ0FBQyxHQUFHLEVBQUU7U0FDeEIsQ0FBQztRQUNGLElBQUksQ0FBQyxJQUFJLENBQUMsWUFBWSxFQUFFLEtBQUssQ0FBQyxDQUFDO0lBQ25DLENBQUM7Q0FDSjtBQUVELE1BQU0sQ0FBQyxNQUFNLFNBQVMsR0FBRyxTQUFTLENBQUMsV0FBVyxFQUFFLENBQUMifQ==
|
||||
221
dist-in/index.js
Normal file
221
dist-in/index.js
Normal file
File diff suppressed because one or more lines are too long
1208
dist-in/integrations/supabase/schemas.js
Normal file
1208
dist-in/integrations/supabase/schemas.js
Normal file
File diff suppressed because one or more lines are too long
41
dist-in/integrations/supabase/types.js
Normal file
41
dist-in/integrations/supabase/types.js
Normal file
@ -0,0 +1,41 @@
|
||||
// Enum value lists for the Supabase/Postgres schemas. This is generated
// compiler output (source: src/integrations/supabase/types.ts per the
// sourcemap) — do not hand-edit; regenerate from the source instead.
export const Constants = {
    graphql_public: {
        Enums: {},
    },
    public: {
        Enums: {
            app_permission: [
                "pictures.read",
                "pictures.create",
                "pictures.update",
                "pictures.delete",
                "collections.read",
                "collections.create",
                "collections.update",
                "collections.delete",
                "comments.read",
                "comments.create",
                "comments.update",
                "comments.delete",
                "organization.manage",
            ],
            app_role: ["owner", "admin", "member", "viewer"],
            cast_kind: ["implicit", "explicit", "lossy"],
            category_relation_type: [
                "generalization",
                "material_usage",
                "domain",
                "process_step",
                "standard",
                "other",
            ],
            category_visibility: ["public", "unlisted", "private"],
            collaborator_role: ["viewer", "editor", "owner"],
            layout_visibility: ["public", "private", "listed", "custom"],
            translation_status: ["draft", "machine", "reviewed", "published"],
            type_kind: ["primitive", "enum", "flags", "structure", "alias", "field"],
            type_visibility: ["public", "private", "custom"],
        },
    },
};
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoidHlwZXMuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi8uLi9zcmMvaW50ZWdyYXRpb25zL3N1cGFiYXNlL3R5cGVzLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQStyREEsTUFBTSxDQUFDLE1BQU0sU0FBUyxHQUFHO0lBQ3ZCLGNBQWMsRUFBRTtRQUNkLEtBQUssRUFBRSxFQUFFO0tBQ1Y7SUFDRCxNQUFNLEVBQUU7UUFDTixLQUFLLEVBQUU7WUFDTCxjQUFjLEVBQUU7Z0JBQ2QsZUFBZTtnQkFDZixpQkFBaUI7Z0JBQ2pCLGlCQUFpQjtnQkFDakIsaUJBQWlCO2dCQUNqQixrQkFBa0I7Z0JBQ2xCLG9CQUFvQjtnQkFDcEIsb0JBQW9CO2dCQUNwQixvQkFBb0I7Z0JBQ3BCLGVBQWU7Z0JBQ2YsaUJBQWlCO2dCQUNqQixpQkFBaUI7Z0JBQ2pCLGlCQUFpQjtnQkFDakIscUJBQXFCO2FBQ3RCO1lBQ0QsUUFBUSxFQUFFLENBQUMsT0FBTyxFQUFFLE9BQU8sRUFBRSxRQUFRLEVBQUUsUUFBUSxDQUFDO1lBQ2hELFNBQVMsRUFBRSxDQUFDLFVBQVUsRUFBRSxVQUFVLEVBQUUsT0FBTyxDQUFDO1lBQzVDLHNCQUFzQixFQUFFO2dCQUN0QixnQkFBZ0I7Z0JBQ2hCLGdCQUFnQjtnQkFDaEIsUUFBUTtnQkFDUixjQUFjO2dCQUNkLFVBQVU7Z0JBQ1YsT0FBTzthQUNSO1lBQ0QsbUJBQW1CLEVBQUUsQ0FBQyxRQUFRLEVBQUUsVUFBVSxFQUFFLFNBQVMsQ0FBQztZQUN0RCxpQkFBaUIsRUFBRSxDQUFDLFFBQVEsRUFBRSxRQUFRLEVBQUUsT0FBTyxDQUFDO1lBQ2hELGlCQUFpQixFQUFFLENBQUMsUUFBUSxFQUFFLFNBQVMsRUFBRSxRQUFRLEVBQUUsUUFBUSxDQUFDO1lBQzVELGtCQUFrQixFQUFFLENBQUMsT0FBTyxFQUFFLFNBQVMsRUFBRSxVQUFVLEVBQUUsV0FBVyxDQUFDO1lBQ2pFLFNBQVMsRUFBRSxDQUFDLFdBQVcsRUFBRSxNQUFNLEVBQUUsT0FBTyxFQUFFLFdBQVcsRUFBRSxPQUFPLEVBQUUsT0FBTyxDQUFDO1lBQ3hFLGVBQWUsRUFBRSxDQUFDLFFBQVEsRUFBRSxTQUFTLEVBQUUsUUFBUSxDQUFDO1NBQ2pEO0tBQ0Y7Q0FDTyxDQUFBIn0=
|
||||
35
dist-in/jobs/boss/AbstractWorker.js
Normal file
35
dist-in/jobs/boss/AbstractWorker.js
Normal file
@ -0,0 +1,35 @@
|
||||
import { logger } from '../../commons/logger.js';
|
||||
/**
 * Base class for pg-boss workers.
 *
 * Subclasses are expected to provide `queueName`, `process(job)` and
 * `calculateCost(job, result)` (declared in the original TypeScript source;
 * not visible in this compiled output). `handler` is the entry point wired
 * into pg-boss.
 */
export class AbstractWorker {
    queueOptions; // pg-boss QueueOptions
    emitter; // optional emitter notified with 'job:complete' on success
    // Main entry point for pg-boss
    async handler(jobOrJobs) {
        // pg-boss may deliver a single job or a batch depending on
        // configuration; only the first entry of a batch is processed.
        const job = Array.isArray(jobOrJobs) ? jobOrJobs[0] : jobOrJobs;
        // Safety check
        if (!job) {
            logger.error(`[${this.queueName}] Received null or empty job`);
            return;
        }
        const jobId = job.id;
        logger.info(`[${this.queueName}] Starting job ${jobId}`);
        try {
            // Execute business logic
            const result = await this.process(job);
            // Cost accounting hook. Fix: the returned value (and the former
            // `usageId` local) were never used — the unused bindings are
            // removed; the call is kept in case subclasses rely on its
            // side effects.
            this.calculateCost(job, result);
            if (this.emitter) {
                this.emitter.emit('job:complete', {
                    jobId,
                    result
                });
            }
            return result;
        }
        catch (error) {
            logger.error({ err: error }, `[${this.queueName}] Job failed`);
            throw error; // Let pg-boss handle retry/failure
        }
    }
}
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiQWJzdHJhY3RXb3JrZXIuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi8uLi9zcmMvam9icy9ib3NzL0Fic3RyYWN0V29ya2VyLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUVBLE9BQU8sRUFBRSxNQUFNLEVBQUUsTUFBTSx5QkFBeUIsQ0FBQztBQUlqRCxNQUFNLE9BQWdCLGNBQWM7SUFFdkIsWUFBWSxDQUFPLENBQUMsdUJBQXVCO0lBQzFDLE9BQU8sQ0FBZ0I7SUFRakMsK0JBQStCO0lBQ3hCLEtBQUssQ0FBQyxPQUFPLENBQUMsU0FBb0M7UUFFckQsTUFBTSxHQUFHLEdBQUcsS0FBSyxDQUFDLE9BQU8sQ0FBQyxTQUFTLENBQUMsQ0FBQyxDQUFDLENBQUMsU0FBUyxDQUFDLENBQUMsQ0FBQyxDQUFDLENBQUMsQ0FBQyxTQUFTLENBQUM7UUFFaEUsZUFBZTtRQUNmLElBQUksQ0FBQyxHQUFHLEVBQUUsQ0FBQztZQUNQLE1BQU0sQ0FBQyxLQUFLLENBQUMsSUFBSSxJQUFJLENBQUMsU0FBUyw4QkFBOEIsQ0FBQyxDQUFDO1lBQy9ELE9BQU87UUFDWCxDQUFDO1FBRUQsTUFBTSxLQUFLLEdBQUcsR0FBRyxDQUFDLEVBQUUsQ0FBQztRQUNyQixNQUFNLE9BQU8sR0FBSSxHQUFHLENBQUMsSUFBWSxFQUFFLE9BQU8sQ0FBQztRQUUzQyxNQUFNLENBQUMsSUFBSSxDQUFDLElBQUksSUFBSSxDQUFDLFNBQVMsa0JBQWtCLEtBQUssRUFBRSxDQUFDLENBQUM7UUFFekQsSUFBSSxDQUFDO1lBQ0QsNEJBQTRCO1lBQzVCLE1BQU0sTUFBTSxHQUFHLE1BQU0sSUFBSSxDQUFDLE9BQU8sQ0FBQyxHQUFHLENBQUMsQ0FBQztZQUV2QyxvQkFBb0I7WUFDcEIsTUFBTSxJQUFJLEdBQUcsSUFBSSxDQUFDLGFBQWEsQ0FBQyxHQUFHLEVBQUUsTUFBTSxDQUFDLENBQUM7WUFFN0MsSUFBSSxJQUFJLENBQUMsT0FBTyxFQUFFLENBQUM7Z0JBQ2YsSUFBSSxDQUFDLE9BQU8sQ0FBQyxJQUFJLENBQUMsY0FBYyxFQUFFO29CQUM5QixLQUFLO29CQUNMLE1BQU07aUJBQ1QsQ0FBQyxDQUFDO1lBQ1AsQ0FBQztZQUdELE9BQU8sTUFBTSxDQUFDO1FBRWxCLENBQUM7UUFBQyxPQUFPLEtBQVUsRUFBRSxDQUFDO1lBRWxCLE1BQU0sQ0FBQyxLQUFLLENBQUMsRUFBRSxHQUFHLEVBQUUsS0FBSyxFQUFFLEVBQUUsSUFBSSxJQUFJLENBQUMsU0FBUyxjQUFjLENBQUMsQ0FBQztZQUUvRCxNQUFNLEtBQUssQ0FBQyxDQUFDLG1DQUFtQztRQUNwRCxDQUFDO0lBQ0wsQ0FBQztDQUNKIn0=
|
||||
41
dist-in/jobs/boss/client.js
Normal file
41
dist-in/jobs/boss/client.js
Normal file
@ -0,0 +1,41 @@
|
||||
import { PgBoss } from 'pg-boss';
|
||||
import { logger } from '../../commons/logger.js';
|
||||
// Postgres connection string; when absent the whole job system is disabled.
const connectionString = process.env.DATABASE_URL;
if (!connectionString) {
    logger.warn('DATABASE_URL not found, PgBoss will not be initialized');
}
// Shared PgBoss instance, or null when DATABASE_URL is not configured.
// NOTE(review): __test__enableSpies looks like a test-only flag — confirm it
// is intended to be enabled in all environments.
export const boss = connectionString ? new PgBoss({
    connectionString,
    __test__enableSpies: true
}) : null;
|
||||
// Last startup failure, exposed so callers can inspect why jobs are down.
export let bossInitError = null;
/**
 * Start PgBoss and attach its error listener.
 * Resolves with the boss instance on success; resolves with undefined when
 * PgBoss is not configured or fails to start (the failure is recorded in
 * `bossInitError` instead of being thrown).
 */
export async function startBoss() {
    if (!boss)
        return;
    boss.on('error', (error) => logger.error({ error }, 'PgBoss error'));
    try {
        await boss.start();
        logger.info('PgBoss started');
        return boss;
    }
    catch (error) {
        bossInitError = error;
        logger.error({ error }, 'Failed to start PgBoss');
        // NOTE(review): debug artifact — dumps the failure to a file in cwd;
        // consider removing once the startup issue is resolved.
        const fs = await import('fs');
        fs.writeFileSync('debug_pgboss_error.txt', JSON.stringify(error, Object.getOwnPropertyNames(error)));
    }
}
|
||||
/**
 * Gracefully stop PgBoss, waiting up to 5s for in-flight work.
 * No-op when PgBoss was never initialized (no DATABASE_URL).
 *
 * Fix: log via the module's `logger` (as startBoss does) instead of the
 * bare console, so shutdown messages reach the same sink as the rest of
 * this module's logs.
 */
export async function stopBoss() {
    if (!boss) {
        logger.info('PgBoss not initialized, skipping stop.');
        return;
    }
    try {
        await boss.stop({ timeout: 5000 }); // 5s timeout
        logger.info('PgBoss stopped');
    }
    catch (error) {
        logger.error({ error }, 'Failed to stop PgBoss');
    }
}
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiY2xpZW50LmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vLi4vLi4vc3JjL2pvYnMvYm9zcy9jbGllbnQudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsT0FBTyxFQUFFLE1BQU0sRUFBRSxNQUFNLFNBQVMsQ0FBQztBQUNqQyxPQUFPLEVBQUUsTUFBTSxFQUFFLE1BQU0seUJBQXlCLENBQUM7QUFFakQsTUFBTSxnQkFBZ0IsR0FBRyxPQUFPLENBQUMsR0FBRyxDQUFDLFlBQVksQ0FBQztBQUVsRCxJQUFJLENBQUMsZ0JBQWdCLEVBQUUsQ0FBQztJQUNwQixNQUFNLENBQUMsSUFBSSxDQUFDLHdEQUF3RCxDQUFDLENBQUM7QUFDMUUsQ0FBQztBQUVELE1BQU0sQ0FBQyxNQUFNLElBQUksR0FBRyxnQkFBZ0IsQ0FBQyxDQUFDLENBQUMsSUFBSSxNQUFNLENBQUM7SUFDOUMsZ0JBQWdCO0lBQ2hCLG1CQUFtQixFQUFFLElBQUk7Q0FDckIsQ0FBQyxDQUFDLENBQUMsQ0FBQyxJQUFJLENBQUM7QUFDakIsTUFBTSxDQUFDLElBQUksYUFBYSxHQUFpQixJQUFJLENBQUM7QUFFOUMsTUFBTSxDQUFDLEtBQUssVUFBVSxTQUFTO0lBQzNCLElBQUksQ0FBQyxJQUFJO1FBQUUsT0FBTztJQUVsQixJQUFJLENBQUMsRUFBRSxDQUFDLE9BQU8sRUFBRSxDQUFDLEtBQVksRUFBRSxFQUFFLENBQUMsTUFBTSxDQUFDLEtBQUssQ0FBQyxFQUFFLEtBQUssRUFBRSxFQUFFLGNBQWMsQ0FBQyxDQUFDLENBQUM7SUFFNUUsSUFBSSxDQUFDO1FBQ0QsTUFBTSxJQUFJLENBQUMsS0FBSyxFQUFFLENBQUM7UUFDbkIsTUFBTSxDQUFDLElBQUksQ0FBQyxnQkFBZ0IsQ0FBQyxDQUFDO1FBQzlCLE9BQU8sSUFBSSxDQUFDO0lBQ2hCLENBQUM7SUFBQyxPQUFPLEtBQVUsRUFBRSxDQUFDO1FBQ2xCLGFBQWEsR0FBRyxLQUFLLENBQUM7UUFDdEIsTUFBTSxDQUFDLEtBQUssQ0FBQyxFQUFFLEtBQUssRUFBRSxFQUFFLHdCQUF3QixDQUFDLENBQUM7UUFDbEQsTUFBTSxFQUFFLEdBQUcsTUFBTSxNQUFNLENBQUMsSUFBSSxDQUFDLENBQUM7UUFDOUIsRUFBRSxDQUFDLGFBQWEsQ0FBQyx3QkFBd0IsRUFBRSxJQUFJLENBQUMsU0FBUyxDQUFDLEtBQUssRUFBRSxNQUFNLENBQUMsbUJBQW1CLENBQUMsS0FBSyxDQUFDLENBQUMsQ0FBQyxDQUFDO0lBQ3pHLENBQUM7QUFDTCxDQUFDO0FBRUQsTUFBTSxDQUFDLEtBQUssVUFBVSxRQUFRO0lBQzFCLElBQUksQ0FBQyxJQUFJLEVBQUUsQ0FBQztRQUNSLE9BQU8sQ0FBQyxJQUFJLENBQUMsd0NBQXdDLENBQUMsQ0FBQTtRQUN0RCxPQUFNO0lBQ1YsQ0FBQztJQUNELElBQUksQ0FBQztRQUNELE1BQU0sSUFBSSxDQUFDLElBQUksQ0FBQyxFQUFFLE9BQU8sRUFBRSxJQUFJLEVBQUUsQ0FBQyxDQUFDLENBQUMsYUFBYTtRQUNqRCxPQUFPLENBQUMsSUFBSSxDQUFDLGdCQUFnQixDQUFDLENBQUM7SUFDbkMsQ0FBQztJQUFDLE9BQU8sS0FBSyxFQUFFLENBQUM7UUFDYixPQUFPLENBQUMsS0FBSyxDQUFDLEVBQUUsS0FBSyxFQU
FFLEVBQUUsdUJBQXVCLENBQUMsQ0FBQztJQUN0RCxDQUFDO0FBQ0wsQ0FBQyJ9
|
||||
13
dist-in/jobs/boss/registry.js
Normal file
13
dist-in/jobs/boss/registry.js
Normal file
@ -0,0 +1,13 @@
|
||||
export class WorkerRegistry {
|
||||
static workers = new Map();
|
||||
static register(queueName, handler, options) {
|
||||
this.workers.set(queueName, { queueName, handler, options });
|
||||
}
|
||||
static get(queueName) {
|
||||
return this.workers.get(queueName);
|
||||
}
|
||||
static getAll() {
|
||||
return Array.from(this.workers.values());
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoicmVnaXN0cnkuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi8uLi9zcmMvam9icy9ib3NzL3JlZ2lzdHJ5LnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQVVBLE1BQU0sT0FBTyxjQUFjO0lBQ2YsTUFBTSxDQUFDLE9BQU8sR0FBOEIsSUFBSSxHQUFHLEVBQUUsQ0FBQztJQUU5RCxNQUFNLENBQUMsUUFBUSxDQUFDLFNBQWlCLEVBQUUsT0FBc0IsRUFBRSxPQUFhO1FBQ3BFLElBQUksQ0FBQyxPQUFPLENBQUMsR0FBRyxDQUFDLFNBQVMsRUFBRSxFQUFFLFNBQVMsRUFBRSxPQUFPLEVBQUUsT0FBTyxFQUFFLENBQUMsQ0FBQztJQUNqRSxDQUFDO0lBRUQsTUFBTSxDQUFDLEdBQUcsQ0FBQyxTQUFpQjtRQUN4QixPQUFPLElBQUksQ0FBQyxPQUFPLENBQUMsR0FBRyxDQUFDLFNBQVMsQ0FBQyxDQUFDO0lBQ3ZDLENBQUM7SUFFRCxNQUFNLENBQUMsTUFBTTtRQUNULE9BQU8sS0FBSyxDQUFDLElBQUksQ0FBQyxJQUFJLENBQUMsT0FBTyxDQUFDLE1BQU0sRUFBRSxDQUFDLENBQUM7SUFDN0MsQ0FBQyJ9
|
||||
108
dist-in/jobs/boss/search/SearchWorker.js
Normal file
108
dist-in/jobs/boss/search/SearchWorker.js
Normal file
File diff suppressed because one or more lines are too long
25
dist-in/jobs/boss/workers.js
Normal file
25
dist-in/jobs/boss/workers.js
Normal file
@ -0,0 +1,25 @@
|
||||
import { boss } from './client.js';
|
||||
import { logger } from '@/commons/logger.js';
|
||||
// Queue name for the mock/testing job worker.
export const QUEUE_MOCK_JOB = 'mock-job';
/**
 * Create the mock queue and attach its worker. The worker sleeps for
 * `delayMs` (default 100) and optionally fails when `shouldFail` is set.
 * Does nothing when PgBoss is not configured.
 */
export async function registerMockWorkers() {
    if (!boss)
        return;
    // Product workers are now registered by the products themselves in AbstractProduct.start()
    await boss.createQueue(QUEUE_MOCK_JOB);
    await boss.work(QUEUE_MOCK_JOB, async (jobs) => {
        // PgBoss might pass an array of jobs or a single job depending on config/version
        const job = Array.isArray(jobs) ? jobs[0] : jobs;
        const payload = job.data || {};
        const { delayMs = 100, shouldFail = false } = payload;
        const jobId = job.id;
        logger.info({ jobId, data: payload }, 'Processing PgBoss mock job');
        // simulate work
        await new Promise((done) => setTimeout(done, delayMs));
        if (shouldFail) {
            throw new Error('Simulated PgBoss job failure');
        }
        logger.info({ jobId }, 'PgBoss mock job completed');
        return { success: true };
    });
    logger.info('PgBoss workers registered');
}
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoid29ya2Vycy5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uLy4uL3NyYy9qb2JzL2Jvc3Mvd29ya2Vycy50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxPQUFPLEVBQUUsSUFBSSxFQUFFLE1BQU0sYUFBYSxDQUFDO0FBRW5DLE9BQU8sRUFBRSxNQUFNLEVBQUUsTUFBTSxxQkFBcUIsQ0FBQztBQUU3QyxNQUFNLENBQUMsTUFBTSxjQUFjLEdBQUcsVUFBVSxDQUFDO0FBUXpDLE1BQU0sQ0FBQyxLQUFLLFVBQVUsbUJBQW1CO0lBQ3JDLElBQUksQ0FBQyxJQUFJO1FBQUUsT0FBTztJQUVsQiwyRkFBMkY7SUFFM0YsTUFBTSxJQUFJLENBQUMsV0FBVyxDQUFDLGNBQWMsQ0FBQyxDQUFDO0lBQ3ZDLE1BQU0sSUFBSSxDQUFDLElBQUksQ0FBYyxjQUFjLEVBQUUsS0FBSyxFQUFFLElBQVMsRUFBRSxFQUFFO1FBQzdELGlGQUFpRjtRQUNqRixNQUFNLEdBQUcsR0FBRyxLQUFLLENBQUMsT0FBTyxDQUFDLElBQUksQ0FBQyxDQUFDLENBQUMsQ0FBQyxJQUFJLENBQUMsQ0FBQyxDQUFDLENBQUMsQ0FBQyxDQUFDLElBQUksQ0FBQztRQUVqRCxNQUFNLElBQUksR0FBRyxHQUFHLENBQUMsSUFBSSxJQUFJLEVBQUUsQ0FBQztRQUM1QixNQUFNLEVBQUUsT0FBTyxHQUFHLEdBQUcsRUFBRSxVQUFVLEdBQUcsS0FBSyxFQUFFLEdBQUcsSUFBSSxDQUFDO1FBQ25ELE1BQU0sS0FBSyxHQUFHLEdBQUcsQ0FBQyxFQUFFLENBQUM7UUFFckIsTUFBTSxDQUFDLElBQUksQ0FBQyxFQUFFLEtBQUssRUFBRSxJQUFJLEVBQUUsRUFBRSw0QkFBNEIsQ0FBQyxDQUFDO1FBRTNELE1BQU0sSUFBSSxPQUFPLENBQUMsT0FBTyxDQUFDLEVBQUUsQ0FBQyxVQUFVLENBQUMsT0FBTyxFQUFFLE9BQU8sQ0FBQyxDQUFDLENBQUM7UUFFM0QsSUFBSSxVQUFVLEVBQUUsQ0FBQztZQUNiLE1BQU0sSUFBSSxLQUFLLENBQUMsOEJBQThCLENBQUMsQ0FBQztRQUNwRCxDQUFDO1FBRUQsTUFBTSxDQUFDLElBQUksQ0FBQyxFQUFFLEtBQUssRUFBRSxFQUFFLDJCQUEyQixDQUFDLENBQUM7UUFDcEQsT0FBTyxFQUFFLE9BQU8sRUFBRSxJQUFJLEVBQUUsQ0FBQztJQUM3QixDQUFDLENBQUMsQ0FBQztJQUVILE1BQU0sQ0FBQyxJQUFJLENBQUMsMkJBQTJCLENBQUMsQ0FBQztBQUM3QyxDQUFDIn0=
|
||||
5
dist-in/lib/analytics-emitter.js
Normal file
5
dist-in/lib/analytics-emitter.js
Normal file
@ -0,0 +1,5 @@
|
||||
import { EventEmitter } from 'events';
|
||||
// Minimal EventEmitter subclass serving as the app-wide analytics event bus.
class AnalyticsEmitter extends EventEmitter {
}
// Shared singleton; the analytics middleware emits 'log' entries on it for
// real-time streaming consumers.
export const analyticsEmitter = new AnalyticsEmitter();
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiYW5hbHl0aWNzLWVtaXR0ZXIuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvbGliL2FuYWx5dGljcy1lbWl0dGVyLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLE9BQU8sRUFBRSxZQUFZLEVBQUUsTUFBTSxRQUFRLENBQUM7QUFFdEMsTUFBTSxnQkFBaUIsU0FBUSxZQUFZO0NBQUk7QUFFL0MsTUFBTSxDQUFDLE1BQU0sZ0JBQWdCLEdBQUcsSUFBSSxnQkFBZ0IsRUFBRSxDQUFDIn0=
|
||||
114
dist-in/middleware/analytics.js
Normal file
114
dist-in/middleware/analytics.js
Normal file
@ -0,0 +1,114 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { analyticsEmitter } from '../lib/analytics-emitter.js';
|
||||
// import { isBotRequest, isAIRequest } from '../products/serving/bots.js';
|
||||
// Append-only JSONL file where request analytics entries are written.
const ANALYTICS_FILE = path.resolve(process.cwd(), 'logs/analytics.jsonl');
// Extensions to ignore (static assets are not tracked)
const IGNORED_EXTENSIONS = new Set([
    '.js', '.css', '.png', '.jpg', '.jpeg', '.gif', '.ico', '.svg', '.woff', '.woff2', '.ttf', '.eot', '.map'
]);
// Fallback geo result used when reverse geolocation is disabled or fails.
export const REVERSE_DEFAULT = { continent: 'unknown', countryName: 'unknown', city: 'unknown' };
// On-disk persistence for IP -> geo lookups.
const GEO_CACHE_FILE = path.resolve(process.cwd(), 'cache/geoip.json');
// Simple in-memory cache to reduce disk I/O, initialized on first use
let geoCache = null;
|
||||
// Load the on-disk geo cache into memory on first use; later calls return
// the memoized object. Falls back to an empty cache on any read/parse error.
const loadGeoCache = () => {
    if (geoCache) {
        return geoCache;
    }
    geoCache = {};
    try {
        if (fs.existsSync(GEO_CACHE_FILE)) {
            geoCache = JSON.parse(fs.readFileSync(GEO_CACHE_FILE, 'utf-8'));
        }
    }
    catch (e) {
        console.error('Error loading geo cache', e);
        geoCache = {};
    }
    return geoCache;
};
|
||||
// Record a geo lookup for `ip` and persist the entire cache to disk
// asynchronously (fire-and-forget, so callers are never blocked).
const saveGeoCache = (ip, data) => {
    geoCache = geoCache ?? {};
    geoCache[ip] = data;
    // Ensure directory exists
    const dir = path.dirname(GEO_CACHE_FILE);
    if (!fs.existsSync(dir)) {
        fs.mkdirSync(dir, { recursive: true });
    }
    // Write to file (async to not block)
    fs.promises
        .writeFile(GEO_CACHE_FILE, JSON.stringify(geoCache, null, 2))
        .catch(err => {
            console.error('Error saving geo cache', err);
        });
};
|
||||
// Reverse-geolocate an IP address. The live lookup (bigdatacloud API plus
// the on-disk cache) is currently disabled — every call returns
// REVERSE_DEFAULT. `opts` is accepted for interface compatibility but unused.
export const reverse = async (ip, opts) => {
    return REVERSE_DEFAULT;
    /*
    const cache = loadGeoCache();
    if (cache && cache[ip]) {
        return cache[ip];
    }

    const config = CONFIG_DEFAULT() as any
    try {
        const q = `https://api-bdc.net/data/ip-geolocation?ip=${ip}&localityLanguage=en&key=${config.bigdata.key}`
        const ret = await axios.get(q) || { data: REVERSE_DEFAULT }
        const data = ret.data || REVERSE_DEFAULT
        saveGeoCache(ip, data);
        return data;
    } catch (e: any) {
        logger.error('Error reverse geocoding', e.message)
        return REVERSE_DEFAULT
    }
    */
};
|
||||
/**
 * Request-analytics middleware (Hono style: (c, next)).
 *
 * Waits for the downstream handlers so the response status is available,
 * then records one JSONL entry per request. The recording itself is
 * non-blocking: the file append is fire-and-forget, and any failure in
 * this middleware is swallowed so it can never break a response.
 * Static assets (by extension or /assets/ / /static/ prefix) are skipped.
 */
export async function analyticsMiddleware(c, next) {
    // Run the request first so c.res.status reflects the real outcome.
    await next();
    try {
        const url = new URL(c.req.url);
        const pathname = url.pathname;
        const extension = path.extname(pathname).toLowerCase();
        // Filter static assets
        if (IGNORED_EXTENSIONS.has(extension)) {
            return;
        }
        // Additional check for common static paths if they don't have extensions
        if (pathname.startsWith('/assets/') || pathname.startsWith('/static/')) {
            return;
        }
        // NOTE(review): falls back to a hardcoded public IP when no proxy
        // header is present — presumably a dev placeholder; confirm.
        const ip = c.req.header('x-forwarded-for') || c.req.header('cf-connecting-ip') || '92.176.215.140';
        const geo = REVERSE_DEFAULT; // || ip !== 'unknown' ? await reverse(ip, CONFIG_DEFAULT()) : REVERSE_DEFAULT
        const userAgent = c.req.header('user-agent');
        const entry = {
            timestamp: new Date().toISOString(),
            method: c.req.method,
            path: pathname,
            status: c.res.status,
            ip,
            userAgent,
            // isBot: isBotRequest(userAgent),
            // isAI: isAIRequest(userAgent),
            referer: c.req.header('referer'),
            userId: c.get('userId'),
            geo
        };
        const line = JSON.stringify(entry) + '\n';
        // Emit event for real-time streaming
        analyticsEmitter.emit('log', entry);
        // Fire and forget write
        fs.promises.appendFile(ANALYTICS_FILE, line).catch(err => {
            console.error('Failed to write to analytics file:', err);
        });
    }
    catch (err) {
        console.error('Error in analytics middleware:', err);
    }
}
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiYW5hbHl0aWNzLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vLi4vc3JjL21pZGRsZXdhcmUvYW5hbHl0aWNzLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUNBLE9BQU8sRUFBRSxNQUFNLElBQUksQ0FBQztBQUNwQixPQUFPLElBQUksTUFBTSxNQUFNLENBQUM7QUFFeEIsT0FBTyxFQUFFLGdCQUFnQixFQUFFLE1BQU0sNkJBQTZCLENBQUM7QUFFL0QsMkVBQTJFO0FBRTNFLE1BQU0sY0FBYyxHQUFHLElBQUksQ0FBQyxPQUFPLENBQUMsT0FBTyxDQUFDLEdBQUcsRUFBRSxFQUFFLHNCQUFzQixDQUFDLENBQUM7QUFFM0UsdUJBQXVCO0FBQ3ZCLE1BQU0sa0JBQWtCLEdBQUcsSUFBSSxHQUFHLENBQUM7SUFDL0IsS0FBSyxFQUFFLE1BQU0sRUFBRSxNQUFNLEVBQUUsTUFBTSxFQUFFLE9BQU8sRUFBRSxNQUFNLEVBQUUsTUFBTSxFQUFFLE1BQU0sRUFBRSxPQUFPLEVBQUUsUUFBUSxFQUFFLE1BQU0sRUFBRSxNQUFNLEVBQUUsTUFBTTtDQUM1RyxDQUFDLENBQUM7QUFDSCxNQUFNLENBQUMsTUFBTSxlQUFlLEdBQUcsRUFBRSxTQUFTLEVBQUUsU0FBUyxFQUFFLFdBQVcsRUFBRSxTQUFTLEVBQUUsSUFBSSxFQUFFLFNBQVMsRUFBRSxDQUFBO0FBOENoRyxNQUFNLGNBQWMsR0FBRyxJQUFJLENBQUMsT0FBTyxDQUFDLE9BQU8sQ0FBQyxHQUFHLEVBQUUsRUFBRSxrQkFBa0IsQ0FBQyxDQUFDO0FBRXZFLHNFQUFzRTtBQUN0RSxJQUFJLFFBQVEsR0FBK0IsSUFBSSxDQUFDO0FBRWhELE1BQU0sWUFBWSxHQUFHLEdBQUcsRUFBRTtJQUN0QixJQUFJLFFBQVE7UUFBRSxPQUFPLFFBQVEsQ0FBQztJQUM5QixJQUFJLENBQUM7UUFDRCxJQUFJLEVBQUUsQ0FBQyxVQUFVLENBQUMsY0FBYyxDQUFDLEVBQUUsQ0FBQztZQUNoQyxNQUFNLElBQUksR0FBRyxFQUFFLENBQUMsWUFBWSxDQUFDLGNBQWMsRUFBRSxPQUFPLENBQUMsQ0FBQztZQUN0RCxRQUFRLEdBQUcsSUFBSSxDQUFDLEtBQUssQ0FBQyxJQUFJLENBQUMsQ0FBQztRQUNoQyxDQUFDO2FBQU0sQ0FBQztZQUNKLFFBQVEsR0FBRyxFQUFFLENBQUM7UUFDbEIsQ0FBQztJQUNMLENBQUM7SUFBQyxPQUFPLENBQUMsRUFBRSxDQUFDO1FBQ1QsT0FBTyxDQUFDLEtBQUssQ0FBQyx5QkFBeUIsRUFBRSxDQUFDLENBQUMsQ0FBQztRQUM1QyxRQUFRLEdBQUcsRUFBRSxDQUFDO0lBQ2xCLENBQUM7SUFDRCxPQUFPLFFBQVEsQ0FBQztBQUNwQixDQUFDLENBQUM7QUFFRixNQUFNLFlBQVksR0FBRyxDQUFDLEVBQVUsRUFBRSxJQUFTLEVBQUUsRUFBRTtJQUMzQyxJQUFJLENBQUMsUUFBUTtRQUFFLFFBQVEsR0FBRyxFQUFFLENBQUM7SUFDN0IsUUFBUSxDQUFDLEVBQUUsQ0FBQyxHQUFHLElBQUksQ0FBQztJQUVwQiwwQkFBMEI7SUFDMUIsTUFBTSxHQUFHLEdBQUcsSUFBSSxDQUFDLE9BQU8sQ0FBQyxjQUFjLENBQUMsQ0FBQztJQUN6QyxJQUFJLENBQUMsRUFBRSxDQUFDLFVBQVUsQ0FBQyxHQU
FHLENBQUMsRUFBRSxDQUFDO1FBQ3RCLEVBQUUsQ0FBQyxTQUFTLENBQUMsR0FBRyxFQUFFLEVBQUUsU0FBUyxFQUFFLElBQUksRUFBRSxDQUFDLENBQUM7SUFDM0MsQ0FBQztJQUVELHFDQUFxQztJQUNyQyxFQUFFLENBQUMsUUFBUSxDQUFDLFNBQVMsQ0FBQyxjQUFjLEVBQUUsSUFBSSxDQUFDLFNBQVMsQ0FBQyxRQUFRLEVBQUUsSUFBSSxFQUFFLENBQUMsQ0FBQyxDQUFDLENBQUMsS0FBSyxDQUFDLEdBQUcsQ0FBQyxFQUFFO1FBQ2pGLE9BQU8sQ0FBQyxLQUFLLENBQUMsd0JBQXdCLEVBQUUsR0FBRyxDQUFDLENBQUM7SUFDakQsQ0FBQyxDQUFDLENBQUM7QUFDUCxDQUFDLENBQUM7QUFFRixNQUFNLENBQUMsTUFBTSxPQUFPLEdBQUcsS0FBSyxFQUFFLEVBQVUsRUFBRSxJQUFTLEVBQUUsRUFBRTtJQUNuRCxPQUFPLGVBQWUsQ0FBQztJQUN2Qjs7Ozs7Ozs7Ozs7Ozs7Ozs7TUFpQkU7QUFDTixDQUFDLENBQUE7QUFFRCxNQUFNLENBQUMsS0FBSyxVQUFVLG1CQUFtQixDQUFDLENBQVUsRUFBRSxJQUFVO0lBQzVELE1BQU0sSUFBSSxFQUFFLENBQUMsQ0FBQyw4REFBOEQ7SUFDNUUsb0ZBQW9GO0lBQ3BGLDBEQUEwRDtJQUMxRCwwREFBMEQ7SUFDMUQseUVBQXlFO0lBQ3pFLDJIQUEySDtJQUUzSCxJQUFJLENBQUM7UUFDRCxNQUFNLEdBQUcsR0FBRyxJQUFJLEdBQUcsQ0FBQyxDQUFDLENBQUMsR0FBRyxDQUFDLEdBQUcsQ0FBQyxDQUFDO1FBQy9CLE1BQU0sUUFBUSxHQUFHLEdBQUcsQ0FBQyxRQUFRLENBQUM7UUFDOUIsTUFBTSxTQUFTLEdBQUcsSUFBSSxDQUFDLE9BQU8sQ0FBQyxRQUFRLENBQUMsQ0FBQyxXQUFXLEVBQUUsQ0FBQztRQUV2RCx1QkFBdUI7UUFDdkIsSUFBSSxrQkFBa0IsQ0FBQyxHQUFHLENBQUMsU0FBUyxDQUFDLEVBQUUsQ0FBQztZQUNwQyxPQUFPO1FBQ1gsQ0FBQztRQUVELHlFQUF5RTtRQUN6RSxJQUFJLFFBQVEsQ0FBQyxVQUFVLENBQUMsVUFBVSxDQUFDLElBQUksUUFBUSxDQUFDLFVBQVUsQ0FBQyxVQUFVLENBQUMsRUFBRSxDQUFDO1lBQ3JFLE9BQU87UUFDWCxDQUFDO1FBRUQsTUFBTSxFQUFFLEdBQUcsQ0FBQyxDQUFDLEdBQUcsQ0FBQyxNQUFNLENBQUMsaUJBQWlCLENBQUMsSUFBSSxDQUFDLENBQUMsR0FBRyxDQUFDLE1BQU0sQ0FBQyxrQkFBa0IsQ0FBQyxJQUFJLGdCQUFnQixDQUFBO1FBQ2xHLE1BQU0sR0FBRyxHQUFHLGVBQWUsQ0FBQyxDQUFDLCtFQUErRTtRQUM1RyxNQUFNLFNBQVMsR0FBRyxDQUFDLENBQUMsR0FBRyxDQUFDLE1BQU0sQ0FBQyxZQUFZLENBQUMsQ0FBQztRQUM3QyxNQUFNLEtBQUssR0FBUTtZQUNmLFNBQVMsRUFBRSxJQUFJLElBQUksRUFBRSxDQUFDLFdBQVcsRUFBRTtZQUNuQyxNQUFNLEVBQUUsQ0FBQyxDQUFDLEdBQUcsQ0FBQyxNQUFNO1lBQ3BCLElBQUksRUFBRSxRQUFRO1lBQ2QsTUFBTSxFQUFFLENBQUMsQ0FBQyxHQUFHLENBQUMsTUFBTTtZQUNwQixFQUFFO1lBQ0YsU0FBUztZQUNULGtDQUFrQztZQUNsQyxnQ0FBZ0M7WUFDaEMsT0FBTyxFQUFFLENBQUMsQ0FBQyxHQUFHLE
NBQUMsTUFBTSxDQUFDLFNBQVMsQ0FBQztZQUNoQyxNQUFNLEVBQUUsQ0FBQyxDQUFDLEdBQUcsQ0FBQyxRQUFRLENBQUM7WUFDdkIsR0FBRztTQUNOLENBQUM7UUFFRixNQUFNLElBQUksR0FBRyxJQUFJLENBQUMsU0FBUyxDQUFDLEtBQUssQ0FBQyxHQUFHLElBQUksQ0FBQztRQUMxQyxxQ0FBcUM7UUFDckMsZ0JBQWdCLENBQUMsSUFBSSxDQUFDLEtBQUssRUFBRSxLQUFLLENBQUMsQ0FBQztRQUNwQyx3QkFBd0I7UUFDeEIsRUFBRSxDQUFDLFFBQVEsQ0FBQyxVQUFVLENBQUMsY0FBYyxFQUFFLElBQUksQ0FBQyxDQUFDLEtBQUssQ0FBQyxHQUFHLENBQUMsRUFBRTtZQUNyRCxPQUFPLENBQUMsS0FBSyxDQUFDLG9DQUFvQyxFQUFFLEdBQUcsQ0FBQyxDQUFDO1FBQzdELENBQUMsQ0FBQyxDQUFDO0lBRVAsQ0FBQztJQUFDLE9BQU8sR0FBRyxFQUFFLENBQUM7UUFDWCxPQUFPLENBQUMsS0FBSyxDQUFDLGdDQUFnQyxFQUFFLEdBQUcsQ0FBQyxDQUFDO0lBQ3pELENBQUM7QUFDTCxDQUFDIn0=
|
||||
118
dist-in/middleware/auth.js
Normal file
118
dist-in/middleware/auth.js
Normal file
File diff suppressed because one or more lines are too long
376
dist-in/middleware/autoBan.js
Normal file
376
dist-in/middleware/autoBan.js
Normal file
File diff suppressed because one or more lines are too long
105
dist-in/middleware/blocklist.js
Normal file
105
dist-in/middleware/blocklist.js
Normal file
@ -0,0 +1,105 @@
|
||||
import { readFileSync } from 'fs';
|
||||
import { join, dirname } from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
// NOTE(review): __filename/__dirname are computed here but never used below —
// loadBlocklist() resolves its config path from process.cwd() instead. Confirm
// whether these can be removed or whether the path was meant to be module-relative.
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
|
||||
// In-memory blocklist; starts empty and is replaced wholesale by loadBlocklist().
let blocklist = {
    blockedIPs: [],
    blockedUserIds: [],
    blockedTokens: [],
};
/**
 * Load blocklist from JSON file
 * Reads config/blocklist.json under the current working directory and swaps
 * the parsed content into the in-memory state. On any failure (missing file,
 * bad JSON) it logs the error and keeps the previous state.
 */
export function loadBlocklist() {
    try {
        const blocklistPath = join(process.cwd(), 'config', 'blocklist.json');
        blocklist = JSON.parse(readFileSync(blocklistPath, 'utf-8'));
    }
    catch (error) {
        console.error('Failed to load blocklist:', error);
    }
    return blocklist;
}
/**
 * Get current blocklist
 */
export function getBlocklist() {
    return blocklist;
}
/**
 * Check if an IP is blocked
 */
export function isIPBlocked(ip) {
    const { blockedIPs } = blocklist;
    return blockedIPs.includes(ip);
}
/**
 * Check if a user ID is blocked
 */
export function isUserBlocked(userId) {
    const { blockedUserIds } = blocklist;
    return blockedUserIds.includes(userId);
}
/**
 * Check if an auth token is blocked
 */
export function isTokenBlocked(token) {
    const { blockedTokens } = blocklist;
    return blockedTokens.includes(token);
}
|
||||
/**
 * Extract IP address from request
 * Prefers the first hop of x-forwarded-for (trimmed), then x-real-ip,
 * and finally the literal string 'unknown'.
 */
function getClientIP(c) {
    const forwardedFor = c.req.header('x-forwarded-for');
    return forwardedFor
        ? forwardedFor.split(',')[0].trim()
        : (c.req.header('x-real-ip') || 'unknown');
}
|
||||
/**
 * Extract user ID from authorization header
 * This is a simple implementation - adjust based on your auth strategy
 * Returns the raw authorization header value (no JWT decoding is performed
 * here), or null when the header is absent/empty.
 */
function getUserId(c) {
    // Simple extraction - in production, you'd decode JWT or validate token
    const auth = c.req.header('authorization');
    return auth ? auth : null;
}
|
||||
/**
 * Blocklist middleware
 * Blocks requests from blacklisted IPs, users, or tokens
 * Responds 403 with a reason-specific message; otherwise passes through.
 */
export async function blocklistMiddleware(c, next) {
    // One place builds every 403 payload so the shape stays consistent.
    const forbidden = (message) => c.json({
        error: 'Forbidden',
        message,
    }, 403);
    const ip = getClientIP(c);
    const authHeader = c.req.header('authorization');
    const userId = getUserId(c);
    // Check if IP is blocked
    if (isIPBlocked(ip)) {
        return forbidden('Your IP address has been blocked');
    }
    // Check if auth token is blocked
    if (authHeader && isTokenBlocked(authHeader)) {
        return forbidden('Your access token has been blocked');
    }
    // Check if user ID is blocked
    if (userId && isUserBlocked(userId)) {
        return forbidden('Your account has been blocked');
    }
    await next();
}
|
||||
// Load blocklist on module initialization so the is*Blocked checks are
// populated before the first request is handled.
loadBlocklist();
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiYmxvY2tsaXN0LmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vLi4vc3JjL21pZGRsZXdhcmUvYmxvY2tsaXN0LnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUNBLE9BQU8sRUFBRSxZQUFZLEVBQUUsTUFBTSxJQUFJLENBQUE7QUFDakMsT0FBTyxFQUFFLElBQUksRUFBRSxPQUFPLEVBQUUsTUFBTSxNQUFNLENBQUE7QUFDcEMsT0FBTyxFQUFFLGFBQWEsRUFBRSxNQUFNLEtBQUssQ0FBQTtBQUVuQyxNQUFNLFVBQVUsR0FBRyxhQUFhLENBQUMsTUFBTSxDQUFDLElBQUksQ0FBQyxHQUFHLENBQUMsQ0FBQTtBQUNqRCxNQUFNLFNBQVMsR0FBRyxPQUFPLENBQUMsVUFBVSxDQUFDLENBQUE7QUFRckMsSUFBSSxTQUFTLEdBQWM7SUFDdkIsVUFBVSxFQUFFLEVBQUU7SUFDZCxjQUFjLEVBQUUsRUFBRTtJQUNsQixhQUFhLEVBQUUsRUFBRTtDQUNwQixDQUFBO0FBRUQ7O0dBRUc7QUFDSCxNQUFNLFVBQVUsYUFBYTtJQUN6QixJQUFJLENBQUM7UUFDRCxNQUFNLGFBQWEsR0FBRyxJQUFJLENBQUMsT0FBTyxDQUFDLEdBQUcsRUFBRSxFQUFFLFFBQVEsRUFBRSxnQkFBZ0IsQ0FBQyxDQUFBO1FBQ3JFLE1BQU0sSUFBSSxHQUFHLFlBQVksQ0FBQyxhQUFhLEVBQUUsT0FBTyxDQUFDLENBQUE7UUFDakQsU0FBUyxHQUFHLElBQUksQ0FBQyxLQUFLLENBQUMsSUFBSSxDQUFDLENBQUE7UUFDNUIsT0FBTyxTQUFTLENBQUE7SUFDcEIsQ0FBQztJQUFDLE9BQU8sS0FBSyxFQUFFLENBQUM7UUFDYixPQUFPLENBQUMsS0FBSyxDQUFDLDJCQUEyQixFQUFFLEtBQUssQ0FBQyxDQUFBO1FBQ2pELE9BQU8sU0FBUyxDQUFBO0lBQ3BCLENBQUM7QUFDTCxDQUFDO0FBRUQ7O0dBRUc7QUFDSCxNQUFNLFVBQVUsWUFBWTtJQUN4QixPQUFPLFNBQVMsQ0FBQTtBQUNwQixDQUFDO0FBRUQ7O0dBRUc7QUFDSCxNQUFNLFVBQVUsV0FBVyxDQUFDLEVBQVU7SUFDbEMsT0FBTyxTQUFTLENBQUMsVUFBVSxDQUFDLFFBQVEsQ0FBQyxFQUFFLENBQUMsQ0FBQTtBQUM1QyxDQUFDO0FBRUQ7O0dBRUc7QUFDSCxNQUFNLFVBQVUsYUFBYSxDQUFDLE1BQWM7SUFDeEMsT0FBTyxTQUFTLENBQUMsY0FBYyxDQUFDLFFBQVEsQ0FBQyxNQUFNLENBQUMsQ0FBQTtBQUNwRCxDQUFDO0FBRUQ7O0dBRUc7QUFDSCxNQUFNLFVBQVUsY0FBYyxDQUFDLEtBQWE7SUFDeEMsT0FBTyxTQUFTLENBQUMsYUFBYSxDQUFDLFFBQVEsQ0FBQyxLQUFLLENBQUMsQ0FBQTtBQUNsRCxDQUFDO0FBRUQ7O0dBRUc7QUFDSCxTQUFTLFdBQVcsQ0FBQyxDQUFVO0lBQzNCLE1BQU0sU0FBUyxHQUFHLENBQUMsQ0FBQyxHQUFHLENBQUMsTUFBTSxDQUFDLGlCQUFpQixDQUFDLENBQUE7SUFDakQsSUFBSSxTQUFTLEVBQUUsQ0FBQztRQUNaLE9BQU8sU0FBUyxDQUFDLEtBQUssQ0FBQyxHQUFHLENBQUMsQ0FBQyxDQUFDLENBQUMsQ0FBQyxJQUFJLEVBQUUsQ0FBQTtJQUN6QyxDQUFDO0lBQ0QsT0FBTyxDQU
FDLENBQUMsR0FBRyxDQUFDLE1BQU0sQ0FBQyxXQUFXLENBQUMsSUFBSSxTQUFTLENBQUE7QUFDakQsQ0FBQztBQUVEOzs7R0FHRztBQUNILFNBQVMsU0FBUyxDQUFDLENBQVU7SUFDekIsTUFBTSxVQUFVLEdBQUcsQ0FBQyxDQUFDLEdBQUcsQ0FBQyxNQUFNLENBQUMsZUFBZSxDQUFDLENBQUE7SUFDaEQsSUFBSSxDQUFDLFVBQVU7UUFBRSxPQUFPLElBQUksQ0FBQTtJQUU1Qix3RUFBd0U7SUFDeEUsa0VBQWtFO0lBQ2xFLE9BQU8sVUFBVSxDQUFBO0FBQ3JCLENBQUM7QUFFRDs7O0dBR0c7QUFDSCxNQUFNLENBQUMsS0FBSyxVQUFVLG1CQUFtQixDQUFDLENBQVUsRUFBRSxJQUFVO0lBQzVELE1BQU0sRUFBRSxHQUFHLFdBQVcsQ0FBQyxDQUFDLENBQUMsQ0FBQTtJQUN6QixNQUFNLFVBQVUsR0FBRyxDQUFDLENBQUMsR0FBRyxDQUFDLE1BQU0sQ0FBQyxlQUFlLENBQUMsQ0FBQTtJQUNoRCxNQUFNLE1BQU0sR0FBRyxTQUFTLENBQUMsQ0FBQyxDQUFDLENBQUE7SUFFM0IseUJBQXlCO0lBQ3pCLElBQUksV0FBVyxDQUFDLEVBQUUsQ0FBQyxFQUFFLENBQUM7UUFDbEIsT0FBTyxDQUFDLENBQUMsSUFBSSxDQUNUO1lBQ0ksS0FBSyxFQUFFLFdBQVc7WUFDbEIsT0FBTyxFQUFFLGtDQUFrQztTQUM5QyxFQUNELEdBQUcsQ0FDTixDQUFBO0lBQ0wsQ0FBQztJQUVELGlDQUFpQztJQUNqQyxJQUFJLFVBQVUsSUFBSSxjQUFjLENBQUMsVUFBVSxDQUFDLEVBQUUsQ0FBQztRQUMzQyxPQUFPLENBQUMsQ0FBQyxJQUFJLENBQ1Q7WUFDSSxLQUFLLEVBQUUsV0FBVztZQUNsQixPQUFPLEVBQUUsb0NBQW9DO1NBQ2hELEVBQ0QsR0FBRyxDQUNOLENBQUE7SUFDTCxDQUFDO0lBRUQsOEJBQThCO0lBQzlCLElBQUksTUFBTSxJQUFJLGFBQWEsQ0FBQyxNQUFNLENBQUMsRUFBRSxDQUFDO1FBQ2xDLE9BQU8sQ0FBQyxDQUFDLElBQUksQ0FDVDtZQUNJLEtBQUssRUFBRSxXQUFXO1lBQ2xCLE9BQU8sRUFBRSwrQkFBK0I7U0FDM0MsRUFDRCxHQUFHLENBQ04sQ0FBQTtJQUNMLENBQUM7SUFFRCxNQUFNLElBQUksRUFBRSxDQUFBO0FBQ2hCLENBQUM7QUFFRCwwQ0FBMEM7QUFDMUMsYUFBYSxFQUFFLENBQUEifQ==
|
||||
92
dist-in/middleware/rateLimiter.js
Normal file
92
dist-in/middleware/rateLimiter.js
Normal file
@ -0,0 +1,92 @@
|
||||
import { rateLimiter } from 'hono-rate-limiter';
|
||||
import { recordViolation } from './autoBan.js';
|
||||
// Rate limit configuration from environment variables
// NOTE(review): the fallback defaults allow only 1 request per 50ms window when
// the env vars are unset — unusually strict; confirm these are intentional.
const RATE_LIMIT_MAX = parseInt(process.env.RATE_LIMIT_MAX || '1', 10);
const RATE_LIMIT_WINDOW_MS = parseInt(process.env.RATE_LIMIT_WINDOW_MS || '50', 10);
// Startup diagnostics: surface the effective limiter settings in the logs.
console.log('🔒 Rate Limiter Configuration:');
console.log(`   Max: ${RATE_LIMIT_MAX} requests per ${RATE_LIMIT_WINDOW_MS}ms`);
console.log(`   Auto-ban threshold: ${process.env.AUTO_BAN_THRESHOLD || 10} violations`);
|
||||
/**
 * Resolve the rate-limit bucket key for a request.
 * Prefers a per-user key derived from the authorization header; otherwise
 * falls back to the client IP (first x-forwarded-for hop, then x-real-ip,
 * then the literal 'unknown').
 *
 * Previously this logic was copy-pasted in four places (keyGenerator and
 * handler of both limiters); centralizing it keeps the bucketing and the
 * violation key guaranteed to agree.
 */
function resolveRateLimitKey(c) {
    // Try to get user ID from auth header, fallback to IP
    const authHeader = c.req.header('authorization');
    if (authHeader) {
        // Extract user ID from JWT or auth token if available
        // For now, use the auth header as key
        return `user:${authHeader}`;
    }
    // Fallback to IP address
    const forwarded = c.req.header('x-forwarded-for');
    const ip = forwarded ? forwarded.split(',')[0] : c.req.header('x-real-ip') || 'unknown';
    return `ip:${ip}`;
}
/**
 * Rate limiter middleware configuration
 * Limits requests per user/IP address
 */
export const apiRateLimiter = rateLimiter({
    windowMs: RATE_LIMIT_WINDOW_MS, // Time window in milliseconds
    limit: RATE_LIMIT_MAX, // Max requests per window
    standardHeaders: 'draft-6', // Return rate limit info in headers
    keyGenerator: resolveRateLimitKey,
    handler: (c) => {
        // Record violation for auto-ban tracking
        const key = resolveRateLimitKey(c);
        console.log(`⚠️ Rate limit exceeded for ${key}`);
        recordViolation(key);
        return c.json({
            error: 'Too many requests',
            message: `Rate limit exceeded. Maximum ${RATE_LIMIT_MAX} requests per ${RATE_LIMIT_WINDOW_MS}ms`,
        }, 429);
    },
});
/**
 * Custom rate limiter for specific endpoints with different limits
 * @param limit    max requests allowed per window
 * @param windowMs window length in milliseconds
 */
export function createCustomRateLimiter(limit, windowMs) {
    return rateLimiter({
        windowMs,
        limit,
        standardHeaders: 'draft-6',
        keyGenerator: resolveRateLimitKey,
        handler: (c) => {
            // Record violation for auto-ban tracking
            const key = resolveRateLimitKey(c);
            recordViolation(key);
            return c.json({
                error: 'Too many requests',
                message: `Rate limit exceeded. Maximum ${limit} requests per ${windowMs}ms`,
            }, 429);
        },
    });
}
|
||||
// Export configuration for testing
// Effective limiter settings, resolved from the environment at module load.
export const rateLimitConfig = {
    max: RATE_LIMIT_MAX,
    windowMs: RATE_LIMIT_WINDOW_MS,
};
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoicmF0ZUxpbWl0ZXIuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvbWlkZGxld2FyZS9yYXRlTGltaXRlci50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFDQSxPQUFPLEVBQUUsV0FBVyxFQUFFLE1BQU0sbUJBQW1CLENBQUE7QUFDL0MsT0FBTyxFQUFFLGVBQWUsRUFBRSxNQUFNLGNBQWMsQ0FBQTtBQUU5QyxzREFBc0Q7QUFDdEQsTUFBTSxjQUFjLEdBQUcsUUFBUSxDQUFDLE9BQU8sQ0FBQyxHQUFHLENBQUMsY0FBYyxJQUFJLEdBQUcsRUFBRSxFQUFFLENBQUMsQ0FBQTtBQUN0RSxNQUFNLG9CQUFvQixHQUFHLFFBQVEsQ0FBQyxPQUFPLENBQUMsR0FBRyxDQUFDLG9CQUFvQixJQUFJLElBQUksRUFBRSxFQUFFLENBQUMsQ0FBQTtBQUVuRixPQUFPLENBQUMsR0FBRyxDQUFDLGdDQUFnQyxDQUFDLENBQUE7QUFDN0MsT0FBTyxDQUFDLEdBQUcsQ0FBQyxXQUFXLGNBQWMsaUJBQWlCLG9CQUFvQixJQUFJLENBQUMsQ0FBQTtBQUMvRSxPQUFPLENBQUMsR0FBRyxDQUFDLDBCQUEwQixPQUFPLENBQUMsR0FBRyxDQUFDLGtCQUFrQixJQUFJLEVBQUUsYUFBYSxDQUFDLENBQUE7QUFHeEY7OztHQUdHO0FBQ0gsTUFBTSxDQUFDLE1BQU0sY0FBYyxHQUFHLFdBQVcsQ0FBQztJQUN0QyxRQUFRLEVBQUUsb0JBQW9CLEVBQUUsOEJBQThCO0lBQzlELEtBQUssRUFBRSxjQUFjLEVBQUUsMEJBQTBCO0lBQ2pELGVBQWUsRUFBRSxTQUFTLEVBQUUsb0NBQW9DO0lBQ2hFLFlBQVksRUFBRSxDQUFDLENBQVUsRUFBRSxFQUFFO1FBQ3pCLHNEQUFzRDtRQUN0RCxNQUFNLFVBQVUsR0FBRyxDQUFDLENBQUMsR0FBRyxDQUFDLE1BQU0sQ0FBQyxlQUFlLENBQUMsQ0FBQTtRQUNoRCxJQUFJLFVBQVUsRUFBRSxDQUFDO1lBQ2Isc0RBQXNEO1lBQ3RELHNDQUFzQztZQUN0QyxPQUFPLFFBQVEsVUFBVSxFQUFFLENBQUE7UUFDL0IsQ0FBQztRQUVELHlCQUF5QjtRQUN6QixNQUFNLFNBQVMsR0FBRyxDQUFDLENBQUMsR0FBRyxDQUFDLE1BQU0sQ0FBQyxpQkFBaUIsQ0FBQyxDQUFBO1FBQ2pELE1BQU0sRUFBRSxHQUFHLFNBQVMsQ0FBQyxDQUFDLENBQUMsU0FBUyxDQUFDLEtBQUssQ0FBQyxHQUFHLENBQUMsQ0FBQyxDQUFDLENBQUMsQ0FBQyxDQUFDLENBQUMsQ0FBQyxDQUFDLEdBQUcsQ0FBQyxNQUFNLENBQUMsV0FBVyxDQUFDLElBQUksU0FBUyxDQUFBO1FBQ3ZGLE9BQU8sTUFBTSxFQUFFLEVBQUUsQ0FBQTtJQUNyQixDQUFDO0lBQ0QsT0FBTyxFQUFFLENBQUMsQ0FBVSxFQUFFLEVBQUU7UUFDcEIseUNBQXlDO1FBQ3pDLE1BQU0sVUFBVSxHQUFHLENBQUMsQ0FBQyxHQUFHLENBQUMsTUFBTSxDQUFDLGVBQWUsQ0FBQyxDQUFBO1FBQ2hELElBQUksR0FBVyxDQUFBO1FBQ2YsSUFBSSxVQUFVLEVBQUUsQ0FBQztZQUNiLEdBQUcsR0FBRyxRQUFRLFVBQVUsRUFBRSxDQUFBO1FBQzlCLENBQUM7YUFBTSxDQUFDO1lBQ0osTUFBTSxTQUFTLE
dBQUcsQ0FBQyxDQUFDLEdBQUcsQ0FBQyxNQUFNLENBQUMsaUJBQWlCLENBQUMsQ0FBQTtZQUNqRCxNQUFNLEVBQUUsR0FBRyxTQUFTLENBQUMsQ0FBQyxDQUFDLFNBQVMsQ0FBQyxLQUFLLENBQUMsR0FBRyxDQUFDLENBQUMsQ0FBQyxDQUFDLENBQUMsQ0FBQyxDQUFDLENBQUMsQ0FBQyxHQUFHLENBQUMsTUFBTSxDQUFDLFdBQVcsQ0FBQyxJQUFJLFNBQVMsQ0FBQTtZQUN2RixHQUFHLEdBQUcsTUFBTSxFQUFFLEVBQUUsQ0FBQTtRQUNwQixDQUFDO1FBRUQsT0FBTyxDQUFDLEdBQUcsQ0FBQywrQkFBK0IsR0FBRyxFQUFFLENBQUMsQ0FBQTtRQUNqRCxlQUFlLENBQUMsR0FBRyxDQUFDLENBQUE7UUFFcEIsT0FBTyxDQUFDLENBQUMsSUFBSSxDQUNUO1lBQ0ksS0FBSyxFQUFFLG1CQUFtQjtZQUMxQixPQUFPLEVBQUUsZ0NBQWdDLGNBQWMsaUJBQWlCLG9CQUFvQixJQUFJO1NBQ25HLEVBQ0QsR0FBRyxDQUNOLENBQUE7SUFDTCxDQUFDO0NBQ0osQ0FBQyxDQUFBO0FBRUY7O0dBRUc7QUFDSCxNQUFNLFVBQVUsdUJBQXVCLENBQUMsS0FBYSxFQUFFLFFBQWdCO0lBQ25FLE9BQU8sV0FBVyxDQUFDO1FBQ2YsUUFBUTtRQUNSLEtBQUs7UUFDTCxlQUFlLEVBQUUsU0FBUztRQUMxQixZQUFZLEVBQUUsQ0FBQyxDQUFVLEVBQUUsRUFBRTtZQUN6QixNQUFNLFVBQVUsR0FBRyxDQUFDLENBQUMsR0FBRyxDQUFDLE1BQU0sQ0FBQyxlQUFlLENBQUMsQ0FBQTtZQUNoRCxJQUFJLFVBQVUsRUFBRSxDQUFDO2dCQUNiLE9BQU8sUUFBUSxVQUFVLEVBQUUsQ0FBQTtZQUMvQixDQUFDO1lBQ0QsTUFBTSxTQUFTLEdBQUcsQ0FBQyxDQUFDLEdBQUcsQ0FBQyxNQUFNLENBQUMsaUJBQWlCLENBQUMsQ0FBQTtZQUNqRCxNQUFNLEVBQUUsR0FBRyxTQUFTLENBQUMsQ0FBQyxDQUFDLFNBQVMsQ0FBQyxLQUFLLENBQUMsR0FBRyxDQUFDLENBQUMsQ0FBQyxDQUFDLENBQUMsQ0FBQyxDQUFDLENBQUMsQ0FBQyxHQUFHLENBQUMsTUFBTSxDQUFDLFdBQVcsQ0FBQyxJQUFJLFNBQVMsQ0FBQTtZQUN2RixPQUFPLE1BQU0sRUFBRSxFQUFFLENBQUE7UUFDckIsQ0FBQztRQUNELE9BQU8sRUFBRSxDQUFDLENBQVUsRUFBRSxFQUFFO1lBQ3BCLHlDQUF5QztZQUN6QyxNQUFNLFVBQVUsR0FBRyxDQUFDLENBQUMsR0FBRyxDQUFDLE1BQU0sQ0FBQyxlQUFlLENBQUMsQ0FBQTtZQUNoRCxJQUFJLEdBQVcsQ0FBQTtZQUNmLElBQUksVUFBVSxFQUFFLENBQUM7Z0JBQ2IsR0FBRyxHQUFHLFFBQVEsVUFBVSxFQUFFLENBQUE7WUFDOUIsQ0FBQztpQkFBTSxDQUFDO2dCQUNKLE1BQU0sU0FBUyxHQUFHLENBQUMsQ0FBQyxHQUFHLENBQUMsTUFBTSxDQUFDLGlCQUFpQixDQUFDLENBQUE7Z0JBQ2pELE1BQU0sRUFBRSxHQUFHLFNBQVMsQ0FBQyxDQUFDLENBQUMsU0FBUyxDQUFDLEtBQUssQ0FBQyxHQUFHLENBQUMsQ0FBQyxDQUFDLENBQUMsQ0FBQyxDQUFDLENBQUMsQ0FBQyxDQUFDLEdBQUcsQ0FBQyxNQUFNLENBQUMsV0FBVyxDQUFDLElBQUksU0FBUyxDQUFBO2dCQUN2RixHQUFHLEdBQUcsTU
FBTSxFQUFFLEVBQUUsQ0FBQTtZQUNwQixDQUFDO1lBQ0QsZUFBZSxDQUFDLEdBQUcsQ0FBQyxDQUFBO1lBRXBCLE9BQU8sQ0FBQyxDQUFDLElBQUksQ0FDVDtnQkFDSSxLQUFLLEVBQUUsbUJBQW1CO2dCQUMxQixPQUFPLEVBQUUsZ0NBQWdDLEtBQUssaUJBQWlCLFFBQVEsSUFBSTthQUM5RSxFQUNELEdBQUcsQ0FDTixDQUFBO1FBQ0wsQ0FBQztLQUNKLENBQUMsQ0FBQTtBQUNOLENBQUM7QUFFRCxtQ0FBbUM7QUFDbkMsTUFBTSxDQUFDLE1BQU0sZUFBZSxHQUFHO0lBQzNCLEdBQUcsRUFBRSxjQUFjO0lBQ25CLFFBQVEsRUFBRSxvQkFBb0I7Q0FDakMsQ0FBQSJ9
|
||||
264
dist-in/middleware/usageTracking.js
Normal file
264
dist-in/middleware/usageTracking.js
Normal file
File diff suppressed because one or more lines are too long
102
dist-in/products/AbstractProduct.js
Normal file
102
dist-in/products/AbstractProduct.js
Normal file
@ -0,0 +1,102 @@
|
||||
import EventEmitter from 'events';
|
||||
import { createHash } from 'crypto';
|
||||
import { streamSSE } from 'hono/streaming';
|
||||
import { ProductErrorCode } from './enums.js';
|
||||
import { ProductError } from './errors.js';
|
||||
import { logger } from '../commons/logger.js';
|
||||
/**
 * Base class for products.
 * Provides guarded lifecycle hooks (start/stop wrapped in ProductError),
 * no-op pause/resume, an SSE streaming helper with optional cache
 * short-circuiting, and a stable input-hash helper.
 */
export class AbstractProduct extends EventEmitter {
    /** Run the onStart hook; any failure is rethrown as a START_FAILED ProductError. */
    async start(boss) {
        try {
            await this.onStart(boss);
        }
        catch (error) {
            throw new ProductError(ProductErrorCode.START_FAILED, {
                message: `Failed to start product ${this.id}: ${error.message}`,
                originalError: error
            });
        }
    }
    /** Optional hook for subclasses; the default implementation does nothing. */
    async onStart(boss) {
    }
    /** Run the onStop hook; any failure is rethrown as a STOP_FAILED ProductError. */
    async stop() {
        try {
            await this.onStop();
        }
        catch (error) {
            throw new ProductError(ProductErrorCode.STOP_FAILED, {
                message: `Failed to stop product ${this.id}: ${error.message}`,
                originalError: error
            });
        }
    }
    /** Optional hook for subclasses; the default implementation does nothing. */
    async onStop() {
    }
    /** No-op for now as we removed pgboss. */
    async pause() {
    }
    /** No-op for now as we removed pgboss. */
    async resume() {
    }
    /**
     * Stream results to the client over SSE.
     * Emits 'progress' frames, then either replays cached results (when
     * cacheChecker yields a hit and forceRefresh is false) or fetches fresh
     * ones, sending one 'result' frame per item and a final 'complete' frame.
     * Errors are logged and reported to the client as an 'error' frame.
     */
    async handleStream(c, options) {
        const { data, userId, forceRefresh, fetcher, cacheChecker } = options;
        const inputHash = this.generateHash(data);
        return streamSSE(c, async (stream) => {
            // Every frame goes through the same serialization path.
            const emit = (event, payload) => stream.writeSSE({ event, data: JSON.stringify(payload) });
            try {
                await emit('progress', { stage: 'starting', percent: 0 });
                if (!forceRefresh && cacheChecker) {
                    await emit('progress', { stage: 'checking_cache', percent: 10 });
                    const cached = await cacheChecker(inputHash);
                    if (cached) {
                        for (const item of cached) {
                            await emit('result', item);
                        }
                        await emit('complete', { total: cached.length, cached: true });
                        return;
                    }
                }
                await emit('progress', { stage: 'fetching_from_api', percent: 20 });
                const results = await fetcher(data, userId);
                for (const item of results) {
                    await emit('result', item);
                }
                await emit('complete', { total: results.length, cached: false });
            }
            catch (error) {
                logger.error(error, `[${this.id}] Stream error`);
                await emit('error', { error: error.message || 'Internal Server Error' });
            }
        });
    }
    /**
     * Helper for hashing: sha256 hex digest of params serialized with the
     * keys restricted to the params' sorted key list (JSON.stringify replacer
     * array), so top-level key order does not change the hash.
     */
    generateHash(params) {
        const sortedKeys = Object.keys(params).sort();
        const normalizedInput = JSON.stringify(params, sortedKeys);
        return createHash('sha256').update(normalizedInput).digest('hex');
    }
}
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiQWJzdHJhY3RQcm9kdWN0LmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vLi4vc3JjL3Byb2R1Y3RzL0Fic3RyYWN0UHJvZHVjdC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxPQUFPLFlBQVksTUFBTSxRQUFRLENBQUM7QUFDbEMsT0FBTyxFQUFFLFVBQVUsRUFBRSxNQUFNLFFBQVEsQ0FBQztBQUNwQyxPQUFPLEVBQUUsU0FBUyxFQUFFLE1BQU0sZ0JBQWdCLENBQUM7QUFFM0MsT0FBTyxFQUFFLGdCQUFnQixFQUFFLE1BQU0sWUFBWSxDQUFDO0FBQzlDLE9BQU8sRUFBRSxZQUFZLEVBQUUsTUFBTSxhQUFhLENBQUM7QUFDM0MsT0FBTyxFQUFFLE1BQU0sRUFBRSxNQUFNLHNCQUFzQixDQUFDO0FBZ0I5QyxNQUFNLE9BQWdCLGVBQWdDLFNBQVEsWUFBWTtJQU90RSxLQUFLLENBQUMsS0FBSyxDQUFDLElBQVU7UUFDbEIsSUFBSSxDQUFDO1lBQ0QsTUFBTSxJQUFJLENBQUMsT0FBTyxDQUFDLElBQUksQ0FBQyxDQUFDO1FBQzdCLENBQUM7UUFBQyxPQUFPLEtBQVUsRUFBRSxDQUFDO1lBQ2xCLE1BQU0sSUFBSSxZQUFZLENBQUMsZ0JBQWdCLENBQUMsWUFBWSxFQUFFO2dCQUNsRCxPQUFPLEVBQUUsMkJBQTJCLElBQUksQ0FBQyxFQUFFLEtBQUssS0FBSyxDQUFDLE9BQU8sRUFBRTtnQkFDL0QsYUFBYSxFQUFFLEtBQUs7YUFDdkIsQ0FBQyxDQUFDO1FBQ1AsQ0FBQztJQUNMLENBQUM7SUFFUyxLQUFLLENBQUMsT0FBTyxDQUFDLElBQVU7UUFDOUIsK0JBQStCO0lBQ25DLENBQUM7SUFFRCxLQUFLLENBQUMsSUFBSTtRQUNOLElBQUksQ0FBQztZQUNELE1BQU0sSUFBSSxDQUFDLE1BQU0sRUFBRSxDQUFDO1FBQ3hCLENBQUM7UUFBQyxPQUFPLEtBQVUsRUFBRSxDQUFDO1lBQ2xCLE1BQU0sSUFBSSxZQUFZLENBQUMsZ0JBQWdCLENBQUMsV0FBVyxFQUFFO2dCQUNqRCxPQUFPLEVBQUUsMEJBQTBCLElBQUksQ0FBQyxFQUFFLEtBQUssS0FBSyxDQUFDLE9BQU8sRUFBRTtnQkFDOUQsYUFBYSxFQUFFLEtBQUs7YUFDdkIsQ0FBQyxDQUFDO1FBQ1AsQ0FBQztJQUNMLENBQUM7SUFFUyxLQUFLLENBQUMsTUFBTTtRQUNsQixnQkFBZ0I7SUFDcEIsQ0FBQztJQUVELEtBQUssQ0FBQyxLQUFLO1FBQ1AscUNBQXFDO0lBQ3pDLENBQUM7SUFFRCxLQUFLLENBQUMsTUFBTTtRQUNSLHFDQUFxQztJQUN6QyxDQUFDO0lBRVMsS0FBSyxDQUFDLFlBQVksQ0FBQyxDQUFNLEVBQUUsT0FBc0I7UUFDdkQsTUFBTSxFQUFFLElBQUksRUFBRSxNQUFNLEVBQUUsWUFBWSxFQUFFLE9BQU8sRUFBRSxZQUFZLEVBQUUsR0FBRyxPQUFPLENBQUM7UUFFdEUsTUFBTSxTQUFTLEdBQUcsSUFBSSxDQUFDLFlBQVksQ0FBQyxJQUFJLENBQUMsQ0FBQztRQUUxQyxPQUFPLFNBQVMsQ0FBQyxDQUFDLEVBQUUsS0FBSyxFQUFFLE1BQU0sRUFBRSxFQUFFO1lBQ2pDLElBQUksQ0FBQztnQkFDRCxNQUFNLE1BQU0sQ0FBQyxRQUFRLENBQUM7b0JBQ2xCLEtBQUssRUFBRS
xVQUFVO29CQUNqQixJQUFJLEVBQUUsSUFBSSxDQUFDLFNBQVMsQ0FBQyxFQUFFLEtBQUssRUFBRSxVQUFVLEVBQUUsT0FBTyxFQUFFLENBQUMsRUFBRSxDQUFDO2lCQUMxRCxDQUFDLENBQUM7Z0JBRUgsSUFBSSxDQUFDLFlBQVksSUFBSSxZQUFZLEVBQUUsQ0FBQztvQkFDaEMsTUFBTSxNQUFNLENBQUMsUUFBUSxDQUFDO3dCQUNsQixLQUFLLEVBQUUsVUFBVTt3QkFDakIsSUFBSSxFQUFFLElBQUksQ0FBQyxTQUFTLENBQUMsRUFBRSxLQUFLLEVBQUUsZ0JBQWdCLEVBQUUsT0FBTyxFQUFFLEVBQUUsRUFBRSxDQUFDO3FCQUNqRSxDQUFDLENBQUM7b0JBRUgsTUFBTSxNQUFNLEdBQUcsTUFBTSxZQUFZLENBQUMsU0FBUyxDQUFDLENBQUM7b0JBQzdDLElBQUksTUFBTSxFQUFFLENBQUM7d0JBQ1QsS0FBSyxJQUFJLENBQUMsR0FBRyxDQUFDLEVBQUUsQ0FBQyxHQUFHLE1BQU0sQ0FBQyxNQUFNLEVBQUUsQ0FBQyxFQUFFLEVBQUUsQ0FBQzs0QkFDckMsTUFBTSxNQUFNLENBQUMsUUFBUSxDQUFDO2dDQUNsQixLQUFLLEVBQUUsUUFBUTtnQ0FDZixJQUFJLEVBQUUsSUFBSSxDQUFDLFNBQVMsQ0FBQyxNQUFNLENBQUMsQ0FBQyxDQUFDLENBQUM7NkJBQ2xDLENBQUMsQ0FBQzt3QkFDUCxDQUFDO3dCQUNELE1BQU0sTUFBTSxDQUFDLFFBQVEsQ0FBQzs0QkFDbEIsS0FBSyxFQUFFLFVBQVU7NEJBQ2pCLElBQUksRUFBRSxJQUFJLENBQUMsU0FBUyxDQUFDLEVBQUUsS0FBSyxFQUFFLE1BQU0sQ0FBQyxNQUFNLEVBQUUsTUFBTSxFQUFFLElBQUksRUFBRSxDQUFDO3lCQUMvRCxDQUFDLENBQUM7d0JBQ0gsT0FBTztvQkFDWCxDQUFDO2dCQUNMLENBQUM7Z0JBRUQsTUFBTSxNQUFNLENBQUMsUUFBUSxDQUFDO29CQUNsQixLQUFLLEVBQUUsVUFBVTtvQkFDakIsSUFBSSxFQUFFLElBQUksQ0FBQyxTQUFTLENBQUMsRUFBRSxLQUFLLEVBQUUsbUJBQW1CLEVBQUUsT0FBTyxFQUFFLEVBQUUsRUFBRSxDQUFDO2lCQUNwRSxDQUFDLENBQUM7Z0JBRUgsTUFBTSxPQUFPLEdBQUcsTUFBTSxPQUFPLENBQUMsSUFBSSxFQUFFLE1BQU0sQ0FBQyxDQUFDO2dCQUU1QyxLQUFLLElBQUksQ0FBQyxHQUFHLENBQUMsRUFBRSxDQUFDLEdBQUcsT0FBTyxDQUFDLE1BQU0sRUFBRSxDQUFDLEVBQUUsRUFBRSxDQUFDO29CQUN0QyxNQUFNLE1BQU0sQ0FBQyxRQUFRLENBQUM7d0JBQ2xCLEtBQUssRUFBRSxRQUFRO3dCQUNmLElBQUksRUFBRSxJQUFJLENBQUMsU0FBUyxDQUFDLE9BQU8sQ0FBQyxDQUFDLENBQUMsQ0FBQztxQkFDbkMsQ0FBQyxDQUFDO2dCQUNQLENBQUM7Z0JBRUQsTUFBTSxNQUFNLENBQUMsUUFBUSxDQUFDO29CQUNsQixLQUFLLEVBQUUsVUFBVTtvQkFDakIsSUFBSSxFQUFFLElBQUksQ0FBQyxTQUFTLENBQUMsRUFBRSxLQUFLLEVBQUUsT0FBTyxDQUFDLE1BQU0sRUFBRSxNQUFNLEVBQUUsS0FBSyxFQUFFLENBQUM7aUJBQ2pFLENBQUMsQ0FBQztZQUVQLENBQUM7WUFBQyxPQUFPLEtBQVUsRUFBRSxDQUFDO2dCQUNsQixNQUFNLENBQUMsS0FBSyxDQUFDLEtBQU
ssRUFBRSxJQUFJLElBQUksQ0FBQyxFQUFFLGdCQUFnQixDQUFDLENBQUM7Z0JBQ2pELE1BQU0sTUFBTSxDQUFDLFFBQVEsQ0FBQztvQkFDbEIsS0FBSyxFQUFFLE9BQU87b0JBQ2QsSUFBSSxFQUFFLElBQUksQ0FBQyxTQUFTLENBQUMsRUFBRSxLQUFLLEVBQUUsS0FBSyxDQUFDLE9BQU8sSUFBSSx1QkFBdUIsRUFBRSxDQUFDO2lCQUM1RSxDQUFDLENBQUM7WUFDUCxDQUFDO1FBQ0wsQ0FBQyxDQUFDLENBQUM7SUFDUCxDQUFDO0lBRUQscUJBQXFCO0lBQ1gsWUFBWSxDQUFDLE1BQVc7UUFDOUIsTUFBTSxlQUFlLEdBQUcsSUFBSSxDQUFDLFNBQVMsQ0FBQyxNQUFNLEVBQUUsTUFBTSxDQUFDLElBQUksQ0FBQyxNQUFNLENBQUMsQ0FBQyxJQUFJLEVBQUUsQ0FBQyxDQUFDO1FBQzNFLE9BQU8sVUFBVSxDQUFDLFFBQVEsQ0FBQyxDQUFDLE1BQU0sQ0FBQyxlQUFlLENBQUMsQ0FBQyxNQUFNLENBQUMsS0FBSyxDQUFDLENBQUM7SUFDdEUsQ0FBQztDQUlKIn0=
|
||||
3
dist-in/products/EventBus.js
Normal file
3
dist-in/products/EventBus.js
Normal file
@ -0,0 +1,3 @@
|
||||
import EventEmitter from 'events';
// Shared process-wide event bus used for cross-product communication.
export const EventBus = new EventEmitter();
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiRXZlbnRCdXMuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvcHJvZHVjdHMvRXZlbnRCdXMudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsT0FBTyxZQUFZLE1BQU0sUUFBUSxDQUFDO0FBRWxDLE1BQU0sQ0FBQyxNQUFNLFFBQVEsR0FBRyxJQUFJLFlBQVksRUFBRSxDQUFDIn0=
|
||||
122
dist-in/products/analytics/index.js
Normal file
122
dist-in/products/analytics/index.js
Normal file
File diff suppressed because one or more lines are too long
81
dist-in/products/analytics/routes.js
Normal file
81
dist-in/products/analytics/routes.js
Normal file
@ -0,0 +1,81 @@
|
||||
import { createRoute, z } from '@hono/zod-openapi';
|
||||
// OpenAPI route definition: GET /api/analytics — read logged analytics
// entries, optionally limited and filtered by an ISO date range.
// NOTE(review): the 200 schema omits the `geo` field that the analytics
// middleware attaches to each entry — confirm whether it should be documented.
export const getAnalyticsRoute = createRoute({
    method: 'get',
    path: '/api/analytics',
    tags: ['Analytics'],
    summary: 'Get Analytics Data',
    description: 'Retrieve analytics data from the log file, optionally filtered by date.',
    security: [{ bearerAuth: [] }],
    request: {
        query: z.object({
            limit: z.string().optional().default('100').openapi({ description: 'Number of entries to return (default 100)' }),
            startDate: z.string().optional().openapi({ description: 'Filter entries after this date (ISO string)' }),
            endDate: z.string().optional().openapi({ description: 'Filter entries before this date (ISO string)' }),
        }),
    },
    responses: {
        200: {
            description: 'Analytics Data',
            content: {
                'application/json': {
                    // One object per logged request; optional fields may be
                    // absent when the corresponding header was not sent.
                    schema: z.array(z.object({
                        timestamp: z.string(),
                        method: z.string(),
                        path: z.string(),
                        status: z.number(),
                        ip: z.string(),
                        userAgent: z.string().optional(),
                        referer: z.string().optional(),
                        userId: z.string().optional()
                    })),
                },
            },
        },
        401: {
            description: 'Unauthorized',
        },
    },
});
|
||||
// OpenAPI route definition: GET /api/analytics/stream — real-time feed of
// analytics entries delivered as Server-Sent Events.
export const getAnalyticsStreamRoute = createRoute({
    method: 'get',
    path: '/api/analytics/stream',
    tags: ['Analytics'],
    summary: 'Stream Analytics Data',
    description: 'Stream real-time analytics data via Server-Sent Events (SSE).',
    security: [{ bearerAuth: [] }],
    responses: {
        200: {
            description: 'Analytics Event Stream',
            content: {
                'text/event-stream': {
                    // Raw SSE frames; each data payload is a JSON-encoded entry.
                    schema: z.string(),
                },
            },
        },
        401: {
            description: 'Unauthorized',
        },
    },
});
|
||||
// OpenAPI route definition: DELETE /api/analytics — destructive: clears the
// entire analytics log file.
export const deleteAnalyticsRoute = createRoute({
    method: 'delete',
    path: '/api/analytics',
    tags: ['Analytics'],
    summary: 'Clear Analytics Data',
    description: 'Clear all analytics data from the log file.',
    security: [{ bearerAuth: [] }],
    responses: {
        200: {
            description: 'Analytics Data Cleared',
            content: {
                'application/json': {
                    schema: z.object({ success: z.boolean() }),
                },
            },
        },
        401: {
            description: 'Unauthorized',
        },
    },
});
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoicm91dGVzLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vLi4vLi4vc3JjL3Byb2R1Y3RzL2FuYWx5dGljcy9yb3V0ZXMudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsT0FBTyxFQUFFLFdBQVcsRUFBRSxDQUFDLEVBQUUsTUFBTSxtQkFBbUIsQ0FBQztBQUVuRCxNQUFNLENBQUMsTUFBTSxpQkFBaUIsR0FBRyxXQUFXLENBQUM7SUFDekMsTUFBTSxFQUFFLEtBQUs7SUFDYixJQUFJLEVBQUUsZ0JBQWdCO0lBQ3RCLElBQUksRUFBRSxDQUFDLFdBQVcsQ0FBQztJQUNuQixPQUFPLEVBQUUsb0JBQW9CO0lBQzdCLFdBQVcsRUFBRSx5RUFBeUU7SUFDdEYsUUFBUSxFQUFFLENBQUMsRUFBRSxVQUFVLEVBQUUsRUFBRSxFQUFFLENBQUM7SUFDOUIsT0FBTyxFQUFFO1FBQ0wsS0FBSyxFQUFFLENBQUMsQ0FBQyxNQUFNLENBQUM7WUFDWixLQUFLLEVBQUUsQ0FBQyxDQUFDLE1BQU0sRUFBRSxDQUFDLFFBQVEsRUFBRSxDQUFDLE9BQU8sQ0FBQyxLQUFLLENBQUMsQ0FBQyxPQUFPLENBQUMsRUFBRSxXQUFXLEVBQUUsMkNBQTJDLEVBQUUsQ0FBQztZQUNqSCxTQUFTLEVBQUUsQ0FBQyxDQUFDLE1BQU0sRUFBRSxDQUFDLFFBQVEsRUFBRSxDQUFDLE9BQU8sQ0FBQyxFQUFFLFdBQVcsRUFBRSw2Q0FBNkMsRUFBRSxDQUFDO1lBQ3hHLE9BQU8sRUFBRSxDQUFDLENBQUMsTUFBTSxFQUFFLENBQUMsUUFBUSxFQUFFLENBQUMsT0FBTyxDQUFDLEVBQUUsV0FBVyxFQUFFLDhDQUE4QyxFQUFFLENBQUM7U0FDMUcsQ0FBQztLQUNMO0lBQ0QsU0FBUyxFQUFFO1FBQ1AsR0FBRyxFQUFFO1lBQ0QsV0FBVyxFQUFFLGdCQUFnQjtZQUM3QixPQUFPLEVBQUU7Z0JBQ0wsa0JBQWtCLEVBQUU7b0JBQ2hCLE1BQU0sRUFBRSxDQUFDLENBQUMsS0FBSyxDQUFDLENBQUMsQ0FBQyxNQUFNLENBQUM7d0JBQ3JCLFNBQVMsRUFBRSxDQUFDLENBQUMsTUFBTSxFQUFFO3dCQUNyQixNQUFNLEVBQUUsQ0FBQyxDQUFDLE1BQU0sRUFBRTt3QkFDbEIsSUFBSSxFQUFFLENBQUMsQ0FBQyxNQUFNLEVBQUU7d0JBQ2hCLE1BQU0sRUFBRSxDQUFDLENBQUMsTUFBTSxFQUFFO3dCQUNsQixFQUFFLEVBQUUsQ0FBQyxDQUFDLE1BQU0sRUFBRTt3QkFDZCxTQUFTLEVBQUUsQ0FBQyxDQUFDLE1BQU0sRUFBRSxDQUFDLFFBQVEsRUFBRTt3QkFDaEMsT0FBTyxFQUFFLENBQUMsQ0FBQyxNQUFNLEVBQUUsQ0FBQyxRQUFRLEVBQUU7d0JBQzlCLE1BQU0sRUFBRSxDQUFDLENBQUMsTUFBTSxFQUFFLENBQUMsUUFBUSxFQUFFO3FCQUNoQyxDQUFDLENBQUM7aUJBQ047YUFDSjtTQUNKO1FBQ0QsR0FBRyxFQUFFO1lBQ0QsV0FBVyxFQUFFLGNBQWM7U0FDOUI7S0FDSjtDQUNKLENBQUMsQ0FBQztBQUVILE1BQU0sQ0FBQyxNQUFNLHVCQUF1QixHQUFHLFdBQVcsQ0FBQztJQUMvQyxNQUFNLEVBQUUsS0FBSztJQUNiLElBQUksRUFBRSx1QkFBdUI7SUFDN0IsSUFBSSxFQUFFLENBQUMsV0
FBVyxDQUFDO0lBQ25CLE9BQU8sRUFBRSx1QkFBdUI7SUFDaEMsV0FBVyxFQUFFLCtEQUErRDtJQUM1RSxRQUFRLEVBQUUsQ0FBQyxFQUFFLFVBQVUsRUFBRSxFQUFFLEVBQUUsQ0FBQztJQUM5QixTQUFTLEVBQUU7UUFDUCxHQUFHLEVBQUU7WUFDRCxXQUFXLEVBQUUsd0JBQXdCO1lBQ3JDLE9BQU8sRUFBRTtnQkFDTCxtQkFBbUIsRUFBRTtvQkFDakIsTUFBTSxFQUFFLENBQUMsQ0FBQyxNQUFNLEVBQUU7aUJBQ3JCO2FBQ0o7U0FDSjtRQUNELEdBQUcsRUFBRTtZQUNELFdBQVcsRUFBRSxjQUFjO1NBQzlCO0tBQ0o7Q0FFSixDQUFDLENBQUM7QUFFSCxNQUFNLENBQUMsTUFBTSxvQkFBb0IsR0FBRyxXQUFXLENBQUM7SUFDNUMsTUFBTSxFQUFFLFFBQVE7SUFDaEIsSUFBSSxFQUFFLGdCQUFnQjtJQUN0QixJQUFJLEVBQUUsQ0FBQyxXQUFXLENBQUM7SUFDbkIsT0FBTyxFQUFFLHNCQUFzQjtJQUMvQixXQUFXLEVBQUUsNkNBQTZDO0lBQzFELFFBQVEsRUFBRSxDQUFDLEVBQUUsVUFBVSxFQUFFLEVBQUUsRUFBRSxDQUFDO0lBQzlCLFNBQVMsRUFBRTtRQUNQLEdBQUcsRUFBRTtZQUNELFdBQVcsRUFBRSx3QkFBd0I7WUFDckMsT0FBTyxFQUFFO2dCQUNMLGtCQUFrQixFQUFFO29CQUNoQixNQUFNLEVBQUUsQ0FBQyxDQUFDLE1BQU0sQ0FBQyxFQUFFLE9BQU8sRUFBRSxDQUFDLENBQUMsT0FBTyxFQUFFLEVBQUUsQ0FBQztpQkFDN0M7YUFDSjtTQUNKO1FBQ0QsR0FBRyxFQUFFO1lBQ0QsV0FBVyxFQUFFLGNBQWM7U0FDOUI7S0FDSjtDQUNKLENBQUMsQ0FBQyJ9
|
||||
20
dist-in/products/enums.js
Normal file
20
dist-in/products/enums.js
Normal file
@ -0,0 +1,20 @@
|
||||
// ProductErrorCode: string constants classifying product-level failures.
// Compiled-TS string-enum pattern: the exported object is populated by an IIFE.
export var ProductErrorCode;
(function (codes) {
    // Lifecycle failures
    codes["START_FAILED"] = "PRODUCT_START_FAILED";
    codes["STOP_FAILED"] = "PRODUCT_STOP_FAILED";
    codes["PAUSE_FAILED"] = "PRODUCT_PAUSE_FAILED";
    codes["RESUME_FAILED"] = "PRODUCT_RESUME_FAILED";
    // Worker failures
    codes["WORKER_REGISTRATION_FAILED"] = "WORKER_REGISTRATION_FAILED";
    codes["WORKER_NOT_FOUND"] = "WORKER_NOT_FOUND";
    // Job failures
    codes["JOB_SUBMISSION_FAILED"] = "JOB_SUBMISSION_FAILED";
    codes["JOB_TIMEOUT"] = "JOB_TIMEOUT";
    // Configuration failures
    codes["INVALID_CONFIG"] = "INVALID_CONFIG";
    codes["MISSING_DEPENDENCY"] = "MISSING_DEPENDENCY";
    // Fallback
    codes["UNKNOWN_ERROR"] = "UNKNOWN_ERROR";
})(ProductErrorCode || (ProductErrorCode = {}));
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiZW51bXMuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvcHJvZHVjdHMvZW51bXMudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsTUFBTSxDQUFOLElBQVksZ0JBcUJYO0FBckJELFdBQVksZ0JBQWdCO0lBQ3hCLG1CQUFtQjtJQUNuQix5REFBcUMsQ0FBQTtJQUNyQyx1REFBbUMsQ0FBQTtJQUNuQyx5REFBcUMsQ0FBQTtJQUNyQywyREFBdUMsQ0FBQTtJQUV2QyxnQkFBZ0I7SUFDaEIsNkVBQXlELENBQUE7SUFDekQseURBQXFDLENBQUE7SUFFckMsYUFBYTtJQUNiLG1FQUErQyxDQUFBO0lBQy9DLCtDQUEyQixDQUFBO0lBRTNCLHVCQUF1QjtJQUN2QixxREFBaUMsQ0FBQTtJQUNqQyw2REFBeUMsQ0FBQTtJQUV6QyxVQUFVO0lBQ1YsbURBQStCLENBQUE7QUFDbkMsQ0FBQyxFQXJCVyxnQkFBZ0IsS0FBaEIsZ0JBQWdCLFFBcUIzQiJ9
|
||||
20
dist-in/products/errors.js
Normal file
20
dist-in/products/errors.js
Normal file
@ -0,0 +1,20 @@
|
||||
/**
 * Structured error carrying a machine-readable code plus a payload object.
 * The payload may be passed as a bare message string, in which case it is
 * normalized to `{ message }`.
 */
export class ProductError extends Error {
    code;
    payload;
    constructor(code, payload) {
        const isBareMessage = typeof payload === 'string';
        super(isBareMessage ? payload : payload.message);
        this.code = code;
        this.payload = isBareMessage ? { message: payload } : payload;
        // Restore prototype chain
        Object.setPrototypeOf(this, new.target.prototype);
    }
    // Serializable form used when the error is sent over the wire / logged.
    toJSON() {
        return { code: this.code, message: this.message, payload: this.payload };
    }
}
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiZXJyb3JzLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vLi4vc3JjL3Byb2R1Y3RzL2Vycm9ycy50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFPQSxNQUFNLE9BQU8sWUFBYSxTQUFRLEtBQUs7SUFDbkIsSUFBSSxDQUFtQjtJQUN2QixPQUFPLENBQXNCO0lBRTdDLFlBQVksSUFBc0IsRUFBRSxPQUFxQztRQUNyRSxNQUFNLE9BQU8sR0FBRyxPQUFPLE9BQU8sS0FBSyxRQUFRLENBQUMsQ0FBQyxDQUFDLE9BQU8sQ0FBQyxDQUFDLENBQUMsT0FBTyxDQUFDLE9BQU8sQ0FBQztRQUN4RSxLQUFLLENBQUMsT0FBTyxDQUFDLENBQUM7UUFDZixJQUFJLENBQUMsSUFBSSxHQUFHLElBQUksQ0FBQztRQUNqQixJQUFJLENBQUMsT0FBTyxHQUFHLE9BQU8sT0FBTyxLQUFLLFFBQVEsQ0FBQyxDQUFDLENBQUMsRUFBRSxPQUFPLEVBQUUsT0FBTyxFQUFFLENBQUMsQ0FBQyxDQUFDLE9BQU8sQ0FBQztRQUU1RSwwQkFBMEI7UUFDMUIsTUFBTSxDQUFDLGNBQWMsQ0FBQyxJQUFJLEVBQUUsR0FBRyxDQUFDLE1BQU0sQ0FBQyxTQUFTLENBQUMsQ0FBQztJQUN0RCxDQUFDO0lBRUQsTUFBTTtRQUNGLE9BQU87WUFDSCxJQUFJLEVBQUUsSUFBSSxDQUFDLElBQUk7WUFDZixPQUFPLEVBQUUsSUFBSSxDQUFDLE9BQU87WUFDckIsT0FBTyxFQUFFLElBQUksQ0FBQyxPQUFPO1NBQ3hCLENBQUM7SUFDTixDQUFDO0NBQ0oifQ==
|
||||
84
dist-in/products/openai/handlers.js
Normal file
84
dist-in/products/openai/handlers.js
Normal file
@ -0,0 +1,84 @@
|
||||
import { logger } from '../../commons/logger.js';
|
||||
import { createClient } from '@supabase/supabase-js';
|
||||
// Helper to get Supabase credentials (copied from auth middleware logic)
|
||||
const getSupabaseCredentials = () => {
|
||||
const url = process.env.SUPABASE_URL;
|
||||
const key = process.env.SUPABASE_SERVICE_KEY;
|
||||
if (!url || !key) {
|
||||
throw new Error('Supabase credentials missing via process.env');
|
||||
}
|
||||
return { url, key };
|
||||
};
|
||||
/**
 * Hono handler: proxy a chat-completions request to OpenAI on behalf of the
 * authenticated user, injecting that user's own API key.
 *
 * Flow: auth check -> load the per-user OpenAI key from the Supabase
 * `user_secrets` table -> forward the JSON body to api.openai.com ->
 * relay the response, either as JSON or as a raw SSE stream when the
 * request body sets `stream`.
 *
 * @param c Hono context; `userId` is expected to be set by auth middleware.
 * @returns 401 without a userId, 400 when the user has no stored key,
 *          500 on secret-lookup or unexpected failure, otherwise
 *          OpenAI's response (status and body relayed on upstream error).
 */
export async function handleChatCompletions(c) {
    const userId = c.get('userId');
    if (!userId) {
        return c.json({ error: 'Unauthorized' }, 401);
    }
    try {
        // 1. Fetch User API Key
        const { url, key } = getSupabaseCredentials();
        const supabase = createClient(url, key);
        // maybeSingle(): zero rows yields data=null rather than an error.
        const { data: userSecrets, error: secretsError } = await supabase
            .from('user_secrets')
            .select('settings')
            .eq('user_id', userId)
            .maybeSingle();
        if (secretsError) {
            logger.error({ err: secretsError, userId }, 'Failed to fetch user secrets');
            return c.json({ error: 'Internal Server Error' }, 500);
        }
        // Add debug logging
        // NOTE(review): this logs the full `settings` object at debug level,
        // which includes the stored API keys — confirm debug logs never ship
        // to a shared sink in production.
        logger.debug({ userId, hasSecrets: !!userSecrets, settings: userSecrets?.settings }, 'Checking for OpenAI API key');
        const apiKey = userSecrets?.settings?.api_keys?.openai_api_key;
        if (!apiKey) {
            logger.warn({ userId }, 'Missing OpenAI API key in user_secrets');
            return c.json({ error: 'OpenAI API key not found. Please add it to your profile settings.' }, 400);
        }
        // 2. Prepare Request to OpenAI
        const body = await c.req.json();
        // Log request (sanitize sensitive data) — only the model name is
        // logged here; message contents are kept out of the log.
        logger.info({ userId, model: body.model }, 'Proxying OpenAI request');
        const headers = {
            'Content-Type': 'application/json',
            'Authorization': `Bearer ${apiKey}`,
        };
        // 3. Make Request to OpenAI
        const response = await fetch('https://api.openai.com/v1/chat/completions', {
            method: 'POST',
            headers,
            body: JSON.stringify(body),
        });
        // 4. Handle Response
        if (!response.ok) {
            const errorText = await response.text();
            logger.error({ status: response.status, errorText, userId }, 'OpenAI API error');
            // Try to parse error as JSON to return proper error object
            try {
                const errorJson = JSON.parse(errorText);
                return c.json(errorJson, response.status);
            }
            catch (e) {
                // Upstream error body was not JSON — relay it verbatim.
                return c.text(errorText, response.status);
            }
        }
        // 5. Stream Response if requested
        if (body.stream) {
            // Need to handle streaming response properly in Hono/Node
            // We can return the body stream directly
            return new Response(response.body, {
                headers: {
                    'Content-Type': 'text/event-stream',
                    'Cache-Control': 'no-cache',
                    'Connection': 'keep-alive',
                }
            });
        }
        // Non-streaming: buffer and return the JSON completion as-is.
        const data = await response.json();
        return c.json(data);
    }
    catch (err) {
        logger.error({ err, userId }, 'OpenAI Proxy handler failed');
        return c.json({ error: 'Internal Server Error' }, 500);
    }
}
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaGFuZGxlcnMuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi8uLi9zcmMvcHJvZHVjdHMvb3BlbmFpL2hhbmRsZXJzLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUNBLE9BQU8sRUFBRSxNQUFNLEVBQUUsTUFBTSx5QkFBeUIsQ0FBQztBQUNqRCxPQUFPLEVBQUUsWUFBWSxFQUFFLE1BQU0sdUJBQXVCLENBQUM7QUFFckQseUVBQXlFO0FBQ3pFLE1BQU0sc0JBQXNCLEdBQUcsR0FBRyxFQUFFO0lBQ2hDLE1BQU0sR0FBRyxHQUFHLE9BQU8sQ0FBQyxHQUFHLENBQUMsWUFBWSxDQUFDO0lBQ3JDLE1BQU0sR0FBRyxHQUFHLE9BQU8sQ0FBQyxHQUFHLENBQUMsb0JBQW9CLENBQUM7SUFDN0MsSUFBSSxDQUFDLEdBQUcsSUFBSSxDQUFDLEdBQUcsRUFBRSxDQUFDO1FBQ2YsTUFBTSxJQUFJLEtBQUssQ0FBQyw4Q0FBOEMsQ0FBQyxDQUFDO0lBQ3BFLENBQUM7SUFDRCxPQUFPLEVBQUUsR0FBRyxFQUFFLEdBQUcsRUFBRSxDQUFDO0FBQ3hCLENBQUMsQ0FBQztBQUVGLE1BQU0sQ0FBQyxLQUFLLFVBQVUscUJBQXFCLENBQUMsQ0FBVTtJQUNsRCxNQUFNLE1BQU0sR0FBRyxDQUFDLENBQUMsR0FBRyxDQUFDLFFBQVEsQ0FBQyxDQUFDO0lBQy9CLElBQUksQ0FBQyxNQUFNLEVBQUUsQ0FBQztRQUNWLE9BQU8sQ0FBQyxDQUFDLElBQUksQ0FBQyxFQUFFLEtBQUssRUFBRSxjQUFjLEVBQUUsRUFBRSxHQUFHLENBQUMsQ0FBQztJQUNsRCxDQUFDO0lBRUQsSUFBSSxDQUFDO1FBQ0Qsd0JBQXdCO1FBQ3hCLE1BQU0sRUFBRSxHQUFHLEVBQUUsR0FBRyxFQUFFLEdBQUcsc0JBQXNCLEVBQUUsQ0FBQztRQUM5QyxNQUFNLFFBQVEsR0FBRyxZQUFZLENBQUMsR0FBRyxFQUFFLEdBQUcsQ0FBQyxDQUFDO1FBRXhDLE1BQU0sRUFBRSxJQUFJLEVBQUUsV0FBVyxFQUFFLEtBQUssRUFBRSxZQUFZLEVBQUUsR0FBRyxNQUFNLFFBQVE7YUFDNUQsSUFBSSxDQUFDLGNBQWMsQ0FBQzthQUNwQixNQUFNLENBQUMsVUFBVSxDQUFDO2FBQ2xCLEVBQUUsQ0FBQyxTQUFTLEVBQUUsTUFBTSxDQUFDO2FBQ3JCLFdBQVcsRUFBRSxDQUFDO1FBRW5CLElBQUksWUFBWSxFQUFFLENBQUM7WUFDZixNQUFNLENBQUMsS0FBSyxDQUFDLEVBQUUsR0FBRyxFQUFFLFlBQVksRUFBRSxNQUFNLEVBQUUsRUFBRSw4QkFBOEIsQ0FBQyxDQUFDO1lBQzVFLE9BQU8sQ0FBQyxDQUFDLElBQUksQ0FBQyxFQUFFLEtBQUssRUFBRSx1QkFBdUIsRUFBRSxFQUFFLEdBQUcsQ0FBQyxDQUFDO1FBQzNELENBQUM7UUFFRCxvQkFBb0I7UUFDcEIsTUFBTSxDQUFDLEtBQUssQ0FBQyxFQUFFLE1BQU0sRUFBRSxVQUFVLEVBQUUsQ0FBQyxDQUFDLFdBQVcsRUFBRSxRQUFRLEVBQUUsV0FBVyxFQUFFLFFBQVEsRUFBRSxFQUFFLDZCQUE2QixDQUFDLENBQUM7UUFFcEgsTUFBTSxNQUFNLEdBQUksV0FBVyxFQUFFLFFBQWdCLEVBQUUsUUFBUSxFQUFFLGNBQWMsQ0FBQztRQUV4RSxJQUFJLENBQU
MsTUFBTSxFQUFFLENBQUM7WUFDVixNQUFNLENBQUMsSUFBSSxDQUFDLEVBQUUsTUFBTSxFQUFFLEVBQUUsd0NBQXdDLENBQUMsQ0FBQztZQUNsRSxPQUFPLENBQUMsQ0FBQyxJQUFJLENBQUMsRUFBRSxLQUFLLEVBQUUsbUVBQW1FLEVBQUUsRUFBRSxHQUFHLENBQUMsQ0FBQztRQUN2RyxDQUFDO1FBRUQsK0JBQStCO1FBQy9CLE1BQU0sSUFBSSxHQUFHLE1BQU0sQ0FBQyxDQUFDLEdBQUcsQ0FBQyxJQUFJLEVBQUUsQ0FBQztRQUVoQyx3Q0FBd0M7UUFDeEMsTUFBTSxDQUFDLElBQUksQ0FBQyxFQUFFLE1BQU0sRUFBRSxLQUFLLEVBQUUsSUFBSSxDQUFDLEtBQUssRUFBRSxFQUFFLHlCQUF5QixDQUFDLENBQUM7UUFFdEUsTUFBTSxPQUFPLEdBQTJCO1lBQ3BDLGNBQWMsRUFBRSxrQkFBa0I7WUFDbEMsZUFBZSxFQUFFLFVBQVUsTUFBTSxFQUFFO1NBQ3RDLENBQUM7UUFFRiw0QkFBNEI7UUFDNUIsTUFBTSxRQUFRLEdBQUcsTUFBTSxLQUFLLENBQUMsNENBQTRDLEVBQUU7WUFDdkUsTUFBTSxFQUFFLE1BQU07WUFDZCxPQUFPO1lBQ1AsSUFBSSxFQUFFLElBQUksQ0FBQyxTQUFTLENBQUMsSUFBSSxDQUFDO1NBQzdCLENBQUMsQ0FBQztRQUVILHFCQUFxQjtRQUNyQixJQUFJLENBQUMsUUFBUSxDQUFDLEVBQUUsRUFBRSxDQUFDO1lBQ2YsTUFBTSxTQUFTLEdBQUcsTUFBTSxRQUFRLENBQUMsSUFBSSxFQUFFLENBQUM7WUFDeEMsTUFBTSxDQUFDLEtBQUssQ0FBQyxFQUFFLE1BQU0sRUFBRSxRQUFRLENBQUMsTUFBTSxFQUFFLFNBQVMsRUFBRSxNQUFNLEVBQUUsRUFBRSxrQkFBa0IsQ0FBQyxDQUFDO1lBQ2pGLDJEQUEyRDtZQUMzRCxJQUFJLENBQUM7Z0JBQ0QsTUFBTSxTQUFTLEdBQUcsSUFBSSxDQUFDLEtBQUssQ0FBQyxTQUFTLENBQUMsQ0FBQztnQkFDeEMsT0FBTyxDQUFDLENBQUMsSUFBSSxDQUFDLFNBQVMsRUFBRSxRQUFRLENBQUMsTUFBYSxDQUFDLENBQUM7WUFDckQsQ0FBQztZQUFDLE9BQU8sQ0FBQyxFQUFFLENBQUM7Z0JBQ1QsT0FBTyxDQUFDLENBQUMsSUFBSSxDQUFDLFNBQVMsRUFBRSxRQUFRLENBQUMsTUFBYSxDQUFDLENBQUM7WUFDckQsQ0FBQztRQUNMLENBQUM7UUFFRCxrQ0FBa0M7UUFDbEMsSUFBSSxJQUFJLENBQUMsTUFBTSxFQUFFLENBQUM7WUFDZCwwREFBMEQ7WUFDMUQseUNBQXlDO1lBRXpDLE9BQU8sSUFBSSxRQUFRLENBQUMsUUFBUSxDQUFDLElBQUksRUFBRTtnQkFDL0IsT0FBTyxFQUFFO29CQUNMLGNBQWMsRUFBRSxtQkFBbUI7b0JBQ25DLGVBQWUsRUFBRSxVQUFVO29CQUMzQixZQUFZLEVBQUUsWUFBWTtpQkFDN0I7YUFDSixDQUFDLENBQUM7UUFDUCxDQUFDO1FBRUQsTUFBTSxJQUFJLEdBQUcsTUFBTSxRQUFRLENBQUMsSUFBSSxFQUFFLENBQUM7UUFDbkMsT0FBTyxDQUFDLENBQUMsSUFBSSxDQUFDLElBQUksQ0FBQyxDQUFDO0lBRXhCLENBQUM7SUFBQyxPQUFPLEdBQVEsRUFBRSxDQUFDO1FBQ2hCLE1BQU0sQ0FBQyxLQUFLLENBQUMsRUFBRSxHQUFHLEVBQUUsTUFBTSxFQUFFLEVBQUUsNkJBQTZCLENBQU
MsQ0FBQztRQUM3RCxPQUFPLENBQUMsQ0FBQyxJQUFJLENBQUMsRUFBRSxLQUFLLEVBQUUsdUJBQXVCLEVBQUUsRUFBRSxHQUFHLENBQUMsQ0FBQztJQUMzRCxDQUFDO0FBQ0wsQ0FBQyJ9
|
||||
32
dist-in/products/openai/index.js
Normal file
32
dist-in/products/openai/index.js
Normal file
@ -0,0 +1,32 @@
|
||||
import { AbstractProduct } from '../AbstractProduct.js';
|
||||
import { postChatCompletionsRoute } from './routes.js';
|
||||
import { handleChatCompletions } from './handlers.js';
|
||||
/**
 * Product wrapper exposing the OpenAI chat-completions proxy route.
 * It contributes no workers, actions, or job options — it is a pure
 * HTTP passthrough registered via the product registry.
 */
export class OpenAIProduct extends AbstractProduct {
    id = 'openai';
    jobOptions = {};
    actions = {};
    workers = [];
    routes = [];
    constructor() {
        super();
        this.initializeRoutes();
    }
    // Wire up the single chat-completions route. The registry mounts each
    // entry via app.openapi(route.definition, route.handler), so the handler
    // is used directly without any caching wrapper.
    initializeRoutes() {
        this.routes.push({
            definition: postChatCompletionsRoute,
            handler: handleChatCompletions,
        });
    }
    // Constant singleton key — LLM responses are not content-addressed here.
    hash(data) {
        return 'openai-hash';
    }
    // Per-user metadata merged into job payloads by the subscriber.
    meta(userId) {
        return { userId };
    }
}
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi8uLi9zcmMvcHJvZHVjdHMvb3BlbmFpL2luZGV4LnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLE9BQU8sRUFBRSxlQUFlLEVBQUUsTUFBTSx1QkFBdUIsQ0FBQztBQUN4RCxPQUFPLEVBQUUsd0JBQXdCLEVBQUUsTUFBTSxhQUFhLENBQUM7QUFDdkQsT0FBTyxFQUFFLHFCQUFxQixFQUFFLE1BQU0sZUFBZSxDQUFDO0FBRXRELE1BQU0sT0FBTyxhQUFjLFNBQVEsZUFBb0I7SUFDbkQsRUFBRSxHQUFHLFFBQVEsQ0FBQztJQUNkLFVBQVUsR0FBRyxFQUFFLENBQUM7SUFDaEIsT0FBTyxHQUFHLEVBQUUsQ0FBQztJQUNiLE9BQU8sR0FBRyxFQUFFLENBQUM7SUFDYixNQUFNLEdBQVUsRUFBRSxDQUFDO0lBRW5CO1FBQ0ksS0FBSyxFQUFFLENBQUM7UUFDUixJQUFJLENBQUMsZ0JBQWdCLEVBQUUsQ0FBQztJQUM1QixDQUFDO0lBRUQsZ0JBQWdCO1FBQ1oscUNBQXFDO1FBQ3JDLDhIQUE4SDtRQUM5SCw0SEFBNEg7UUFDNUgscUVBQXFFO1FBQ3JFLHdFQUF3RTtRQUV4RSxJQUFJLENBQUMsTUFBTSxDQUFDLElBQUksQ0FBQztZQUNiLFVBQVUsRUFBRSx3QkFBd0I7WUFDcEMsT0FBTyxFQUFFLHFCQUFxQjtTQUNqQyxDQUFDLENBQUM7SUFDUCxDQUFDO0lBRUQsSUFBSSxDQUFDLElBQVM7UUFDVixPQUFPLGFBQWEsQ0FBQztJQUN6QixDQUFDO0lBRUQsSUFBSSxDQUFDLE1BQWM7UUFDZixPQUFPLEVBQUUsTUFBTSxFQUFFLENBQUM7SUFDdEIsQ0FBQztDQUNKIn0=
|
||||
58
dist-in/products/openai/routes.js
Normal file
58
dist-in/products/openai/routes.js
Normal file
@ -0,0 +1,58 @@
|
||||
import { createRoute, z } from '@hono/zod-openapi';
|
||||
// OpenAPI definition for the OpenAI chat-completions proxy endpoint.
// The request schema mirrors the documented OpenAI parameters, and
// .passthrough() lets any additional/new OpenAI params flow through.
export const postChatCompletionsRoute = createRoute({
    method: 'post',
    path: '/api/openai/v1/chat/completions',
    tags: ['OpenAI'],
    summary: 'Chat Completions Proxy',
    description: 'Proxies chat completion requests to OpenAI, injecting user API key.',
    request: {
        body: {
            content: {
                'application/json': {
                    schema: z.object({
                        model: z.string(),
                        messages: z.array(z.object({
                            role: z.string(),
                            content: z.any() // string or array (for multimodal)
                        })),
                        stream: z.boolean().optional(),
                        temperature: z.number().optional(),
                        top_p: z.number().optional(),
                        n: z.number().optional(),
                        presence_penalty: z.number().optional(),
                        frequency_penalty: z.number().optional(),
                        logit_bias: z.record(z.string(), z.number()).optional(),
                        user: z.string().optional(),
                        max_tokens: z.number().optional(),
                        response_format: z.any().optional(),
                        tools: z.array(z.any()).optional(),
                        tool_choice: z.any().optional(),
                    }).passthrough() // Allow other OpenAI params
                }
            }
        }
    },
    responses: {
        200: {
            description: 'Chat completion response',
            content: {
                'application/json': { schema: z.any() },
                'text/event-stream': { schema: z.string() }
            }
        },
        400: { description: 'Bad Request' },
        401: { description: 'Unauthorized' },
        500: { description: 'Internal Server Error' }
    }
});
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoicm91dGVzLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vLi4vLi4vc3JjL3Byb2R1Y3RzL29wZW5haS9yb3V0ZXMudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUEsT0FBTyxFQUFFLFdBQVcsRUFBRSxDQUFDLEVBQUUsTUFBTSxtQkFBbUIsQ0FBQztBQUVuRCxNQUFNLENBQUMsTUFBTSx3QkFBd0IsR0FBRyxXQUFXLENBQUM7SUFDaEQsTUFBTSxFQUFFLE1BQU07SUFDZCxJQUFJLEVBQUUsaUNBQWlDO0lBQ3ZDLElBQUksRUFBRSxDQUFDLFFBQVEsQ0FBQztJQUNoQixPQUFPLEVBQUUsd0JBQXdCO0lBQ2pDLFdBQVcsRUFBRSxxRUFBcUU7SUFDbEYsT0FBTyxFQUFFO1FBQ0wsSUFBSSxFQUFFO1lBQ0YsT0FBTyxFQUFFO2dCQUNMLGtCQUFrQixFQUFFO29CQUNoQixNQUFNLEVBQUUsQ0FBQyxDQUFDLE1BQU0sQ0FBQzt3QkFDYixLQUFLLEVBQUUsQ0FBQyxDQUFDLE1BQU0sRUFBRTt3QkFDakIsUUFBUSxFQUFFLENBQUMsQ0FBQyxLQUFLLENBQUMsQ0FBQyxDQUFDLE1BQU0sQ0FBQzs0QkFDdkIsSUFBSSxFQUFFLENBQUMsQ0FBQyxNQUFNLEVBQUU7NEJBQ2hCLE9BQU8sRUFBRSxDQUFDLENBQUMsR0FBRyxFQUFFLENBQUMsbUNBQW1DO3lCQUN2RCxDQUFDLENBQUM7d0JBQ0gsTUFBTSxFQUFFLENBQUMsQ0FBQyxPQUFPLEVBQUUsQ0FBQyxRQUFRLEVBQUU7d0JBQzlCLFdBQVcsRUFBRSxDQUFDLENBQUMsTUFBTSxFQUFFLENBQUMsUUFBUSxFQUFFO3dCQUNsQyxLQUFLLEVBQUUsQ0FBQyxDQUFDLE1BQU0sRUFBRSxDQUFDLFFBQVEsRUFBRTt3QkFDNUIsQ0FBQyxFQUFFLENBQUMsQ0FBQyxNQUFNLEVBQUUsQ0FBQyxRQUFRLEVBQUU7d0JBQ3hCLGdCQUFnQixFQUFFLENBQUMsQ0FBQyxNQUFNLEVBQUUsQ0FBQyxRQUFRLEVBQUU7d0JBQ3ZDLGlCQUFpQixFQUFFLENBQUMsQ0FBQyxNQUFNLEVBQUUsQ0FBQyxRQUFRLEVBQUU7d0JBQ3hDLFVBQVUsRUFBRSxDQUFDLENBQUMsTUFBTSxDQUFDLENBQUMsQ0FBQyxNQUFNLEVBQUUsRUFBRSxDQUFDLENBQUMsTUFBTSxFQUFFLENBQUMsQ0FBQyxRQUFRLEVBQUU7d0JBQ3ZELElBQUksRUFBRSxDQUFDLENBQUMsTUFBTSxFQUFFLENBQUMsUUFBUSxFQUFFO3dCQUMzQixVQUFVLEVBQUUsQ0FBQyxDQUFDLE1BQU0sRUFBRSxDQUFDLFFBQVEsRUFBRTt3QkFDakMsZUFBZSxFQUFFLENBQUMsQ0FBQyxHQUFHLEVBQUUsQ0FBQyxRQUFRLEVBQUU7d0JBQ25DLEtBQUssRUFBRSxDQUFDLENBQUMsS0FBSyxDQUFDLENBQUMsQ0FBQyxHQUFHLEVBQUUsQ0FBQyxDQUFDLFFBQVEsRUFBRTt3QkFDbEMsV0FBVyxFQUFFLENBQUMsQ0FBQyxHQUFHLEVBQUUsQ0FBQyxRQUFRLEVBQUU7cUJBQ2xDLENBQUMsQ0FBQyxXQUFXLEVBQUUsQ0FBQyw0QkFBNEI7aUJBQ2hEO2FBQ0o7U0FDSjtLQUNKO0lBQ0QsU0FBUyxFQUFFO1FBQ1AsR0FBRyxFQUFFO1lBQ0QsV0FBVyxFQUFFLDBCQUEwQjtZQUN2QyxPQUFPLE
VBQUU7Z0JBQ0wsa0JBQWtCLEVBQUU7b0JBQ2hCLE1BQU0sRUFBRSxDQUFDLENBQUMsR0FBRyxFQUFFO2lCQUNsQjtnQkFDRCxtQkFBbUIsRUFBRTtvQkFDakIsTUFBTSxFQUFFLENBQUMsQ0FBQyxNQUFNLEVBQUU7aUJBQ3JCO2FBQ0o7U0FDSjtRQUNELEdBQUcsRUFBRTtZQUNELFdBQVcsRUFBRSxhQUFhO1NBQzdCO1FBQ0QsR0FBRyxFQUFFO1lBQ0QsV0FBVyxFQUFFLGNBQWM7U0FDOUI7UUFDRCxHQUFHLEVBQUU7WUFDRCxXQUFXLEVBQUUsdUJBQXVCO1NBQ3ZDO0tBQ0o7Q0FDSixDQUFDLENBQUMifQ==
|
||||
49
dist-in/products/registry.js
Normal file
49
dist-in/products/registry.js
Normal file
@ -0,0 +1,49 @@
|
||||
import './subscriber.js';
|
||||
import { OpenAIProduct } from './openai/index.js';
|
||||
import { AnalyticsProduct } from './analytics/index.js';
|
||||
// import './subscriber.js';
|
||||
// Shared registry of instantiated products; filled by registerProductRoutes().
let instances = [];
// NOTE(review): ALL_PRODUCTS captures a reference to the initial array. If
// `instances` is ever REASSIGNED (rather than mutated in place), this export
// keeps pointing at the old, empty array and consumers (e.g. the job
// subscriber) will see no products — confirm registration mutates in place.
export const ALL_PRODUCTS = instances;
|
||||
/**
 * Instantiate every product and mount its routes on the Hono app.
 *
 * Mutates the shared `instances` array in place rather than reassigning it:
 * the exported ALL_PRODUCTS alias is captured once at module load, so a
 * reassignment (`instances = [...]`) would leave ALL_PRODUCTS pointing at
 * the original empty array and the subscriber would never find any product.
 *
 * @param app Hono/zod-openapi app exposing app.openapi(definition, handler).
 */
export const registerProductRoutes = async (app) => {
    console.log('Registering product routes');
    // Instantiate all products — in place, to keep the ALL_PRODUCTS alias live.
    instances.length = 0;
    instances.push(new OpenAIProduct(), new AnalyticsProduct());
    for (const product of instances) {
        console.log(`Registering routes for product ${product.id}`);
        for (const route of product.routes) {
            // @ts-ignore
            app.openapi(route.definition, route.handler);
        }
    }
};
|
||||
// Collect every worker class contributed by the registered products.
export const getAllWorkers = () => {
    const workers = [];
    for (const product of instances) {
        workers.push(...(product.workers || []));
    }
    return workers;
};
|
||||
/**
 * Start each registered product sequentially, bounding every startup by a
 * 20-second timeout. A product that fails or hangs is logged and skipped so
 * the remaining products still start.
 *
 * @param boss job-queue instance handed to each product's start().
 */
export const startProducts = async (boss) => {
    for (const product of instances) {
        // Keep the timer handle outside the promise so it can be cleared once
        // the race settles. The original cleared it inside the fired callback
        // (a no-op), leaving a live timer after a successful start whose later
        // rejection was unhandled.
        let timeoutId;
        const timeoutPromise = new Promise((_, reject) => {
            timeoutId = setTimeout(() => {
                // @ts-ignore
                reject(new Error(`Product ${product?.id || 'unknown'} startup timed out`));
            }, 20000); // 20 seconds timeout
        });
        try {
            // Race the product start against the timeout
            await Promise.race([
                product.start(boss),
                timeoutPromise
            ]);
        }
        catch (err) {
            // @ts-ignore
            console.error(`Failed to start product ${product.id}`, err);
            // Continue with other products even if one fails
        }
        finally {
            clearTimeout(timeoutId);
        }
    }
};
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoicmVnaXN0cnkuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvcHJvZHVjdHMvcmVnaXN0cnkudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQ0EsT0FBTyxpQkFBaUIsQ0FBQztBQUV6QixPQUFPLEVBQUUsYUFBYSxFQUFFLE1BQU0sbUJBQW1CLENBQUM7QUFDbEQsT0FBTyxFQUFFLGdCQUFnQixFQUFFLE1BQU0sc0JBQXNCLENBQUM7QUFFeEQsNEJBQTRCO0FBRTVCLElBQUksU0FBUyxHQUFVLEVBQUUsQ0FBQztBQUMxQixNQUFNLENBQUMsTUFBTSxZQUFZLEdBQUcsU0FBUyxDQUFDO0FBRXRDLE1BQU0sQ0FBQyxNQUFNLHFCQUFxQixHQUFHLEtBQUssRUFBRSxHQUFRLEVBQUUsRUFBRTtJQUNwRCxPQUFPLENBQUMsR0FBRyxDQUFDLDRCQUE0QixDQUFDLENBQUM7SUFDMUMsMkJBQTJCO0lBQzNCLFNBQVMsR0FBRztRQUNSLElBQUksYUFBYSxFQUFFO1FBQ25CLElBQUksZ0JBQWdCLEVBQUU7S0FDekIsQ0FBQztJQUVGLFNBQVMsQ0FBQyxPQUFPLENBQUMsT0FBTyxDQUFDLEVBQUU7UUFDeEIsT0FBTyxDQUFDLEdBQUcsQ0FBQyxrQ0FBa0MsT0FBTyxDQUFDLEVBQUUsRUFBRSxDQUFDLENBQUM7UUFDNUQsT0FBTyxDQUFDLE1BQU0sQ0FBQyxPQUFPLENBQUMsQ0FBQyxLQUFVLEVBQUUsRUFBRTtZQUNsQyxhQUFhO1lBQ2IsR0FBRyxDQUFDLE9BQU8sQ0FBQyxLQUFLLENBQUMsVUFBVSxFQUFFLEtBQUssQ0FBQyxPQUFPLENBQUMsQ0FBQztRQUNqRCxDQUFDLENBQUMsQ0FBQztJQUNQLENBQUMsQ0FBQyxDQUFDO0FBQ1AsQ0FBQyxDQUFDO0FBRUYsTUFBTSxDQUFDLE1BQU0sYUFBYSxHQUFHLEdBQUcsRUFBRTtJQUM5QixPQUFPLFNBQVMsQ0FBQyxPQUFPLENBQUMsQ0FBQyxDQUFDLEVBQUUsQ0FBQyxDQUFDLENBQUMsT0FBTyxJQUFJLEVBQUUsQ0FBQyxDQUFDO0FBQ25ELENBQUMsQ0FBQztBQUVGLE1BQU0sQ0FBQyxNQUFNLGFBQWEsR0FBRyxLQUFLLEVBQUUsSUFBVSxFQUFFLEVBQUU7SUFDOUMsS0FBSyxNQUFNLE9BQU8sSUFBSSxTQUFTLEVBQUUsQ0FBQztRQUM5QixJQUFJLENBQUM7WUFDRCwyQkFBMkI7WUFDM0IsTUFBTSxjQUFjLEdBQUcsSUFBSSxPQUFPLENBQUMsQ0FBQyxDQUFDLEVBQUUsTUFBTSxFQUFFLEVBQUU7Z0JBQzdDLE1BQU0sRUFBRSxHQUFHLFVBQVUsQ0FBQyxHQUFHLEVBQUU7b0JBQ3ZCLFlBQVksQ0FBQyxFQUFFLENBQUMsQ0FBQztvQkFDakIsYUFBYTtvQkFDYixNQUFNLENBQUMsSUFBSSxLQUFLLENBQUMsV0FBVyxPQUFPLEVBQUUsRUFBRSxJQUFJLFNBQVMsb0JBQW9CLENBQUMsQ0FBQyxDQUFDO2dCQUMvRSxDQUFDLEVBQUUsS0FBSyxDQUFDLENBQUMsQ0FBQyxvQkFBb0I7WUFDbkMsQ0FBQyxDQUFDLENBQUM7WUFFSCw2Q0FBNkM7WUFDN0MsTUFBTSxPQUFPLENBQUMsSUFBSSxDQUFDO2dCQUNmLE9BQU8sQ0FBQyxLQUFLLENBQUMsSUFBSSxDQUFDO2dCQUNuQixjQUFjO2FBQ2pCLENBQUMsQ0
FBQztRQUVQLENBQUM7UUFBQyxPQUFPLEdBQUcsRUFBRSxDQUFDO1lBQ1gsYUFBYTtZQUNiLE9BQU8sQ0FBQyxLQUFLLENBQUMsMkJBQTJCLE9BQU8sQ0FBQyxFQUFFLEVBQUUsRUFBRSxHQUFHLENBQUMsQ0FBQztZQUM1RCxpREFBaUQ7UUFDckQsQ0FBQztJQUNMLENBQUM7QUFDTCxDQUFDLENBQUMifQ==
|
||||
307
dist-in/products/serving/routes.js
Normal file
307
dist-in/products/serving/routes.js
Normal file
File diff suppressed because one or more lines are too long
36
dist-in/products/subscriber.js
Normal file
36
dist-in/products/subscriber.js
Normal file
@ -0,0 +1,36 @@
|
||||
import { ALL_PRODUCTS } from './registry.js';
|
||||
import { EventBus } from './EventBus.js';
|
||||
// Locate the product that owns a queue by probing each registered worker
// class: instantiate it and compare its queueName. A worker whose
// construction throws is simply treated as "not this queue".
const findProductByQueue = (queue) => {
    const ownsQueue = (WorkerClass) => {
        try {
            return new WorkerClass().queueName === queue;
        }
        catch (e) {
            return false;
        }
    };
    return ALL_PRODUCTS.find(product => product.workers?.some(ownsQueue));
};
|
||||
// Enrich each job-creation event with product-level defaults before it is
// submitted: merge the product's default job options, derive a singleton
// key for de-duplication, and attach per-user metadata.
EventBus.on('job:create', (event) => {
    const product = findProductByQueue(event.queue);
    if (!product)
        return;
    // Product defaults lose to options supplied on the event itself.
    if (product.jobOptions) {
        event.options = { ...product.jobOptions, ...event.options };
    }
    // A truthy hash enables singleton de-duplication for this job.
    const singletonKey = product.hash(event.data);
    if (singletonKey) {
        event.options.singletonKey = singletonKey;
        // Default to 5 minutes if not specified
        if (!event.options.singletonSeconds) {
            event.options.singletonSeconds = 300;
        }
    }
    // Merge product metadata derived from the submitting user, if any.
    const { userId } = event.data;
    if (userId) {
        event.data = { ...event.data, ...product.meta(userId) };
    }
});
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoic3Vic2NyaWJlci5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy9wcm9kdWN0cy9zdWJzY3JpYmVyLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLE9BQU8sRUFBRSxZQUFZLEVBQUUsTUFBTSxlQUFlLENBQUM7QUFDN0MsT0FBTyxFQUFFLFFBQVEsRUFBRSxNQUFNLGVBQWUsQ0FBQztBQUV6QyxNQUFNLGtCQUFrQixHQUFHLENBQUMsS0FBYSxFQUFFLEVBQUU7SUFDekMsT0FBTyxZQUFZLENBQUMsSUFBSSxDQUFDLENBQUMsQ0FBQyxFQUFFLENBQ3pCLENBQUMsQ0FBQyxPQUFPLEVBQUUsSUFBSSxDQUFDLENBQUMsQ0FBTSxFQUFFLEVBQUU7UUFDdkIsSUFBSSxDQUFDO1lBQ0QsTUFBTSxNQUFNLEdBQUcsSUFBSyxDQUFTLEVBQUUsQ0FBQztZQUNoQyxPQUFPLE1BQU0sQ0FBQyxTQUFTLEtBQUssS0FBSyxDQUFDO1FBQ3RDLENBQUM7UUFBQyxPQUFPLENBQUMsRUFBRSxDQUFDO1lBQ1QsT0FBTyxLQUFLLENBQUM7UUFDakIsQ0FBQztJQUNMLENBQUMsQ0FBQyxDQUNMLENBQUM7QUFDTixDQUFDLENBQUM7QUFFRixRQUFRLENBQUMsRUFBRSxDQUFDLFlBQVksRUFBRSxDQUFDLEtBQVUsRUFBRSxFQUFFO0lBQ3JDLE1BQU0sT0FBTyxHQUFHLGtCQUFrQixDQUFDLEtBQUssQ0FBQyxLQUFLLENBQUMsQ0FBQztJQUVoRCxJQUFJLENBQUMsT0FBTztRQUFFLE9BQU87SUFFckIsc0RBQXNEO0lBQ3RELElBQUksT0FBTyxDQUFDLFVBQVUsRUFBRSxDQUFDO1FBQ3JCLEtBQUssQ0FBQyxPQUFPLEdBQUcsRUFBRSxHQUFHLE9BQU8sQ0FBQyxVQUFVLEVBQUUsR0FBRyxLQUFLLENBQUMsT0FBTyxFQUFFLENBQUM7SUFDaEUsQ0FBQztJQUVELE1BQU0sWUFBWSxHQUFHLE9BQU8sQ0FBQyxJQUFJLENBQUMsS0FBSyxDQUFDLElBQUksQ0FBQyxDQUFDO0lBQzlDLElBQUksWUFBWSxFQUFFLENBQUM7UUFDZixLQUFLLENBQUMsT0FBTyxDQUFDLFlBQVksR0FBRyxZQUFZLENBQUM7UUFDMUMsd0NBQXdDO1FBQ3hDLElBQUksQ0FBQyxLQUFLLENBQUMsT0FBTyxDQUFDLGdCQUFnQixFQUFFLENBQUM7WUFDbEMsS0FBSyxDQUFDLE9BQU8sQ0FBQyxnQkFBZ0IsR0FBRyxHQUFHLENBQUM7UUFDekMsQ0FBQztJQUNMLENBQUM7SUFFRCxNQUFNLEVBQUUsTUFBTSxFQUFFLEdBQUcsS0FBSyxDQUFDLElBQUksQ0FBQztJQUM5QixJQUFJLE1BQU0sRUFBRSxDQUFDO1FBQ1QsTUFBTSxRQUFRLEdBQUcsT0FBTyxDQUFDLElBQUksQ0FBQyxNQUFNLENBQUMsQ0FBQztRQUN0QyxLQUFLLENBQUMsSUFBSSxHQUFHLEVBQUUsR0FBRyxLQUFLLENBQUMsSUFBSSxFQUFFLEdBQUcsUUFBUSxFQUFFLENBQUM7SUFDaEQsQ0FBQztBQUVMLENBQUMsQ0FBQyxDQUFDIn0=
|
||||
20
dist-in/schemas/index.js
Normal file
20
dist-in/schemas/index.js
Normal file
@ -0,0 +1,20 @@
|
||||
import { z } from '@hono/zod-openapi';
|
||||
// Generic error envelope returned by failing endpoints.
export const ErrorSchema = z.object({
    error: z.string(),
});
// Shape of a stored image record as returned by the API.
// NOTE(review): price and variants are modeled as strings (examples '10.00',
// '[]') rather than number/array — presumably mirroring raw DB driver output;
// confirm before consumers parse them.
export const ImageSchema = z.object({
    idx: z.number().openapi({ example: 0 }),
    id: z.number().openapi({ example: 6 }),
    name: z.string().openapi({ example: 'images' }),
    slug: z.string().openapi({ example: 'images' }),
    description: z.string().openapi({ example: 'fcghdfgh' }),
    price: z.string().openapi({ example: '10.00' }),
    variants: z.string().openapi({ example: '[]' }),
    created_at: z.string().openapi({ example: '2025-11-22 10:46:09.77718+00' }),
    updated_at: z.string().openapi({ example: '2025-11-22 10:46:09.77718+00' }),
});
// Standard success envelope wrapping a list of images; `data` is omitted
// by some responses, hence optional.
export const ImageResponseSchema = z.object({
    message: z.string().openapi({ example: 'Success' }),
    data: z.array(ImageSchema).optional(),
});
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvc2NoZW1hcy9pbmRleC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxPQUFPLEVBQUUsQ0FBQyxFQUFFLE1BQU0sbUJBQW1CLENBQUE7QUFFckMsTUFBTSxDQUFDLE1BQU0sV0FBVyxHQUFHLENBQUMsQ0FBQyxNQUFNLENBQUM7SUFDaEMsS0FBSyxFQUFFLENBQUMsQ0FBQyxNQUFNLEVBQUU7Q0FDcEIsQ0FBQyxDQUFBO0FBQ0YsTUFBTSxDQUFDLE1BQU0sV0FBVyxHQUFHLENBQUMsQ0FBQyxNQUFNLENBQUM7SUFDaEMsR0FBRyxFQUFFLENBQUMsQ0FBQyxNQUFNLEVBQUUsQ0FBQyxPQUFPLENBQUMsRUFBRSxPQUFPLEVBQUUsQ0FBQyxFQUFFLENBQUM7SUFDdkMsRUFBRSxFQUFFLENBQUMsQ0FBQyxNQUFNLEVBQUUsQ0FBQyxPQUFPLENBQUMsRUFBRSxPQUFPLEVBQUUsQ0FBQyxFQUFFLENBQUM7SUFDdEMsSUFBSSxFQUFFLENBQUMsQ0FBQyxNQUFNLEVBQUUsQ0FBQyxPQUFPLENBQUMsRUFBRSxPQUFPLEVBQUUsUUFBUSxFQUFFLENBQUM7SUFDL0MsSUFBSSxFQUFFLENBQUMsQ0FBQyxNQUFNLEVBQUUsQ0FBQyxPQUFPLENBQUMsRUFBRSxPQUFPLEVBQUUsUUFBUSxFQUFFLENBQUM7SUFDL0MsV0FBVyxFQUFFLENBQUMsQ0FBQyxNQUFNLEVBQUUsQ0FBQyxPQUFPLENBQUMsRUFBRSxPQUFPLEVBQUUsVUFBVSxFQUFFLENBQUM7SUFDeEQsS0FBSyxFQUFFLENBQUMsQ0FBQyxNQUFNLEVBQUUsQ0FBQyxPQUFPLENBQUMsRUFBRSxPQUFPLEVBQUUsT0FBTyxFQUFFLENBQUM7SUFDL0MsUUFBUSxFQUFFLENBQUMsQ0FBQyxNQUFNLEVBQUUsQ0FBQyxPQUFPLENBQUMsRUFBRSxPQUFPLEVBQUUsSUFBSSxFQUFFLENBQUM7SUFDL0MsVUFBVSxFQUFFLENBQUMsQ0FBQyxNQUFNLEVBQUUsQ0FBQyxPQUFPLENBQUMsRUFBRSxPQUFPLEVBQUUsOEJBQThCLEVBQUUsQ0FBQztJQUMzRSxVQUFVLEVBQUUsQ0FBQyxDQUFDLE1BQU0sRUFBRSxDQUFDLE9BQU8sQ0FBQyxFQUFFLE9BQU8sRUFBRSw4QkFBOEIsRUFBRSxDQUFDO0NBQzlFLENBQUMsQ0FBQTtBQUVGLE1BQU0sQ0FBQyxNQUFNLG1CQUFtQixHQUFHLENBQUMsQ0FBQyxNQUFNLENBQUM7SUFDeEMsT0FBTyxFQUFFLENBQUMsQ0FBQyxNQUFNLEVBQUUsQ0FBQyxPQUFPLENBQUMsRUFBRSxPQUFPLEVBQUUsU0FBUyxFQUFFLENBQUM7SUFDbkQsSUFBSSxFQUFFLENBQUMsQ0FBQyxLQUFLLENBQUMsV0FBVyxDQUFDLENBQUMsUUFBUSxFQUFFO0NBQ3hDLENBQUMsQ0FBQSJ9
|
||||
82
dist-in/serve-assets.js
Normal file
82
dist-in/serve-assets.js
Normal file
@ -0,0 +1,82 @@
|
||||
import { serveStatic } from '@hono/node-server/serve-static';
|
||||
import path from 'path';
|
||||
export const registerAssetRoutes = (app) => {
|
||||
// Serve manifest.webmanifest from dist root
|
||||
app.get('/manifest.webmanifest', serveStatic({
|
||||
root: process.env.CLIENT_DIST_PATH || '../dist',
|
||||
path: 'manifest.webmanifest'
|
||||
}));
|
||||
// Serve service worker — must never be cached so browser always checks for updates
|
||||
app.get('/sw.js', async (c, next) => {
|
||||
await next();
|
||||
c.res.headers.set('Cache-Control', 'no-cache');
|
||||
});
|
||||
app.get('/sw.js', serveStatic({
|
||||
root: process.env.CLIENT_DIST_PATH || '../dist',
|
||||
path: 'sw.js'
|
||||
}));
|
||||
// Serve registerSW.js — must always be fresh
|
||||
app.get('/registerSW.js', async (c, next) => {
|
||||
await next();
|
||||
c.res.headers.set('Cache-Control', 'no-cache');
|
||||
});
|
||||
app.get('/registerSW.js', serveStatic({
|
||||
root: process.env.CLIENT_DIST_PATH || '../dist',
|
||||
path: 'registerSW.js'
|
||||
}));
|
||||
// Serve workbox assets if they are at root
|
||||
app.get('/workbox-*.js', serveStatic({
|
||||
root: process.env.CLIENT_DIST_PATH || '../dist',
|
||||
rewriteRequestPath: (path) => path // Serve matching file
|
||||
}));
|
||||
// Serve workbox assets if they are at root
|
||||
app.get('/widgets/*', serveStatic({
|
||||
root: process.env.CLIENT_DIST_PATH || '../dist/widgets',
|
||||
rewriteRequestPath: (path) => path // Serve matching file
|
||||
}));
|
||||
// Serve root static assets (images, icons, robots.txt, etc) — short cache since not hash-busted
|
||||
app.use('/:file{.+\\.(png|ico|jpg|jpeg|svg|txt|xml)$}', async (c, next) => {
|
||||
await next();
|
||||
if (c.res.ok) {
|
||||
c.res.headers.set('Cache-Control', 'public, max-age=3600, must-revalidate');
|
||||
}
|
||||
});
|
||||
app.get('/:file{.+\\.(png|ico|jpg|jpeg|svg|txt|xml)$}', serveStatic({
|
||||
root: process.env.CLIENT_DIST_PATH || '../dist',
|
||||
}));
|
||||
// Serve static assets from dist
|
||||
app.use('/assets/*', async (c, next) => {
|
||||
await next();
|
||||
if (c.res.ok && c.res.status === 200) {
|
||||
c.res.headers.set('Cache-Control', 'public, max-age=31536000, immutable');
|
||||
}
|
||||
});
|
||||
app.use('/assets/*', serveStatic({
|
||||
root: process.env.CLIENT_DIST_PATH || '../dist',
|
||||
onNotFound: (path, c) => {
|
||||
return undefined;
|
||||
}
|
||||
}));
|
||||
// Serve embed assets
|
||||
app.use('/embed_assets/*', serveStatic({
|
||||
root: process.env.CLIENT_DIST_PATH ? path.join(process.env.CLIENT_DIST_PATH, 'client/embed') : '../dist/client/embed',
|
||||
onNotFound: (path, c) => {
|
||||
return undefined;
|
||||
},
|
||||
rewriteRequestPath: (path) => path.replace(/^\/embed_assets/, ''),
|
||||
}));
|
||||
// Serve filebrowser assets
|
||||
app.use('/filebrowser_assets/*', serveStatic({
|
||||
root: process.env.CLIENT_DIST_PATH ? path.join(process.env.CLIENT_DIST_PATH, 'client/filebrowser') : '../dist/client/filebrowser',
|
||||
onNotFound: (path, c) => {
|
||||
return undefined;
|
||||
},
|
||||
rewriteRequestPath: (path) => path.replace(/^\/filebrowser_assets/, ''),
|
||||
}));
|
||||
// Fallback to index.html for SPA
|
||||
app.get('*', serveStatic({
|
||||
root: process.env.CLIENT_DIST_PATH || '../dist',
|
||||
path: 'index.html'
|
||||
}));
|
||||
};
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoic2VydmUtYXNzZXRzLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vc3JjL3NlcnZlLWFzc2V0cy50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFFQSxPQUFPLEVBQUUsV0FBVyxFQUFFLE1BQU0sZ0NBQWdDLENBQUE7QUFDNUQsT0FBTyxJQUFJLE1BQU0sTUFBTSxDQUFBO0FBRXZCLE1BQU0sQ0FBQyxNQUFNLG1CQUFtQixHQUFHLENBQUMsR0FBZ0IsRUFBRSxFQUFFO0lBQ3BELDRDQUE0QztJQUM1QyxHQUFHLENBQUMsR0FBRyxDQUFDLHVCQUF1QixFQUFFLFdBQVcsQ0FBQztRQUN6QyxJQUFJLEVBQUUsT0FBTyxDQUFDLEdBQUcsQ0FBQyxnQkFBZ0IsSUFBSSxTQUFTO1FBQy9DLElBQUksRUFBRSxzQkFBc0I7S0FDL0IsQ0FBQyxDQUFDLENBQUM7SUFFSixtRkFBbUY7SUFDbkYsR0FBRyxDQUFDLEdBQUcsQ0FBQyxRQUFRLEVBQUUsS0FBSyxFQUFFLENBQUMsRUFBRSxJQUFJLEVBQUUsRUFBRTtRQUNoQyxNQUFNLElBQUksRUFBRSxDQUFDO1FBQ2IsQ0FBQyxDQUFDLEdBQUcsQ0FBQyxPQUFPLENBQUMsR0FBRyxDQUFDLGVBQWUsRUFBRSxVQUFVLENBQUMsQ0FBQztJQUNuRCxDQUFDLENBQUMsQ0FBQztJQUNILEdBQUcsQ0FBQyxHQUFHLENBQUMsUUFBUSxFQUFFLFdBQVcsQ0FBQztRQUMxQixJQUFJLEVBQUUsT0FBTyxDQUFDLEdBQUcsQ0FBQyxnQkFBZ0IsSUFBSSxTQUFTO1FBQy9DLElBQUksRUFBRSxPQUFPO0tBQ2hCLENBQUMsQ0FBQyxDQUFDO0lBRUosNkNBQTZDO0lBQzdDLEdBQUcsQ0FBQyxHQUFHLENBQUMsZ0JBQWdCLEVBQUUsS0FBSyxFQUFFLENBQUMsRUFBRSxJQUFJLEVBQUUsRUFBRTtRQUN4QyxNQUFNLElBQUksRUFBRSxDQUFDO1FBQ2IsQ0FBQyxDQUFDLEdBQUcsQ0FBQyxPQUFPLENBQUMsR0FBRyxDQUFDLGVBQWUsRUFBRSxVQUFVLENBQUMsQ0FBQztJQUNuRCxDQUFDLENBQUMsQ0FBQztJQUNILEdBQUcsQ0FBQyxHQUFHLENBQUMsZ0JBQWdCLEVBQUUsV0FBVyxDQUFDO1FBQ2xDLElBQUksRUFBRSxPQUFPLENBQUMsR0FBRyxDQUFDLGdCQUFnQixJQUFJLFNBQVM7UUFDL0MsSUFBSSxFQUFFLGVBQWU7S0FDeEIsQ0FBQyxDQUFDLENBQUM7SUFFSiwyQ0FBMkM7SUFDM0MsR0FBRyxDQUFDLEdBQUcsQ0FBQyxlQUFlLEVBQUUsV0FBVyxDQUFDO1FBQ2pDLElBQUksRUFBRSxPQUFPLENBQUMsR0FBRyxDQUFDLGdCQUFnQixJQUFJLFNBQVM7UUFDL0Msa0JBQWtCLEVBQUUsQ0FBQyxJQUFJLEVBQUUsRUFBRSxDQUFDLElBQUksQ0FBQyxzQkFBc0I7S0FDNUQsQ0FBQyxDQUFDLENBQUM7SUFFSiwyQ0FBMkM7SUFDM0MsR0FBRyxDQUFDLEdBQUcsQ0FBQyxZQUFZLEVBQUUsV0FBVyxDQUFDO1FBQzlCLElBQUksRUFBRSxPQUFPLENBQUMsR0FBRyxDQUFDLGdCQUFnQixJQUFJLGlCQUFpQjtRQUN2RCxrQkFBa0IsRUFBRSxDQUFDLElBQUksRUFBRSxFQUFFLENBQUMsSUFBSSxDQUFDLHNCQUFzQjtLQUM1RCxDQUFDLENBQU
MsQ0FBQztJQUVKLGdHQUFnRztJQUNoRyxHQUFHLENBQUMsR0FBRyxDQUFDLDhDQUE4QyxFQUFFLEtBQUssRUFBRSxDQUFDLEVBQUUsSUFBSSxFQUFFLEVBQUU7UUFDdEUsTUFBTSxJQUFJLEVBQUUsQ0FBQztRQUNiLElBQUksQ0FBQyxDQUFDLEdBQUcsQ0FBQyxFQUFFLEVBQUUsQ0FBQztZQUNYLENBQUMsQ0FBQyxHQUFHLENBQUMsT0FBTyxDQUFDLEdBQUcsQ0FBQyxlQUFlLEVBQUUsdUNBQXVDLENBQUMsQ0FBQztRQUNoRixDQUFDO0lBQ0wsQ0FBQyxDQUFDLENBQUM7SUFDSCxHQUFHLENBQUMsR0FBRyxDQUFDLDhDQUE4QyxFQUFFLFdBQVcsQ0FBQztRQUNoRSxJQUFJLEVBQUUsT0FBTyxDQUFDLEdBQUcsQ0FBQyxnQkFBZ0IsSUFBSSxTQUFTO0tBQ2xELENBQUMsQ0FBQyxDQUFDO0lBRUosZ0NBQWdDO0lBQ2hDLEdBQUcsQ0FBQyxHQUFHLENBQUMsV0FBVyxFQUFFLEtBQUssRUFBRSxDQUFDLEVBQUUsSUFBSSxFQUFFLEVBQUU7UUFDbkMsTUFBTSxJQUFJLEVBQUUsQ0FBQztRQUNiLElBQUksQ0FBQyxDQUFDLEdBQUcsQ0FBQyxFQUFFLElBQUksQ0FBQyxDQUFDLEdBQUcsQ0FBQyxNQUFNLEtBQUssR0FBRyxFQUFFLENBQUM7WUFDbkMsQ0FBQyxDQUFDLEdBQUcsQ0FBQyxPQUFPLENBQUMsR0FBRyxDQUFDLGVBQWUsRUFBRSxxQ0FBcUMsQ0FBQyxDQUFDO1FBQzlFLENBQUM7SUFDTCxDQUFDLENBQUMsQ0FBQztJQUNILEdBQUcsQ0FBQyxHQUFHLENBQUMsV0FBVyxFQUFFLFdBQVcsQ0FBQztRQUM3QixJQUFJLEVBQUUsT0FBTyxDQUFDLEdBQUcsQ0FBQyxnQkFBZ0IsSUFBSSxTQUFTO1FBQy9DLFVBQVUsRUFBRSxDQUFDLElBQUksRUFBRSxDQUFDLEVBQUUsRUFBRTtZQUNwQixPQUFPLFNBQVMsQ0FBQztRQUNyQixDQUFDO0tBQ0osQ0FBQyxDQUFDLENBQUM7SUFFSixxQkFBcUI7SUFDckIsR0FBRyxDQUFDLEdBQUcsQ0FBQyxpQkFBaUIsRUFBRSxXQUFXLENBQUM7UUFDbkMsSUFBSSxFQUFFLE9BQU8sQ0FBQyxHQUFHLENBQUMsZ0JBQWdCLENBQUMsQ0FBQyxDQUFDLElBQUksQ0FBQyxJQUFJLENBQUMsT0FBTyxDQUFDLEdBQUcsQ0FBQyxnQkFBZ0IsRUFBRSxjQUFjLENBQUMsQ0FBQyxDQUFDLENBQUMsc0JBQXNCO1FBQ3JILFVBQVUsRUFBRSxDQUFDLElBQUksRUFBRSxDQUFDLEVBQUUsRUFBRTtZQUNwQixPQUFPLFNBQVMsQ0FBQztRQUNyQixDQUFDO1FBQ0Qsa0JBQWtCLEVBQUUsQ0FBQyxJQUFJLEVBQUUsRUFBRSxDQUFDLElBQUksQ0FBQyxPQUFPLENBQUMsaUJBQWlCLEVBQUUsRUFBRSxDQUFDO0tBQ3BFLENBQUMsQ0FBQyxDQUFDO0lBRUosMkJBQTJCO0lBQzNCLEdBQUcsQ0FBQyxHQUFHLENBQUMsdUJBQXVCLEVBQUUsV0FBVyxDQUFDO1FBQ3pDLElBQUksRUFBRSxPQUFPLENBQUMsR0FBRyxDQUFDLGdCQUFnQixDQUFDLENBQUMsQ0FBQyxJQUFJLENBQUMsSUFBSSxDQUFDLE9BQU8sQ0FBQyxHQUFHLENBQUMsZ0JBQWdCLEVBQUUsb0JBQW9CLENBQUMsQ0FBQyxDQUFDLENBQUMsNEJBQTRCO1FBQ2pJLFVBQVUsRUFBRSxDQUFDLElBQUksRUFBRS
xDQUFDLEVBQUUsRUFBRTtZQUNwQixPQUFPLFNBQVMsQ0FBQztRQUNyQixDQUFDO1FBQ0Qsa0JBQWtCLEVBQUUsQ0FBQyxJQUFJLEVBQUUsRUFBRSxDQUFDLElBQUksQ0FBQyxPQUFPLENBQUMsdUJBQXVCLEVBQUUsRUFBRSxDQUFDO0tBQzFFLENBQUMsQ0FBQyxDQUFDO0lBRUosaUNBQWlDO0lBQ2pDLEdBQUcsQ0FBQyxHQUFHLENBQUMsR0FBRyxFQUFFLFdBQVcsQ0FBQztRQUNyQixJQUFJLEVBQUUsT0FBTyxDQUFDLEdBQUcsQ0FBQyxnQkFBZ0IsSUFBSSxTQUFTO1FBQy9DLElBQUksRUFBRSxZQUFZO0tBQ3JCLENBQUMsQ0FBQyxDQUFDO0FBQ1IsQ0FBQyxDQUFBIn0=
|
||||
5
dist-in/zod-setup.js
Normal file
5
dist-in/zod-setup.js
Normal file
@ -0,0 +1,5 @@
|
||||
import { z } from 'zod';
|
||||
import { extendZodWithOpenApi } from '@hono/zod-openapi';
|
||||
extendZodWithOpenApi(z);
|
||||
export { z };
|
||||
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiem9kLXNldHVwLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vc3JjL3pvZC1zZXR1cC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxPQUFPLEVBQUUsQ0FBQyxFQUFFLE1BQU0sS0FBSyxDQUFDO0FBQ3hCLE9BQU8sRUFBRSxvQkFBb0IsRUFBRSxNQUFNLG1CQUFtQixDQUFDO0FBRXpELG9CQUFvQixDQUFDLENBQUMsQ0FBQyxDQUFDO0FBRXhCLE9BQU8sRUFBRSxDQUFDLEVBQUUsQ0FBQyJ9
|
||||
7
openapitools.json
Normal file
7
openapitools.json
Normal file
@ -0,0 +1,7 @@
|
||||
{
|
||||
"$schema": "./node_modules/@openapitools/openapi-generator-cli/config.schema.json",
|
||||
"spaces": 2,
|
||||
"generator-cli": {
|
||||
"version": "7.19.0"
|
||||
}
|
||||
}
|
||||
14742
package-lock.json
generated
Normal file
14742
package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
139
package.json
139
package.json
@ -1,45 +1,104 @@
|
||||
{
|
||||
"name": "@plastichub/template",
|
||||
"name": "server",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"version": "0.3.1",
|
||||
"main": "main.js",
|
||||
"typings": "index.d.ts",
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev-o": "tsx watch src/index.ts",
|
||||
"dev": "sh ./scripts/dev.sh",
|
||||
"start": "node dist/main_node.cjs",
|
||||
"build": "tsc -p tsconfig.build.json",
|
||||
"build:webpack": "npm run build && webpack --config webpack.config.js",
|
||||
"build:exe": "npm run build:webpack && node scripts/nexe.js",
|
||||
"generate:config": "npx vite-node scripts/generate-app-config.ts",
|
||||
"seed": "tsx src/seed.ts",
|
||||
"migrate": "cd .. && npx supabase db push",
|
||||
"test": "vitest --run",
|
||||
"test:feed": "vitest run src/products/serving/__tests__/feed.e2e.test.ts",
|
||||
"test:ui": "vitest --run --ui",
|
||||
"test:coverage": "vitest --run --coverage",
|
||||
"test:usage": "vitest run usage",
|
||||
"test:emails": "vitest run email_real",
|
||||
"test:pgboss": "vitest run pgboss",
|
||||
"test:pgboss-e2e": "vitest run pgboss-e2e",
|
||||
"test:product:locations": "vitest run src/products/locations/__tests__/e2e.test.ts",
|
||||
"test:product:images": "vitest run src/products/images/__tests__/e2e.test.ts",
|
||||
"test:images": "vitest run src/products/images/__tests__/e2e.test.ts",
|
||||
"test:video": "vitest run src/products/videos/__tests__/e2e.test.ts",
|
||||
"test:video:probe": "vitest run src/products/videos/__tests__/upload.test.ts",
|
||||
"test:serve": "vitest run src/products/serving/__tests__/e2e.test.ts",
|
||||
"supabase:types": "npx supabase gen types typescript --linked > ./src/integrations/supabase/types.ts",
|
||||
"semgrep": "sh ./scripts/semgrep.sh",
|
||||
"scalar:token": "tsx scripts/get-scalar-token.js",
|
||||
"test:bots": "vitest run src/products/serving/__tests__/e2e.test.ts -t \"should serve HTML to bots\"",
|
||||
"swagger:get": "tsx scripts/swagger-get.js",
|
||||
"swagger:gen": "tsx scripts/swagger-gen-client.js",
|
||||
"supabase:schemas": "supazod -i src/integrations/supabase/types.ts -o src/integrations/supabase/schemas.ts --inline-types -s public"
|
||||
},
|
||||
"bin": {
|
||||
"osr-bin": "main.js"
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"devDependencies": {
|
||||
"@hey-api/client-fetch": "^0.13.1",
|
||||
"@hey-api/openapi-ts": "^0.92.3",
|
||||
"@hono/node-server": "^1.19.6",
|
||||
"@types/node": "^24.10.1",
|
||||
"@types/pg": "^8.15.6",
|
||||
"@vitest/ui": "^4.0.18",
|
||||
"hono": "^4.10.6",
|
||||
"nexe": "^1.1.6",
|
||||
"pino-pretty": "^13.1.2",
|
||||
"supazod": "^4.5.0",
|
||||
"tsx": "^4.20.6",
|
||||
"typescript": "^5.9.3",
|
||||
"vite-tsconfig-paths": "^5.1.4",
|
||||
"vitest": "^4.0.18",
|
||||
"webpack": "^5.97.1",
|
||||
"webpack-cli": "^6.0.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"@types/node": "^14.17.5",
|
||||
"@types/yargs": "^17.0.2",
|
||||
"chalk": "^2.4.1",
|
||||
"convert-units": "^2.3.4",
|
||||
"env-var": "^7.0.1",
|
||||
"typescript": "^4.3.5",
|
||||
"yargs": "^14.2.3",
|
||||
"yargs-parser": "^15.0.3"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "tsc; mocha --full-trace mocha \"spec/**/*.spec.js\"",
|
||||
"test-with-coverage": "istanbul cover node_modules/.bin/_mocha -- 'spec/**/*.spec.js'",
|
||||
"lint": "tslint --project=./tsconfig.json",
|
||||
"build": "tsc -p .",
|
||||
"dev": "tsc -p . --declaration -w",
|
||||
"typings": "tsc --declaration",
|
||||
"docs": "npx typedoc src/index.ts",
|
||||
"dev-test-watch": "mocha-typescript-watch"
|
||||
},
|
||||
"homepage": "https://git.osr-plastic.org/plastichub/lib-content",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://git.osr-plastic.org/plastichub/lib-content.git"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 14.0.0"
|
||||
},
|
||||
"license": "BSD-3-Clause",
|
||||
"keywords": [
|
||||
"typescript"
|
||||
]
|
||||
}
|
||||
"@hono/node-ws": "^1.2.0",
|
||||
"@hono/swagger-ui": "^0.5.2",
|
||||
"@hono/zod-openapi": "^1.1.5",
|
||||
"@polymech/acl": "file:../polymech-mono/packages/acl",
|
||||
"@polymech/commons": "file:../polymech-mono/packages/commons",
|
||||
"@polymech/core": "file:../polymech-mono/packages/core",
|
||||
"@polymech/fs": "file:../polymech-mono/packages/fs",
|
||||
"@polymech/i18n": "file:../polymech-mono/packages/i18n",
|
||||
"@polymech/media": "file:../polymech-mono/packages/media",
|
||||
"@polymech/registry": "file:../polymech-mono/packages/registry",
|
||||
"@polymech/search": "file:../polymech-mono/packages/search",
|
||||
"@scalar/hono-api-reference": "^0.9.25",
|
||||
"@ssut/tiktok-api": "^1.5.2",
|
||||
"@supabase/supabase-js": "^2.87.1",
|
||||
"@types/pdfmake": "^0.2.12",
|
||||
"@types/sharp": "^0.31.1",
|
||||
"@types/ws": "^8.18.1",
|
||||
"cheerio": "^1.1.2",
|
||||
"chokidar": "^5.0.0",
|
||||
"deepl-node": "^1.24.0",
|
||||
"dotenv": "^17.2.3",
|
||||
"exifreader": "^4.33.1",
|
||||
"feed": "^5.1.0",
|
||||
"glob": "^13.0.3",
|
||||
"hono-rate-limiter": "^0.4.2",
|
||||
"i": "^0.3.7",
|
||||
"ignore": "^7.0.5",
|
||||
"isbot": "^5.1.34",
|
||||
"juice": "^11.1.1",
|
||||
"lru-cache": "^11.2.4",
|
||||
"marked": "^17.0.1",
|
||||
"mime": "^4.1.0",
|
||||
"npm": "^11.7.0",
|
||||
"pdfmake": "^0.2.20",
|
||||
"pg-boss": "^12.5.2",
|
||||
"pino": "^10.1.0",
|
||||
"quicktype-core": "^23.2.6",
|
||||
"sharp": "^0.34.5",
|
||||
"stripe": "^20.3.1",
|
||||
"swagger-typescript-codegen": "^3.2.4",
|
||||
"vite-node": "^5.3.0",
|
||||
"ws": "^8.19.0",
|
||||
"zod": "^4.3.6"
|
||||
}
|
||||
}
|
||||
219
src/__tests__/blocklist.test.ts
Normal file
219
src/__tests__/blocklist.test.ts
Normal file
@ -0,0 +1,219 @@
|
||||
import { describe, it, expect, beforeAll } from 'vitest'
|
||||
import { app } from '../index.js'
|
||||
import {
|
||||
loadBlocklist,
|
||||
getBlocklist,
|
||||
isIPBlocked,
|
||||
isUserBlocked,
|
||||
isTokenBlocked
|
||||
} from '../middleware/blocklist.js'
|
||||
|
||||
describe('Blocklist Middleware', () => {
|
||||
beforeAll(() => {
|
||||
// Ensure blocklist is loaded
|
||||
loadBlocklist()
|
||||
})
|
||||
|
||||
describe('Blocklist Loading', () => {
|
||||
it('should load blocklist from JSON file', () => {
|
||||
const blocklist = getBlocklist()
|
||||
|
||||
expect(blocklist).toBeDefined()
|
||||
expect(blocklist).toHaveProperty('blockedIPs')
|
||||
expect(blocklist).toHaveProperty('blockedUserIds')
|
||||
expect(blocklist).toHaveProperty('blockedTokens')
|
||||
})
|
||||
|
||||
it('should have blocked IPs in the list', () => {
|
||||
const blocklist = getBlocklist()
|
||||
|
||||
expect(Array.isArray(blocklist.blockedIPs)).toBe(true)
|
||||
expect(blocklist.blockedIPs.length).toBeGreaterThan(0)
|
||||
})
|
||||
|
||||
it('should have blocked user IDs in the list', () => {
|
||||
const blocklist = getBlocklist()
|
||||
|
||||
expect(Array.isArray(blocklist.blockedUserIds)).toBe(true)
|
||||
expect(blocklist.blockedUserIds.length).toBeGreaterThan(0)
|
||||
})
|
||||
|
||||
it('should have blocked tokens in the list', () => {
|
||||
const blocklist = getBlocklist()
|
||||
|
||||
expect(Array.isArray(blocklist.blockedTokens)).toBe(true)
|
||||
expect(blocklist.blockedTokens.length).toBeGreaterThan(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe('IP Blocking', () => {
|
||||
it('should block requests from blocked IP addresses', async () => {
|
||||
const response = await app.request('/api/products', {
|
||||
headers: {
|
||||
'x-forwarded-for': '192.168.1.100' // Blocked IP from blocklist.json
|
||||
}
|
||||
})
|
||||
|
||||
expect(response.status).toBe(403)
|
||||
|
||||
const data = await response.json()
|
||||
expect(data).toHaveProperty('error')
|
||||
expect(data.error).toBe('Forbidden')
|
||||
expect(data.message).toContain('IP address has been blocked')
|
||||
}, 10000)
|
||||
|
||||
it('should allow requests from non-blocked IP addresses', async () => {
|
||||
const response = await app.request('/api/subscriptions', {
|
||||
headers: {
|
||||
'x-forwarded-for': '192.168.1.1' // Non-blocked IP
|
||||
}
|
||||
})
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
})
|
||||
|
||||
it('should check IP blocking correctly', () => {
|
||||
expect(isIPBlocked('192.168.1.100')).toBe(true)
|
||||
expect(isIPBlocked('10.0.0.50')).toBe(true)
|
||||
expect(isIPBlocked('192.168.1.1')).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe('User ID Blocking', () => {
|
||||
it('should block requests from blocked user IDs', async () => {
|
||||
const response = await app.request('/api/products', {
|
||||
headers: {
|
||||
'authorization': 'user_banned_123', // Blocked user from blocklist.json
|
||||
'x-forwarded-for': '192.168.1.1'
|
||||
}
|
||||
})
|
||||
|
||||
expect(response.status).toBe(403)
|
||||
|
||||
const data = await response.json()
|
||||
expect(data).toHaveProperty('error')
|
||||
expect(data.error).toBe('Forbidden')
|
||||
expect(data.message).toContain('account has been blocked')
|
||||
}, 10000)
|
||||
|
||||
it('should allow requests from non-blocked users', async () => {
|
||||
const response = await app.request('/api/subscriptions', {
|
||||
headers: {
|
||||
'authorization': 'user_valid_789',
|
||||
'x-forwarded-for': '192.168.1.1'
|
||||
}
|
||||
})
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
})
|
||||
|
||||
it('should check user blocking correctly', () => {
|
||||
expect(isUserBlocked('user_banned_123')).toBe(true)
|
||||
expect(isUserBlocked('user_spam_456')).toBe(true)
|
||||
expect(isUserBlocked('user_valid_789')).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Token Blocking', () => {
|
||||
it('should block requests with blocked tokens', async () => {
|
||||
const response = await app.request('/api/products', {
|
||||
headers: {
|
||||
'authorization': 'Bearer malicious_token_xyz', // Blocked token
|
||||
'x-forwarded-for': '192.168.1.1'
|
||||
}
|
||||
})
|
||||
|
||||
expect(response.status).toBe(403)
|
||||
|
||||
const data = await response.json()
|
||||
expect(data).toHaveProperty('error')
|
||||
expect(data.error).toBe('Forbidden')
|
||||
expect(data.message).toContain('access token has been blocked')
|
||||
}, 10000)
|
||||
|
||||
it('should allow requests with valid tokens', async () => {
|
||||
const response = await app.request('/api/subscriptions', {
|
||||
headers: {
|
||||
'authorization': 'Bearer valid_token_abc',
|
||||
'x-forwarded-for': '192.168.1.1'
|
||||
}
|
||||
})
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
})
|
||||
|
||||
it('should check token blocking correctly', () => {
|
||||
expect(isTokenBlocked('Bearer malicious_token_xyz')).toBe(true)
|
||||
expect(isTokenBlocked('Bearer valid_token_abc')).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Multiple Blocking Criteria', () => {
|
||||
it('should block if IP is blocked even with valid token', async () => {
|
||||
const response = await app.request('/api/products', {
|
||||
headers: {
|
||||
'authorization': 'Bearer valid_token_abc',
|
||||
'x-forwarded-for': '192.168.1.100' // Blocked IP
|
||||
}
|
||||
})
|
||||
|
||||
expect(response.status).toBe(403)
|
||||
expect((await response.json()).message).toContain('IP address')
|
||||
}, 10000)
|
||||
|
||||
it('should block if token is blocked even from valid IP', async () => {
|
||||
const response = await app.request('/api/products', {
|
||||
headers: {
|
||||
'authorization': 'Bearer malicious_token_xyz', // Blocked token
|
||||
'x-forwarded-for': '192.168.1.1' // Valid IP
|
||||
}
|
||||
})
|
||||
|
||||
expect(response.status).toBe(403)
|
||||
expect((await response.json()).message).toContain('access token')
|
||||
}, 10000)
|
||||
})
|
||||
|
||||
describe('Non-API Routes', () => {
|
||||
it('should not apply blocklist to non-API routes', async () => {
|
||||
// Root route should not be blocked even from blocked IP
|
||||
const response = await app.request('/', {
|
||||
headers: {
|
||||
'x-forwarded-for': '192.168.1.100' // Blocked IP
|
||||
}
|
||||
})
|
||||
|
||||
// This might still be blocked if we apply blocklist globally
|
||||
// But based on our middleware setup, only /api/* is protected
|
||||
expect(response.status).toBe(200)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Error Response Structure', () => {
|
||||
it('should return consistent error structure for blocked requests', async () => {
|
||||
const response = await app.request('/api/products', {
|
||||
headers: {
|
||||
'x-forwarded-for': '192.168.1.100'
|
||||
}
|
||||
})
|
||||
|
||||
expect(response.status).toBe(403)
|
||||
|
||||
const data = await response.json()
|
||||
expect(data).toHaveProperty('error')
|
||||
expect(data).toHaveProperty('message')
|
||||
expect(typeof data.error).toBe('string')
|
||||
expect(typeof data.message).toBe('string')
|
||||
}, 10000)
|
||||
})
|
||||
|
||||
describe('Blocklist Reload', () => {
|
||||
it('should be able to reload blocklist', () => {
|
||||
const initialBlocklist = getBlocklist()
|
||||
const reloadedBlocklist = loadBlocklist()
|
||||
|
||||
expect(reloadedBlocklist).toBeDefined()
|
||||
expect(reloadedBlocklist.blockedIPs).toEqual(initialBlocklist.blockedIPs)
|
||||
})
|
||||
})
|
||||
})
|
||||
1
src/__tests__/commons.ts
Normal file
1
src/__tests__/commons.ts
Normal file
@ -0,0 +1 @@
|
||||
export const TEST_LOCATION_PLACE_ID = 'ChIJ_burz4DrpBIR7Tb0r_IWzQI'
|
||||
120
src/__tests__/endpoints.test.ts
Normal file
120
src/__tests__/endpoints.test.ts
Normal file
@ -0,0 +1,120 @@
|
||||
import { describe, it, expect } from 'vitest'
|
||||
import { app } from '../index'
|
||||
|
||||
// Helper to generate unique IP for each test to avoid rate limiting
|
||||
let ipCounter = 0
|
||||
function getUniqueIP() {
|
||||
return `10.0.${Math.floor(ipCounter / 255)}.${ipCounter++ % 255}`
|
||||
}
|
||||
|
||||
describe('API Endpoints', () => {
|
||||
describe('GET /', () => {
|
||||
it('should return welcome message', async () => {
|
||||
const res = await app.request('/')
|
||||
expect(res.status).toBe(200)
|
||||
|
||||
const text = await res.text()
|
||||
expect(text).toBe('Hello Hono + Supabase + Swagger!')
|
||||
})
|
||||
})
|
||||
|
||||
describe('GET /api/products', () => {
|
||||
it('should return products array', async () => {
|
||||
const res = await app.request('/api/products', {
|
||||
headers: { 'x-forwarded-for': getUniqueIP() }
|
||||
})
|
||||
expect(res.status).toBe(200)
|
||||
|
||||
const data = await res.json()
|
||||
expect(Array.isArray(data)).toBe(true)
|
||||
}, 10000)
|
||||
|
||||
it('should have correct content-type header', async () => {
|
||||
const res = await app.request('/api/products', {
|
||||
headers: { 'x-forwarded-for': getUniqueIP() }
|
||||
})
|
||||
expect(res.headers.get('content-type')).toContain('application/json')
|
||||
}, 10000)
|
||||
})
|
||||
|
||||
describe('GET /api/subscriptions', () => {
|
||||
it('should return subscriptions array', async () => {
|
||||
const res = await app.request('/api/subscriptions', {
|
||||
headers: { 'x-forwarded-for': getUniqueIP() }
|
||||
})
|
||||
expect(res.status).toBe(200)
|
||||
|
||||
const data = await res.json()
|
||||
expect(Array.isArray(data)).toBe(true)
|
||||
expect(data.length).toBeGreaterThan(0)
|
||||
})
|
||||
|
||||
it('should return valid subscription objects', async () => {
|
||||
const res = await app.request('/api/subscriptions', {
|
||||
headers: { 'x-forwarded-for': getUniqueIP() }
|
||||
})
|
||||
const data = await res.json()
|
||||
|
||||
expect(data[0]).toHaveProperty('id')
|
||||
expect(data[0]).toHaveProperty('name')
|
||||
expect(data[0]).toHaveProperty('price')
|
||||
expect(typeof data[0].price).toBe('number')
|
||||
})
|
||||
})
|
||||
|
||||
describe('GET /api/admin/stats', () => {
|
||||
it('should return stats object', async () => {
|
||||
const res = await app.request('/api/admin/stats', {
|
||||
headers: { 'x-forwarded-for': getUniqueIP() }
|
||||
})
|
||||
expect(res.status).toBe(200)
|
||||
|
||||
const data = await res.json()
|
||||
expect(data).toHaveProperty('users')
|
||||
expect(data).toHaveProperty('revenue')
|
||||
})
|
||||
|
||||
it('should return numeric values for stats', async () => {
|
||||
const res = await app.request('/api/admin/stats', {
|
||||
headers: { 'x-forwarded-for': getUniqueIP() }
|
||||
})
|
||||
const data = await res.json()
|
||||
|
||||
expect(typeof data.users).toBe('number')
|
||||
expect(typeof data.revenue).toBe('number')
|
||||
expect(data.users).toBeGreaterThanOrEqual(0)
|
||||
expect(data.revenue).toBeGreaterThanOrEqual(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe('OpenAPI Documentation', () => {
|
||||
it('should serve OpenAPI spec at /doc', async () => {
|
||||
const res = await app.request('/doc')
|
||||
expect(res.status).toBe(200)
|
||||
|
||||
const spec = await res.json()
|
||||
expect(spec).toHaveProperty('openapi')
|
||||
expect(spec).toHaveProperty('info')
|
||||
expect(spec.info.title).toBe('SaaS API')
|
||||
})
|
||||
|
||||
it('should serve Swagger UI at /ui', async () => {
|
||||
const res = await app.request('/ui')
|
||||
expect(res.status).toBe(200)
|
||||
})
|
||||
|
||||
it('should serve Scalar reference at /reference', async () => {
|
||||
const res = await app.request('/reference')
|
||||
expect(res.status).toBe(200)
|
||||
})
|
||||
})
|
||||
|
||||
describe('CORS', () => {
|
||||
it('should have CORS headers enabled', async () => {
|
||||
const res = await app.request('/api/products', {
|
||||
headers: { 'x-forwarded-for': getUniqueIP() }
|
||||
})
|
||||
expect(res.headers.get('access-control-allow-origin')).toBeDefined()
|
||||
}, 10000)
|
||||
})
|
||||
})
|
||||
163
src/__tests__/products.test.ts
Normal file
163
src/__tests__/products.test.ts
Normal file
@ -0,0 +1,163 @@
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import { identifyProductAction, getActionConfig, getAllProducts, getProductActions } from '../config/products';
|
||||
|
||||
describe('Product Configuration', () => {
|
||||
describe('identifyProductAction', () => {
|
||||
it('should identify competitors search action', () => {
|
||||
const result = identifyProductAction('/api/competitors', 'GET');
|
||||
|
||||
expect(result.product).toBe('competitors');
|
||||
expect(result.action).toBe('search');
|
||||
expect(result.config).toBeDefined();
|
||||
expect(result.config?.costUnits).toBe(1.0);
|
||||
expect(result.config?.cancellable).toBe(true);
|
||||
});
|
||||
|
||||
it('should identify competitors get_details action with parameter', () => {
|
||||
const result = identifyProductAction('/api/competitors/ChIJd8BlQ2BZwokRAFUEcm_qrcA', 'GET');
|
||||
|
||||
expect(result.product).toBe('competitors');
|
||||
expect(result.action).toBe('get_details');
|
||||
expect(result.config).toBeDefined();
|
||||
expect(result.config?.costUnits).toBe(0.0);
|
||||
expect(result.config?.cancellable).toBe(false);
|
||||
});
|
||||
|
||||
it('should identify images upload action', () => {
|
||||
const result = identifyProductAction('/api/images', 'POST');
|
||||
|
||||
expect(result.product).toBe('images');
|
||||
expect(result.action).toBe('upload');
|
||||
expect(result.config).toBeDefined();
|
||||
expect(result.config?.costUnits).toBe(2.0);
|
||||
expect(result.config?.cancellable).toBe(true);
|
||||
});
|
||||
|
||||
it('should identify images get action with parameter', () => {
|
||||
const result = identifyProductAction('/api/images/abc123', 'GET');
|
||||
|
||||
expect(result.product).toBe('images');
|
||||
expect(result.action).toBe('get');
|
||||
expect(result.config?.costUnits).toBe(0.05);
|
||||
});
|
||||
|
||||
it('should return null for untracked endpoints', () => {
|
||||
const result = identifyProductAction('/api/unknown', 'GET');
|
||||
|
||||
expect(result.product).toBeNull();
|
||||
expect(result.action).toBeNull();
|
||||
expect(result.config).toBeNull();
|
||||
});
|
||||
|
||||
it('should return null for wrong method', () => {
|
||||
const result = identifyProductAction('/api/competitors', 'POST');
|
||||
|
||||
expect(result.product).toBeNull();
|
||||
expect(result.action).toBeNull();
|
||||
});
|
||||
|
||||
it('should handle multiple path parameters', () => {
|
||||
const result = identifyProductAction('/api/images/abc123/xyz789', 'GET');
|
||||
|
||||
// Should not match since pattern is /api/images/:id
|
||||
expect(result.product).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getActionConfig', () => {
|
||||
it('should get config for valid product and action', () => {
|
||||
const config = getActionConfig('competitors', 'search');
|
||||
|
||||
expect(config).toBeDefined();
|
||||
expect(config?.endpoint).toBe('/api/competitors');
|
||||
expect(config?.method).toBe('GET');
|
||||
expect(config?.costUnits).toBe(1.0);
|
||||
});
|
||||
|
||||
it('should return null for invalid product', () => {
|
||||
const config = getActionConfig('invalid', 'search');
|
||||
|
||||
expect(config).toBeNull();
|
||||
});
|
||||
|
||||
it('should return null for invalid action', () => {
|
||||
const config = getActionConfig('competitors', 'invalid');
|
||||
|
||||
expect(config).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getAllProducts', () => {
|
||||
it('should return all product names', () => {
|
||||
const products = getAllProducts();
|
||||
|
||||
expect(Array.isArray(products)).toBe(true);
|
||||
expect(products).toContain('competitors');
|
||||
expect(products).toContain('images');
|
||||
expect(products.length).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getProductActions', () => {
|
||||
it('should return all actions for competitors product', () => {
|
||||
const actions = getProductActions('competitors');
|
||||
|
||||
expect(Array.isArray(actions)).toBe(true);
|
||||
expect(actions).toContain('search');
|
||||
expect(actions).toContain('get_details');
|
||||
});
|
||||
|
||||
it('should return all actions for images product', () => {
|
||||
const actions = getProductActions('images');
|
||||
|
||||
expect(Array.isArray(actions)).toBe(true);
|
||||
expect(actions).toContain('upload');
|
||||
expect(actions).toContain('get');
|
||||
expect(actions).toContain('update');
|
||||
});
|
||||
|
||||
it('should return empty array for invalid product', () => {
|
||||
const actions = getProductActions('invalid');
|
||||
|
||||
expect(Array.isArray(actions)).toBe(true);
|
||||
expect(actions.length).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Cost Units', () => {
|
||||
it('should have consistent cost units across products', () => {
|
||||
const competitorsSearch = getActionConfig('competitors', 'search');
|
||||
const competitorsDetails = getActionConfig('competitors', 'get_details');
|
||||
const imagesUpload = getActionConfig('images', 'upload');
|
||||
const imagesGet = getActionConfig('images', 'get');
|
||||
|
||||
// Search/upload should be more expensive than get
|
||||
expect(competitorsSearch?.costUnits).toBeGreaterThan(competitorsDetails?.costUnits || 0);
|
||||
expect(imagesUpload?.costUnits).toBeGreaterThan(imagesGet?.costUnits || 0);
|
||||
|
||||
// All cost units should be non-negative
|
||||
expect(competitorsSearch?.costUnits).toBeGreaterThan(0);
|
||||
expect(competitorsDetails?.costUnits).toBeGreaterThanOrEqual(0);
|
||||
expect(imagesUpload?.costUnits).toBeGreaterThan(0);
|
||||
expect(imagesGet?.costUnits).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Cancellable Actions', () => {
|
||||
it('should mark long-running actions as cancellable', () => {
|
||||
const competitorsSearch = getActionConfig('competitors', 'search');
|
||||
const imagesUpload = getActionConfig('images', 'upload');
|
||||
|
||||
expect(competitorsSearch?.cancellable).toBe(true);
|
||||
expect(imagesUpload?.cancellable).toBe(true);
|
||||
});
|
||||
|
||||
it('should mark quick actions as non-cancellable', () => {
|
||||
const competitorsDetails = getActionConfig('competitors', 'get_details');
|
||||
const imagesGet = getActionConfig('images', 'get');
|
||||
|
||||
expect(competitorsDetails?.cancellable).toBe(false);
|
||||
expect(imagesGet?.cancellable).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
145
src/__tests__/rateLimiter.test.ts
Normal file
145
src/__tests__/rateLimiter.test.ts
Normal file
@ -0,0 +1,145 @@
|
||||
import { describe, it, expect } from 'vitest'
|
||||
import { app } from '../index'
|
||||
|
||||
describe('Rate Limiting', () => {
|
||||
// Helper to make multiple requests sequentially
|
||||
async function makeSequentialRequests(path: string, count: number, ip?: string) {
|
||||
const responses = []
|
||||
for (let i = 0; i < count; i++) {
|
||||
const headers: Record<string, string> = {}
|
||||
if (ip) {
|
||||
headers['x-forwarded-for'] = ip
|
||||
}
|
||||
const response = await app.request(path, { headers })
|
||||
responses.push(response)
|
||||
}
|
||||
return responses
|
||||
}
|
||||
|
||||
// Helper to make multiple requests concurrently
|
||||
async function makeRequests(path: string, count: number, ip?: string) {
|
||||
const requests = []
|
||||
for (let i = 0; i < count; i++) {
|
||||
const headers: Record<string, string> = {}
|
||||
if (ip) {
|
||||
headers['x-forwarded-for'] = ip
|
||||
}
|
||||
requests.push(app.request(path, { headers }))
|
||||
}
|
||||
return Promise.all(requests)
|
||||
}
|
||||
|
||||
describe('API Rate Limiting', () => {
|
||||
it('should allow requests within rate limit', async () => {
|
||||
// Make 2 requests sequentially (limit is 2 per 100ms)
|
||||
const responses = await makeSequentialRequests('/api/subscriptions', 2, '192.168.2.1')
|
||||
|
||||
// Both requests should succeed
|
||||
expect(responses[0].status).toBe(200)
|
||||
expect(responses[1].status).toBe(200)
|
||||
})
|
||||
|
||||
it('should block requests exceeding rate limit', async () => {
|
||||
// Make 3 requests quickly from same IP
|
||||
const responses = await makeSequentialRequests('/api/subscriptions', 3, '192.168.2.2')
|
||||
|
||||
// First 2 should succeed
|
||||
expect(responses[0].status).toBe(200)
|
||||
expect(responses[1].status).toBe(200)
|
||||
|
||||
// Third should be rate limited
|
||||
expect(responses[2].status).toBe(429)
|
||||
|
||||
const errorData = await responses[2].json()
|
||||
expect(errorData).toHaveProperty('error')
|
||||
expect(errorData.error).toBe('Too many requests')
|
||||
})
|
||||
|
||||
it('should include rate limit headers', async () => {
|
||||
const response = await app.request('/api/products', {
|
||||
headers: { 'x-forwarded-for': '192.168.2.3' }
|
||||
})
|
||||
|
||||
// Check for rate limit headers
|
||||
const headers = response.headers
|
||||
expect(headers.get('ratelimit-limit')).toBeDefined()
|
||||
expect(headers.get('ratelimit-remaining')).toBeDefined()
|
||||
}, 10000)
|
||||
|
||||
it('should track different IPs separately', async () => {
|
||||
// Make 2 requests from IP 1
|
||||
const ip1Responses = await makeRequests('/api/subscriptions', 2, '192.168.2.4')
|
||||
|
||||
// Make 2 requests from IP 2
|
||||
const ip2Responses = await makeRequests('/api/subscriptions', 2, '192.168.2.5')
|
||||
|
||||
// All should succeed as they're from different IPs
|
||||
expect(ip1Responses[0].status).toBe(200)
|
||||
expect(ip1Responses[1].status).toBe(200)
|
||||
expect(ip2Responses[0].status).toBe(200)
|
||||
expect(ip2Responses[1].status).toBe(200)
|
||||
})
|
||||
|
||||
it('should track authenticated users separately from IP', async () => {
|
||||
// Request with auth header
|
||||
const authResponse = await app.request('/api/subscriptions', {
|
||||
headers: {
|
||||
'authorization': 'Bearer user123',
|
||||
'x-forwarded-for': '192.168.2.6'
|
||||
}
|
||||
})
|
||||
|
||||
// Request from same IP but no auth
|
||||
const noAuthResponse = await app.request('/api/subscriptions', {
|
||||
headers: {
|
||||
'x-forwarded-for': '192.168.2.6'
|
||||
}
|
||||
})
|
||||
|
||||
// Both should succeed as they're tracked separately
|
||||
expect(authResponse.status).toBe(200)
|
||||
expect(noAuthResponse.status).toBe(200)
|
||||
})
|
||||
|
||||
it('should reset rate limit after time window', async () => {
|
||||
// Make 2 requests (hit the limit)
|
||||
const firstBatch = await makeSequentialRequests('/api/subscriptions', 2, '192.168.2.7')
|
||||
expect(firstBatch[0].status).toBe(200)
|
||||
expect(firstBatch[1].status).toBe(200)
|
||||
|
||||
// Wait for the time window to pass (100ms + buffer)
|
||||
await new Promise(resolve => setTimeout(resolve, 150))
|
||||
|
||||
// Should be able to make requests again
|
||||
const response = await app.request('/api/subscriptions', {
|
||||
headers: { 'x-forwarded-for': '192.168.2.7' }
|
||||
})
|
||||
expect(response.status).toBe(200)
|
||||
}, 15000)
|
||||
|
||||
it('should not rate limit non-API routes', async () => {
|
||||
// Root route should not be rate limited
|
||||
const responses = await makeRequests('/', 5, '192.168.2.8')
|
||||
|
||||
// All should succeed
|
||||
responses.forEach(response => {
|
||||
expect(response.status).toBe(200)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('Rate Limit Error Response', () => {
|
||||
it('should return proper error structure when rate limited', async () => {
|
||||
// Exceed rate limit
|
||||
const responses = await makeSequentialRequests('/api/subscriptions', 3, '192.168.2.9')
|
||||
|
||||
const errorResponse = responses[2]
|
||||
expect(errorResponse.status).toBe(429)
|
||||
|
||||
const errorData = await errorResponse.json()
|
||||
expect(errorData).toHaveProperty('error')
|
||||
expect(errorData).toHaveProperty('message')
|
||||
expect(errorData.message).toContain('Rate limit exceeded')
|
||||
})
|
||||
})
|
||||
})
|
||||
123
src/cache.ts
Normal file
123
src/cache.ts
Normal file
@ -0,0 +1,123 @@
|
||||
import { getCache } from './commons/cache/index.js';
import { appEvents } from './events.js';
import { CacheEntryInfo, CacheInfo } from './commons/cache/types.js';
import pino from 'pino';
import path from 'path';

// Destination for cache activity logs: <cwd>/logs/cache.json (one JSON record per line).
const logFile = path.join(process.cwd(), 'logs', 'cache.json');

// File transport; mkdir:true creates the logs/ directory if it is missing.
const fileTransport = pino.transport({
    target: 'pino/file',
    options: { destination: logFile, mkdir: true }
});

// Module-scoped logger for cache operations. Every record is tagged
// { product: 'cache' } and timestamped in ISO format; only the file
// stream is attached (no console output from this module).
const logger = pino(
    {
        level: process.env.PINO_LOG_LEVEL || 'info',
        base: { product: 'cache' },
        timestamp: pino.stdTimeFunctions.isoTime,
    },
    pino.multistream([
        { stream: fileTransport, level: 'info' }
    ])
);
|
||||
|
||||
|
||||
export class AppCache {
|
||||
private static instance: AppCache;
|
||||
|
||||
// Dependencies: key -> [dependencies]
|
||||
// Defines what each type DEPENDS ON.
|
||||
// If 'categories' changes, any type that has 'categories' in its dependency list must be invalidated.
|
||||
private static DEPENDENCIES: Record<string, string[]> = {
|
||||
'posts': ['categories', 'pictures'], // posts depend on categories and pictures
|
||||
'pages': ['categories', 'pictures', 'translations'],
|
||||
'categories': ['types'],
|
||||
'translations': [], // widget/category translations (wt:* keys)
|
||||
'feed': ['posts', 'pages', 'categories'],
|
||||
'auth': [] // No dependencies, standalone
|
||||
};
|
||||
|
||||
private constructor() { }
|
||||
|
||||
public static getInstance(): AppCache {
|
||||
if (!AppCache.instance) {
|
||||
AppCache.instance = new AppCache();
|
||||
}
|
||||
return AppCache.instance;
|
||||
}
|
||||
|
||||
public async get<T>(type: string): Promise<T | null> {
|
||||
const cache = getCache();
|
||||
const val = await cache.get<T>(type);
|
||||
return val;
|
||||
}
|
||||
|
||||
public async set<T>(type: string, data: T, ttl?: number): Promise<void> {
|
||||
const cache = getCache();
|
||||
await cache.set(type, data, ttl);
|
||||
}
|
||||
|
||||
/**
|
||||
* Silent cache invalidation — clears cache for the given type and
|
||||
* cascades to dependents. Does NOT emit SSE events.
|
||||
* Use `notify()` in route handlers for explicit SSE.
|
||||
*/
|
||||
public async invalidate(type: string): Promise<void> {
|
||||
const cache = getCache();
|
||||
|
||||
if (type === 'feed') {
|
||||
await cache.flush('*-feed*');
|
||||
await cache.flush('home-feed*');
|
||||
} else if (type === 'translations') {
|
||||
await cache.flush('wt:*');
|
||||
await cache.flush('page-details-*');
|
||||
} else {
|
||||
await cache.del(type);
|
||||
}
|
||||
|
||||
// Find types that depend on this type
|
||||
const dependents = Object.keys(AppCache.DEPENDENCIES).filter(key =>
|
||||
AppCache.DEPENDENCIES[key].includes(type)
|
||||
);
|
||||
|
||||
logger.info({ type, dependents }, 'Cache invalidated');
|
||||
|
||||
if (dependents.length > 0) {
|
||||
await Promise.all(dependents.map(dep => this.invalidate(dep)));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Flush cache entries by pattern. Silent — no SSE.
|
||||
*/
|
||||
public async flush(pattern?: string): Promise<void> {
|
||||
const cache = getCache();
|
||||
await cache.flush(pattern);
|
||||
logger.info({ pattern: pattern || 'all' }, 'Cache flushed');
|
||||
}
|
||||
|
||||
/**
|
||||
* Emit exactly 1 SSE event to notify clients of a change.
|
||||
* Call this in route handlers AFTER cache invalidation.
|
||||
*
|
||||
* @param type - Entity type (e.g. 'post', 'page', 'category', 'picture')
|
||||
* @param id - Entity ID (null for list-level / system changes)
|
||||
* @param action - The mutation that occurred
|
||||
*/
|
||||
public notify(type: string, id: string | null, action: 'create' | 'update' | 'delete'): void {
|
||||
logger.info({ type, id, action }, 'Cache notify');
|
||||
appEvents.emitUpdate(type, action, { id }, 'cache');
|
||||
}
|
||||
|
||||
public inspect(): { info: CacheInfo; dependencies: Record<string, string[]>; entries: CacheEntryInfo[] } {
|
||||
const cache = getCache();
|
||||
return {
|
||||
info: cache.info(),
|
||||
dependencies: AppCache.DEPENDENCIES,
|
||||
entries: cache.entries(),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export const appCache = AppCache.getInstance();
|
||||
67
src/commons/cache/MemoryCache.ts
vendored
Normal file
67
src/commons/cache/MemoryCache.ts
vendored
Normal file
@ -0,0 +1,67 @@
|
||||
import { LRUCache } from 'lru-cache';
|
||||
import { CacheAdapter } from './types.js';
|
||||
|
||||
export class MemoryCache implements CacheAdapter {
|
||||
private cache: LRUCache<string, any>;
|
||||
|
||||
constructor() {
|
||||
const defaultTtl = process.env.CACHE_DEFAULT_TTL ? parseInt(process.env.CACHE_DEFAULT_TTL) : 1000 * 60 * 5; // 5 mins default
|
||||
this.cache = new LRUCache({
|
||||
max: 1000,
|
||||
ttl: defaultTtl,
|
||||
updateAgeOnGet: false,
|
||||
});
|
||||
}
|
||||
|
||||
async get<T>(key: string): Promise<T | null> {
|
||||
const value = this.cache.get(key);
|
||||
return (value as T) || null;
|
||||
}
|
||||
|
||||
async set<T>(key: string, value: T, ttl?: number): Promise<void> {
|
||||
this.cache.set(key, value, { ttl: ttl ? ttl * 1000 : undefined });
|
||||
}
|
||||
|
||||
async del(key: string): Promise<void> {
|
||||
this.cache.delete(key);
|
||||
}
|
||||
|
||||
async flush(pattern?: string): Promise<void> {
|
||||
if (pattern) {
|
||||
// Support simple wildcard patterns (e.g. "home-feed*", "*-feed*")
|
||||
// Escape special regex chars except *, then replace * with .*
|
||||
const regexPattern = pattern
|
||||
.replace(/[.+^${}()|[\]\\]/g, '\\$&') // Escape regex chars
|
||||
.replace(/\*/g, '.*'); // Convert * to .*
|
||||
|
||||
const regex = new RegExp(`^${regexPattern}$`);
|
||||
|
||||
for (const key of this.cache.keys()) {
|
||||
if (typeof key === 'string' && regex.test(key)) {
|
||||
this.cache.delete(key);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
this.cache.clear();
|
||||
}
|
||||
}
|
||||
|
||||
keys(): string[] {
|
||||
return [...this.cache.keys()];
|
||||
}
|
||||
|
||||
info() {
|
||||
return {
|
||||
size: this.cache.size,
|
||||
max: this.cache.max,
|
||||
provider: 'memory-lru',
|
||||
};
|
||||
}
|
||||
|
||||
entries() {
|
||||
return [...this.cache.keys()].map(key => ({
|
||||
key,
|
||||
remainingTTL: this.cache.getRemainingTTL(key),
|
||||
}));
|
||||
}
|
||||
}
|
||||
18
src/commons/cache/index.ts
vendored
Normal file
18
src/commons/cache/index.ts
vendored
Normal file
@ -0,0 +1,18 @@
|
||||
import { CacheAdapter } from './types.js';
|
||||
import { MemoryCache } from './MemoryCache.js';
|
||||
|
||||
// Design Pattern: Singleton or Factory
|
||||
// For now, we export a singleton instance based on ENV or default to Memory
|
||||
// Future: Read process.env.CACHE_PROVIDER == 'redis'
|
||||
|
||||
let instance: CacheAdapter | null = null;
|
||||
|
||||
export const getCache = (): CacheAdapter => {
|
||||
if (!instance) {
|
||||
instance = new MemoryCache();
|
||||
}
|
||||
return instance;
|
||||
};
|
||||
|
||||
export * from './types.js';
|
||||
export * from './MemoryCache.js';
|
||||
21
src/commons/cache/types.ts
vendored
Normal file
21
src/commons/cache/types.ts
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
/** Introspection record for a single cache entry (see CacheAdapter.entries). */
export interface CacheEntryInfo {
    key: string;
    remainingTTL: number; // ms remaining, 0 = no TTL / infinite
    sizeEstimate?: number; // rough byte estimate if available
}

/** Summary statistics for a cache instance (see CacheAdapter.info). */
export interface CacheInfo {
    size: number; // number of entries currently stored
    max: number; // configured capacity
    provider: string; // backend identifier, e.g. 'memory-lru'
}

/**
 * Contract every cache backend must implement.
 * `flush` accepts an optional wildcard pattern ('*' = any run of characters);
 * omitting the pattern clears everything.
 */
export interface CacheAdapter {
    get<T>(key: string): Promise<T | null>;
    set<T>(key: string, value: T, ttl?: number): Promise<void>; // ttl in seconds (MemoryCache converts to ms)
    del(key: string): Promise<void>;
    flush(pattern?: string): Promise<void>;
    keys(): string[];
    info(): CacheInfo;
    entries(): CacheEntryInfo[];
}
|
||||
249
src/commons/decorators.ts
Normal file
249
src/commons/decorators.ts
Normal file
@ -0,0 +1,249 @@
|
||||
import { trackUsage, updateUsageRecord } from '../middleware/usageTracking.js';
|
||||
import { FunctionRegistry, PublicEndpointRegistry, AdminEndpointRegistry } from './registry.js';
|
||||
import { logger } from './logger.js';
|
||||
|
||||
/**
|
||||
* Decorator/Wrapper to mark an endpoint as public
|
||||
* Registers the route in PublicEndpointRegistry
|
||||
*/
|
||||
export function Public<T extends { method: string, path: string }>(route: T): T {
|
||||
PublicEndpointRegistry.register(route.path, route.method);
|
||||
return route;
|
||||
}
|
||||
|
||||
/**
|
||||
* Decorator/Wrapper to mark an endpoint as admin-only
|
||||
* Registers the route in AdminEndpointRegistry
|
||||
*/
|
||||
export function Admin<T extends { method: string, path: string }>(route: T): T {
|
||||
AdminEndpointRegistry.register(route.path, route.method);
|
||||
return route;
|
||||
}
|
||||
|
||||
/** Per-call context expected as (part of) the first argument to a @Billable method. */
export interface BillableContext {
    userId: string; // recorded on the usage record via trackUsage
    jobId: string; // correlates the call with its background job
    signal?: AbortSignal; // optional cooperative-cancellation signal
    metadata?: Record<string, any>; // free-form extras forwarded to trackUsage
}

/** Static configuration for the @Billable decorator. */
export interface BillableOptions {
    productId: string; // FunctionRegistry lookup key (with actionId)
    actionId: string; // FunctionRegistry lookup key (with productId)
    cancellable?: boolean; // whether the operation checks ctx.signal
}
|
||||
|
||||
/**
|
||||
* Decorator to mark a method as billable
|
||||
* Handles usage tracking, context injection, and cancellation
|
||||
*/
|
||||
export function Billable(options: BillableOptions) {
|
||||
return function (
|
||||
target: any,
|
||||
propertyKey: string,
|
||||
descriptor: PropertyDescriptor
|
||||
) {
|
||||
const originalMethod = descriptor.value;
|
||||
|
||||
descriptor.value = async function (...args: any[]) {
|
||||
// 1. Extract context
|
||||
// Assumes the first argument is BillableContext, or it's part of the first argument object
|
||||
let context: BillableContext | undefined;
|
||||
|
||||
if (args.length > 0 && typeof args[0] === 'object') {
|
||||
// Check if first arg is context
|
||||
if ('userId' in args[0] && 'jobId' in args[0]) {
|
||||
context = args[0] as BillableContext;
|
||||
}
|
||||
}
|
||||
|
||||
if (!context) {
|
||||
// If no context provided, we can't track usage properly
|
||||
// For now, we'll log a warning and proceed without tracking
|
||||
// In strict mode, we might want to throw an error
|
||||
logger.warn(`[Billable] No context provided for ${options.productId}:${options.actionId}`);
|
||||
return originalMethod.apply(this, args);
|
||||
}
|
||||
|
||||
// 2. Get config
|
||||
const config = FunctionRegistry.get(options.productId, options.actionId);
|
||||
if (!config) {
|
||||
logger.warn(`[Billable] No config found for ${options.productId}:${options.actionId}`);
|
||||
return originalMethod.apply(this, args);
|
||||
}
|
||||
|
||||
// 3. Start tracking
|
||||
const usageId = await trackUsage({
|
||||
userId: context.userId,
|
||||
endpoint: 'function', // Internal function call
|
||||
method: 'CALL',
|
||||
product: options.productId,
|
||||
action: options.actionId,
|
||||
costUnits: config.costUnits,
|
||||
cancellable: options.cancellable || false,
|
||||
jobId: context.jobId,
|
||||
metadata: context.metadata
|
||||
});
|
||||
|
||||
const startTime = Date.now();
|
||||
let error: Error | null = null;
|
||||
let result: any;
|
||||
|
||||
try {
|
||||
// 4. Execute method
|
||||
// If cancellable, we should ideally wrap the execution or check signal
|
||||
if (options.cancellable && context.signal) {
|
||||
if (context.signal.aborted) {
|
||||
throw new Error('Operation cancelled');
|
||||
}
|
||||
|
||||
// Add abort listener
|
||||
context.signal.addEventListener('abort', () => {
|
||||
logger.info(`[Billable] Job ${context?.jobId} aborted via signal`);
|
||||
});
|
||||
}
|
||||
|
||||
result = await originalMethod.apply(this, args);
|
||||
return result;
|
||||
} catch (err) {
|
||||
error = err as Error;
|
||||
throw err;
|
||||
} finally {
|
||||
// 5. End tracking
|
||||
if (usageId) {
|
||||
const endTime = Date.now();
|
||||
await updateUsageRecord({
|
||||
usageId,
|
||||
responseStatus: error ? 500 : 200,
|
||||
responseTimeMs: endTime - startTime,
|
||||
error
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
return descriptor;
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Class Decorator: Registers the worker queue name
|
||||
*/
|
||||
export function Worker(queueName: string) {
|
||||
return function <T extends { new(...args: any[]): any }>(constructor: T) {
|
||||
// We can't easily access the instance method 'handler' here without instantiating
|
||||
// So we assume the class has a 'handler' method or we register the class itself
|
||||
// For simplicity, let's assume we'll instantiate it later or the registry handles it.
|
||||
// But wait, pg-boss needs a function.
|
||||
// Let's store the constructor in the registry, and the registry (or bootstrap) will instantiate and bind.
|
||||
|
||||
// Actually, let's just attach the queue name to the class for now,
|
||||
// and let a separate scanner or manual registration use it.
|
||||
// OR, we can register a factory.
|
||||
|
||||
// Better approach for now: Register the prototype's handler if it exists.
|
||||
// But 'handler' is on the instance usually.
|
||||
|
||||
// Let's just modify the class to have a static 'queueName' property
|
||||
// and register it.
|
||||
(constructor as any).queueName = queueName;
|
||||
};
|
||||
}
|
||||
|
||||
import { Context } from 'hono';
|
||||
import { getCache } from './cache/index.js';
|
||||
|
||||
type KeyGenerator = (c: Context) => string;
|
||||
|
||||
const defaultKeyInfo = (c: Context) => {
|
||||
const url = new URL(c.req.url);
|
||||
url.searchParams.sort();
|
||||
return `auto-cache:${c.req.method}:${url.pathname}${url.search}`;
|
||||
};
|
||||
|
||||
// Higher-order handler that transparently caches successful responses.
// The cache key is method + path + sorted query (default), optionally
// varied by the raw Authorization header; entries store { data, contentType }.
export const CachedHandler = (
    handler: (c: Context) => Promise<Response>,
    options?: {
        ttl?: number, // cache lifetime forwarded to the adapter (default 300)
        keyGenerator?: KeyGenerator,
        skipAuth?: boolean, // Default true
        varyByAuth?: boolean, // If true, includes auth token in key and disables skipAuth default
        maxSizeBytes?: number // Default: 1MB
    }
) => async (c: Context) => {
    const opts = options || {};
    const ttl = opts.ttl || 300;
    const varyByAuth = opts.varyByAuth || false;
    const skipAuth = opts.skipAuth !== undefined ? opts.skipAuth : !varyByAuth; // Default true unless varyByAuth is true
    const maxSizeBytes = opts.maxSizeBytes || 1024 * 1024; // 1MB
    const keyGen = opts.keyGenerator || defaultKeyInfo;

    // 1. Auth Bypass — authenticated requests are not cached at all by default.
    const authHeader = c.req.header('Authorization');
    if (skipAuth && authHeader) {
        // Explicitly mark as skipped due to auth
        c.header('X-Cache', 'SKIP');
        return handler(c);
    }

    const cache = getCache();
    let key = keyGen(c);

    // Append Auth to key if requested (User Isolation)
    // NOTE(review): the raw Authorization header value becomes part of the
    // cache key — confirm holding bearer tokens in cache keys is acceptable.
    if (varyByAuth && authHeader) {
        key += `|auth=${authHeader}`;
    }
    // ?cache=false or ?nocache=true lets clients force a fresh response.
    const bypass = c.req.query('cache') === 'false' || c.req.query('nocache') === 'true';

    // 2. Hit — serve the stored body with its original content type.
    if (!bypass) {
        const cached = await cache.get(key);
        if (cached) {
            c.header('X-Cache', 'HIT');
            const cachedVal = cached as any;
            if (cachedVal.contentType) c.header('Content-Type', cachedVal.contentType);
            if (varyByAuth) c.header('Vary', 'Authorization');
            return c.body(cachedVal.data);
        }
    }

    // 3. Miss — run the real handler.
    const response = await handler(c);

    // 4. Save — only successful Response objects are cached, best-effort.
    if (response instanceof Response && response.ok) {
        // Clone so the original body stream stays readable by the client.
        const cloned = response.clone();
        try {
            const contentType = response.headers.get('Content-Type') || 'application/json';
            let data: any;

            // Check content length if available — skip caching oversized bodies
            // before reading them into memory.
            const contentLength = cloned.headers.get('Content-Length');
            if (contentLength && parseInt(contentLength) > maxSizeBytes) {
                return response;
            }

            // JSON is re-serialized (normalizes whitespace); everything else
            // is stored as raw text.
            if (contentType.includes('application/json')) {
                const jsonObj = await cloned.json();
                data = JSON.stringify(jsonObj);
            } else {
                data = await cloned.text();
            }

            // Double check actual size after reading
            if (data.length > maxSizeBytes) {
                return response;
            }

            await cache.set(key, { data, contentType }, ttl);
            // NOTE(review): these headers are set on the context AFTER `handler`
            // already produced `response` — verify the framework still applies
            // them to the outgoing response.
            c.header('X-Cache', bypass ? 'BYPASS' : 'MISS');
            if (varyByAuth) c.header('Vary', 'Authorization');
        } catch (e) {
            // Caching is best-effort: log and fall through to the live response.
            logger.error({ err: e }, 'Cache interception failed');
        }
    }

    return response;
}
|
||||
|
||||
185
src/commons/log-routes-factory.ts
Normal file
185
src/commons/log-routes-factory.ts
Normal file
@ -0,0 +1,185 @@
|
||||
import { createRoute, z } from '@hono/zod-openapi';
|
||||
import { Context } from 'hono';
|
||||
import { streamSSE, stream } from 'hono/streaming';
|
||||
import fs from 'fs';
|
||||
import readline from 'readline';
|
||||
|
||||
/**
|
||||
* Creates OpenAPI route definitions for standard log endpoints.
|
||||
* @param tag The OpenAPI tag for grouping (e.g. 'System', 'Images')
|
||||
* @param pathPrefix The URL path prefix (e.g. '/api/logs/system')
|
||||
*/
|
||||
export const createLogRoutes = (tag: string, pathPrefix: string) => {
|
||||
const getRoute = createRoute({
|
||||
method: 'get',
|
||||
path: pathPrefix,
|
||||
tags: [tag],
|
||||
summary: `Get ${tag} logs`,
|
||||
description: `Download or view ${tag} logs as a JSON array`,
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Log content',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z.array(z.record(z.string(), z.any())).openapi({
|
||||
description: 'Array of log entries'
|
||||
})
|
||||
}
|
||||
}
|
||||
},
|
||||
404: {
|
||||
description: 'Log file not found'
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
const streamRoute = createRoute({
|
||||
method: 'get',
|
||||
path: `${pathPrefix}/stream`,
|
||||
tags: [tag],
|
||||
summary: `Stream ${tag} logs`,
|
||||
description: `Stream ${tag} logs via SSE (Server-Sent Events)`,
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Log stream',
|
||||
content: {
|
||||
'text/event-stream': { schema: z.string() }
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { getRoute, streamRoute };
|
||||
};
|
||||
|
||||
/**
 * Creates Hono handlers for standard log endpoints.
 *
 * - getHandler: streams the whole NDJSON file back as one JSON array.
 * - streamHandler: tails the file via SSE, polling its size every 250ms.
 *
 * @param filePath The absolute path to the log file
 */
export const createLogHandlers = (filePath: string) => {
    // Streams '[' + comma-joined lines + ']' without parsing the file into memory.
    const getHandler = async (c: Context) => {
        if (!fs.existsSync(filePath)) {
            return c.text('Log file not found', 404);
        }

        c.header('Content-Type', 'application/json');

        return stream(c, async (stream) => {
            const fileStream = fs.createReadStream(filePath);
            const rl = readline.createInterface({
                input: fileStream,
                crlfDelay: Infinity
            });

            await stream.write('[');
            let first = true; // tracks whether a comma separator is needed

            for await (const line of rl) {
                if (!line.trim()) continue;
                if (!first) {
                    await stream.write(',');
                }
                try {
                    // Lines are written verbatim without JSON validation —
                    // assumes the producer (pino) emits one valid JSON object
                    // per line. Validating here would add per-line overhead.
                    await stream.write(line);
                    first = false;
                } catch (e) {
                    // Ignore broken lines
                }
            }

            await stream.write(']');
        });
    };

    // SSE tail: only bytes appended AFTER connect are streamed; the file may
    // appear, grow, shrink (rotation) or disappear while streaming.
    const streamHandler = async (c: Context) => {
        return streamSSE(c, async (stream) => {
            // Send initial connection message
            await stream.writeSSE({
                data: JSON.stringify({ type: 'info', message: 'Connected to log stream' })
            });

            if (!fs.existsSync(filePath)) {
                await stream.writeSSE({
                    data: JSON.stringify({ type: 'error', message: 'Log file not found' })
                });
                // We keep the stream open in case the file is created later
            }

            let currentSize = 0; // byte offset up to which the file has been sent
            // Check initial size
            if (fs.existsSync(filePath)) {
                const stat = fs.statSync(filePath);
                currentSize = stat.size;

                // Existing content is deliberately skipped: starting at the
                // current size avoids replaying the file and avoids emitting
                // a partial first line.
            }

            const checkInterval = 250; // Check every 250ms

            const interval = setInterval(async () => {
                try {
                    if (!fs.existsSync(filePath)) {
                        if (currentSize > 0) {
                            currentSize = 0; // File deleted
                            await stream.writeSSE({ data: JSON.stringify({ type: 'info', message: 'Log file deleted' }) });
                        }
                        return;
                    }

                    const stat = fs.statSync(filePath);

                    if (stat.size > currentSize) {
                        // Read exactly the appended range [currentSize, stat.size).
                        const sizeDiff = stat.size - currentSize;
                        const buffer = Buffer.alloc(sizeDiff);
                        const fd = fs.openSync(filePath, 'r');
                        try {
                            fs.readSync(fd, buffer, 0, sizeDiff, currentSize);
                            currentSize = stat.size;

                            const chunk = buffer.toString('utf-8');
                            // NOTE(review): the final line may be incomplete if the
                            // writer is appending concurrently; this assumes the log
                            // producer writes whole lines atomically — confirm.
                            const lines = chunk.split('\n');

                            // Skip blank fragments so each SSE event carries one line.
                            for (const line of lines) {
                                if (!line.trim()) continue;
                                await stream.writeSSE({ data: line });
                            }
                        } finally {
                            fs.closeSync(fd);
                        }
                    } else if (stat.size < currentSize) {
                        // Truncated / Rotated
                        currentSize = stat.size;
                        await stream.writeSSE({
                            data: JSON.stringify({ type: 'info', message: 'Log rotated' })
                        });
                    }
                } catch (e) {
                    console.error('Stream error:', e);
                }
            }, checkInterval);

            stream.onAbort(() => {
                clearInterval(interval);
            });

            // Keep the stream alive — this promise never resolves; the
            // connection ends when the client disconnects (onAbort above).
            await new Promise(() => { });
        });
    };

    return { getHandler, streamHandler };
};
|
||||
79
src/commons/logger.ts
Normal file
79
src/commons/logger.ts
Normal file
@ -0,0 +1,79 @@
|
||||
import pino from 'pino';
import path from 'path';
import { mkdirSync } from 'fs';

// Ensure logs directory exists
try {
    mkdirSync(path.join(process.cwd(), 'logs'), { recursive: true });
} catch (err) {
    // Directory already exists
}

// Main application log file.
// NOTE(review): this writes to <cwd>/app.log, NOT the logs/ directory created
// above — confirm whether 'logs/app.log' was intended.
const fileTransport = pino.transport({
    target: 'pino/file',
    options: { destination: path.join(process.cwd(), 'app.log') },
});

// Pretty-printed console output (destination 1 = stdout).
const consoleTransport = pino.transport({
    target: 'pino-pretty',
    options: {
        colorize: true,
        ignore: 'pid,hostname',
        destination: 1,
    },
});

// Default application logger: uppercase level labels, ISO timestamps,
// mirrored to both the file and the console at level >= info.
export const logger = pino(
    {
        level: process.env.PINO_LOG_LEVEL || 'info',
        formatters: {
            level: (label) => {
                return { level: label.toUpperCase() };
            },
        },
        timestamp: pino.stdTimeFunctions.isoTime,
    },
    pino.multistream([
        { stream: fileTransport, level: 'info' },
        { stream: consoleTransport, level: 'info' },
    ])
);

// Security logger - writes to logs/security.json
const securityFileTransport = pino.transport({
    target: 'pino/file',
    options: {
        destination: path.join(process.cwd(), 'logs', 'security.json'),
        mkdir: true // also creates logs/ if missing (redundant with the mkdirSync above)
    },
});

const securityConsoleTransport = pino.transport({
    target: 'pino-pretty',
    options: {
        colorize: true,
        ignore: 'pid,hostname',
        destination: 1,
    },
});

// Security-audit logger: same configuration shape as `logger`, but tagged
// { logger: 'security' } and persisted separately under logs/security.json.
export const securityLogger = pino(
    {
        level: process.env.PINO_LOG_LEVEL || 'info',
        formatters: {
            level: (label) => {
                return { level: label.toUpperCase() };
            },
        },
        timestamp: pino.stdTimeFunctions.isoTime,
        base: {
            logger: 'security'
        }
    },
    pino.multistream([
        { stream: securityFileTransport, level: 'info' },
        { stream: securityConsoleTransport, level: 'info' },
    ])
);

export default logger;
|
||||
192
src/commons/registry.ts
Normal file
192
src/commons/registry.ts
Normal file
@ -0,0 +1,192 @@
|
||||
import { ProductActionConfig, PRODUCT_ACTIONS } from '../config/products.js';
|
||||
|
||||
/**
 * A product action config keyed by its owning product and action —
 * the unit stored in FunctionRegistry under '<productId>:<actionId>'.
 */
export interface BillableFunctionConfig extends ProductActionConfig {
    productId: string;
    actionId: string;
}
|
||||
|
||||
/**
 * Central registry for all billable functions
 * Manages configuration, costs, and metadata
 */
export class FunctionRegistry {
    // Flat map keyed by `${productId}:${actionId}` (see getKey).
    private static registry = new Map<string, BillableFunctionConfig>();
    // Lazily flipped by initialize(); read accessors self-initialize.
    private static initialized = false;

    /**
     * Initialize the registry with default configurations
     */
    static initialize() {
        if (this.initialized) return;

        // Load legacy PRODUCT_ACTIONS
        for (const [productId, actions] of Object.entries(PRODUCT_ACTIONS)) {
            for (const [actionId, config] of Object.entries(actions)) {
                this.register({
                    productId,
                    actionId,
                    ...config
                });
            }
        }

        this.initialized = true;
    }

    /**
     * Register a new billable function
     */
    // Later registrations with the same product/action overwrite earlier ones.
    static register(config: BillableFunctionConfig) {
        const key = this.getKey(config.productId, config.actionId);
        this.registry.set(key, config);
    }

    /**
     * Get configuration for a specific function
     */
    // Returns null (not undefined) when the pair is unknown.
    static get(productId: string, actionId: string): BillableFunctionConfig | null {
        if (!this.initialized) this.initialize();
        const key = this.getKey(productId, actionId);
        return this.registry.get(key) || null;
    }

    /**
     * Get all registered functions
     */
    static getAll(): BillableFunctionConfig[] {
        if (!this.initialized) this.initialize();
        return Array.from(this.registry.values());
    }

    /**
     * Find a configuration by matching route endpoint and method
     * (Used for middleware backward compatibility)
     */
    // First match in insertion order wins; method comparison is case-sensitive.
    static findByRoute(path: string, method: string): BillableFunctionConfig | null {
        if (!this.initialized) this.initialize();

        for (const config of this.registry.values()) {
            if (this.matchesRoute(path, config.endpoint) && method === config.method) {
                return config;
            }
        }
        return null;
    }

    // Composite map key; safe because productId never contains ':' in practice
    // — NOTE(review): not enforced anywhere, confirm upstream.
    private static getKey(productId: string, actionId: string): string {
        return `${productId}:${actionId}`;
    }

    private static matchesRoute(path: string, pattern: string): boolean {
        // Convert pattern to regex
        // Handle both :param (Express/Hono style) and {param} (OpenAPI style)
        // e.g., '/api/competitors/:place_id' or '/api/competitors/{place_id}' -> /^\/api\/competitors\/[^\/]+$/
        const regexPattern = pattern
            .replace(/:[^\\/]+/g, '[^/]+') // Replace :param with regex
            .replace(/\{[^}]+\}/g, '[^/]+') // Replace {param} with regex
            .replace(/\//g, '\\/');

        // Allow optional trailing slash
        const regex = new RegExp(`^${regexPattern}\\/?$`);
        return regex.test(path);
    }
}
|
||||
|
||||
/**
|
||||
* Registry for public endpoints that don't require authentication
|
||||
*/
|
||||
export class PublicEndpointRegistry {
|
||||
private static registry = new Set<string>();
|
||||
|
||||
static register(path: string, method: string) {
|
||||
this.registry.add(`${method.toUpperCase()}:${path}`);
|
||||
}
|
||||
|
||||
static getAll(): Array<{ path: string; method: string }> {
|
||||
return Array.from(this.registry).map(entry => {
|
||||
// Split only on the FIRST colon (METHOD:PATH)
|
||||
// Don't split on colons in path parameters like :identifier
|
||||
const colonIndex = entry.indexOf(':');
|
||||
const method = entry.substring(0, colonIndex);
|
||||
const path = entry.substring(colonIndex + 1);
|
||||
return { path, method };
|
||||
});
|
||||
}
|
||||
|
||||
static isPublic(path: string, method: string): boolean {
|
||||
const methodUpper = method.toUpperCase();
|
||||
|
||||
for (const registered of this.registry) {
|
||||
// Split only on the FIRST colon (METHOD:PATH)
|
||||
// Don't split on colons in path parameters like :identifier
|
||||
const colonIndex = registered.indexOf(':');
|
||||
const regMethod = registered.substring(0, colonIndex);
|
||||
const regPath = registered.substring(colonIndex + 1);
|
||||
|
||||
if (regMethod !== methodUpper) continue;
|
||||
|
||||
// Check if path matches pattern
|
||||
if (this.matchesRoute(path, regPath)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
// Debug: log when a route is not found as public
|
||||
// console.log(`[PublicEndpointRegistry] Route not found as public: ${methodUpper} ${path}`);
|
||||
// console.log(`[PublicEndpointRegistry] Registered routes:`, Array.from(this.registry));
|
||||
return false;
|
||||
}
|
||||
|
||||
private static matchesRoute(path: string, pattern: string): boolean {
|
||||
// Convert pattern to regex
|
||||
// Handle both :param (Express/Hono style) and {param} (OpenAPI style)
|
||||
// e.g., '/api/competitors/:place_id' or '/api/competitors/{place_id}' -> /^\/api\/competitors\/[^\/]+$/
|
||||
const regexPattern = pattern
|
||||
.replace(/:[^\/]+/g, '[^/]+') // Replace :param with regex
|
||||
.replace(/\{[^}]+\}/g, '[^/]+') // Replace {param} with regex
|
||||
.replace(/\//g, '\\/');
|
||||
|
||||
// Allow optional trailing slash
|
||||
const regex = new RegExp(`^${regexPattern}\\/?$`);
|
||||
return regex.test(path);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Registry for admin-only endpoints
|
||||
*/
|
||||
export class AdminEndpointRegistry {
|
||||
private static registry = new Set<string>();
|
||||
|
||||
static register(path: string, method: string) {
|
||||
this.registry.add(`${method.toUpperCase()}:${path}`);
|
||||
}
|
||||
|
||||
static isAdmin(path: string, method: string): boolean {
|
||||
const methodUpper = method.toUpperCase();
|
||||
|
||||
for (const registered of this.registry) {
|
||||
const [regMethod, regPath] = registered.split(':');
|
||||
|
||||
if (regMethod !== methodUpper) continue;
|
||||
|
||||
// Check if path matches pattern
|
||||
if (this.matchesRoute(path, regPath)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
private static matchesRoute(path: string, pattern: string): boolean {
|
||||
// Convert pattern to regex
|
||||
// Handle both :param (Express/Hono style) and {param} (OpenAPI style)
|
||||
// e.g., '/api/competitors/:place_id' or '/api/competitors/{place_id}' -> /^\/api\/competitors\/[^\/]+$/
|
||||
const regexPattern = pattern
|
||||
.replace(/:[^\\/]+/g, '[^/]+') // Replace :param with regex
|
||||
.replace(/\{[^}]+\}/g, '[^/]+') // Replace {param} with regex
|
||||
.replace(/\//g, '\\/');
|
||||
|
||||
const regex = new RegExp(`^${regexPattern}\\/?$`);
|
||||
return regex.test(path);
|
||||
}
|
||||
}
|
||||
76
src/commons/supabase.ts
Normal file
76
src/commons/supabase.ts
Normal file
@ -0,0 +1,76 @@
|
||||
import { createClient, User } from '@supabase/supabase-js'
|
||||
import 'dotenv/config'
|
||||
|
||||
// Service-role credentials; this client bypasses RLS, so it must never be
// exposed to the browser.
const supabaseUrl = process.env.SUPABASE_URL
const supabaseKey = process.env.SUPABASE_SERVICE_KEY

import { logger } from './logger.js'

// Fail fast when credentials are missing: hard-exit in production-like
// environments, but throw under NODE_ENV=test so the test runner survives.
if (!supabaseUrl || !supabaseKey) {
    logger.error({
        hasUrl: !!supabaseUrl,
        hasKey: !!supabaseKey,
        env: process.env.NODE_ENV
    }, 'Missing Supabase environment variables');
    // process.exit(1) // Don't exit in test mode, throw instead
    if (process.env.NODE_ENV !== 'test') process.exit(1);
    throw new Error('Missing Supabase environment variables: URL or Key is undefined');
}

// Shared server-side Supabase client (service key).
export const supabase = createClient(supabaseUrl, supabaseKey)
||||
|
||||
// --- Auth Cache (in-process Map for speed) ---
|
||||
|
||||
const AUTH_CACHE_TTL = process.env.AUTH_CACHE_TTL ? parseInt(process.env.AUTH_CACHE_TTL) : 1000 * 60 * 1; // Default 1 minute
|
||||
|
||||
type AuthCacheEntry = { user: User | null; timestamp: number };
|
||||
const authMap = new Map<string, AuthCacheEntry>();
|
||||
|
||||
export const getUserCached = async (token: string): Promise<User | null> => {
|
||||
if (!token) return null;
|
||||
|
||||
const now = Date.now();
|
||||
const cached = authMap.get(token);
|
||||
if (cached && (now - cached.timestamp < AUTH_CACHE_TTL)) {
|
||||
return cached.user;
|
||||
}
|
||||
|
||||
try {
|
||||
const { data: { user }, error } = await supabase.auth.getUser(token);
|
||||
|
||||
if (error || !user) {
|
||||
authMap.set(token, { user: null, timestamp: now });
|
||||
return null;
|
||||
}
|
||||
|
||||
authMap.set(token, { user, timestamp: now });
|
||||
return user;
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Auth Cache Error');
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
/** Clear in-process auth cache (call after admin user ops, role changes, etc.) */
|
||||
export const flushAuthCache = (userId?: string) => {
|
||||
if (!userId) {
|
||||
authMap.clear();
|
||||
return;
|
||||
}
|
||||
// Remove entries for a specific user
|
||||
for (const [token, entry] of authMap) {
|
||||
if (entry.user?.id === userId) authMap.delete(token);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Test Supabase connection by attempting a simple query
|
||||
*/
|
||||
export async function testSupabaseConnection(): Promise<boolean> {
|
||||
try {
|
||||
const { error } = await supabase.from('products').select('id').limit(1)
|
||||
return !error
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
10
src/commons/types.ts
Normal file
10
src/commons/types.ts
Normal file
@ -0,0 +1,10 @@
|
||||
import { Env } from 'hono'
|
||||
|
||||
/**
 * App-wide Hono environment: per-request variables set by middleware and read
 * by handlers via c.get()/c.set().
 */
export interface HonoEnv extends Env {
    Variables: {
        // Background-job id associated with the request, when one was spawned
        jobId?: string;
        // Authenticated user's id — presumably set by the auth middleware; verify
        userId?: string;
        // Usage-tracking record id for billing
        usageId?: string;
        // When true, downstream middleware skips the usage status update
        skipUsageStatusUpdate?: boolean;
    }
}
||||
238
src/commons/websocket.ts
Normal file
238
src/commons/websocket.ts
Normal file
@ -0,0 +1,238 @@
|
||||
import { WebSocketServer, WebSocket } from 'ws';
|
||||
import { Server } from 'http';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import chokidar from 'chokidar';
|
||||
|
||||
// Handler invoked for an incoming { command, ...payload } message.
type MessageHandler = (ws: WebSocket, payload: any) => void;

/**
 * Singleton WebSocket hub.
 * - Accepts JSON command messages on /ws and dispatches them to registered handlers.
 * - Watches the ./data directory and broadcasts selected file changes as
 *   'layout-update' messages.
 * - Serialises all 'log' command file writes through a promise queue.
 */
export class WebSocketManager {
    private static instance: WebSocketManager;
    // Null until init() is called with an HTTP server.
    private wss: WebSocketServer | null = null;
    // command name -> handler
    private handlers: Map<string, MessageHandler> = new Map();
    // Promise chain serialising handleLog() writes so concurrent log commands
    // can't interleave read-modify-write cycles on the same file.
    private writeQueue: Promise<void> = Promise.resolve();

    private constructor() {
        // Register default handlers
        this.registerHandler('log', this.handleLog.bind(this));
        this.registerHandler('echo', (ws, payload) => ws.send(JSON.stringify({ type: 'echo', payload })));
        this.registerHandler('ping', (ws, payload) => ws.send(JSON.stringify({ type: 'pong', id: payload.id })));
    }

    /** Lazily create and return the process-wide instance. */
    public static getInstance(): WebSocketManager {
        if (!WebSocketManager.instance) {
            WebSocketManager.instance = new WebSocketManager();
        }
        return WebSocketManager.instance;
    }

    /**
     * Attach the WebSocket server to an existing HTTP server (path /ws) and
     * start the file watcher. Safe to call once; repeat calls only warn.
     */
    public init(server: Server) {
        if (this.wss) {
            console.warn('WebSocketServer already initialized');
            return;
        }

        this.wss = new WebSocketServer({ server, path: '/ws' });

        this.wss.on('connection', (ws: WebSocket) => {
            ws.on('message', (message: string) => {
                try {
                    // Messages are JSON objects shaped { command, ...payload }.
                    const data = JSON.parse(message.toString());
                    const { command, ...payload } = data;

                    if (command && this.handlers.has(command)) {
                        this.handlers.get(command)!(ws, payload);
                    } else {
                        console.warn('Unknown command:', command);
                    }
                } catch (err) {
                    console.error('Failed to parse message:', err);
                }
            });

            ws.on('close', () => {

            });

            ws.on('error', (err) => {
                console.error('WebSocket error:', err);
            });
        });

        this.initWatcher();
    }

    /**
     * Watch ./data for layout files and broadcast their contents to all clients.
     * Only whitelisted JSON layout files and non-log html/md files are forwarded.
     */
    private initWatcher() {
        // Watch for changes in canvas-page-new.json
        const logDir = path.join(process.cwd(), 'data');
        // Ensure log directory exists
        if (!fs.existsSync(logDir)) {
            try {
                fs.mkdirSync(logDir, { recursive: true });
            } catch (err) {
                console.error('Failed to create log directory for watcher:', err);
            }
        }

        const handleFile = async (filePath: string) => {
            // Ignore output files (logs) to prevent infinite loops (Frontend -> Log -> Watcher -> Frontend -> Loop)
            const fileName = path.basename(filePath);
            const ext = path.extname(filePath).toLowerCase();

            // Explicitly allow only specific JSON files (layouts) to trigger updates
            // Ignore everything else (logs, dumps, etc.)
            if (ext === '.json') {
                if (fileName !== 'canvas-page-latest-new.json' && fileName !== 'canvas-page-new.json') {
                    return;
                }
            } else if (fileName.startsWith('canvas-html-latest')) {
                return;
            }

            console.log(`[Watcher] File detected: ${filePath}`);
            try {
                // NOTE: shadows the outer `ext` with the same value.
                const ext = path.extname(filePath).toLowerCase();

                if (ext === '.json') {
                    const content = await fs.promises.readFile(filePath, 'utf-8');
                    if (!content.trim()) return; // Ignore empty writes

                    try {
                        const layoutData = JSON.parse(content);
                        console.log('Broadcasting layout-update (json)...');
                        this.broadcast({
                            type: 'layout-update',
                            data: layoutData
                        });
                    } catch (parseErr) {
                        console.error(`Failed to parse watched JSON file: ${filePath}`, parseErr);
                    }
                } else if (ext === '.html' || ext === '.md') {
                    // html/md payloads are shipped base64-encoded.
                    const content = await fs.promises.readFile(filePath, 'base64');
                    console.log(`Broadcasting layout-update (${ext})...`);
                    this.broadcast({
                        type: 'layout-update',
                        data: content
                    });
                }
            } catch (err) {
                console.error(`Failed to process watched file ${filePath}:`, err);
            }
        };

        // ignoreInitial:false means existing files are broadcast on startup;
        // awaitWriteFinish debounces partial writes before events fire.
        chokidar.watch(logDir, {
            persistent: true,
            ignoreInitial: false,
            awaitWriteFinish: {
                stabilityThreshold: 100,
                pollInterval: 100
            }
        })
            .on('add', handleFile)
            .on('change', handleFile);
    }

    /** Register (or replace) the handler for a command name. */
    public registerHandler(command: string, handler: MessageHandler) {
        this.handlers.set(command, handler);
    }

    /** Send a JSON-serialised message to every currently open client. */
    public broadcast(message: any) {
        if (!this.wss) return;
        const data = JSON.stringify(message);
        this.wss.clients.forEach((client) => {
            if (client.readyState === WebSocket.OPEN) {
                client.send(data);
            }
        });
    }

    /**
     * 'log' command: persist a payload under ./data/<name>.<ext>.
     * JSON append mode maintains a JSON array (tolerating NDJSON input);
     * overwrite mode replaces the file with the message content.
     * All writes are serialised through writeQueue.
     */
    private handleLog(ws: WebSocket, payload: any) {
        // Expected payload: { name: string, options?: { mode?: 'append'|'overwrite', format?: 'json'|'html'|'md' }, message: any, ...others }
        const { name, id, options, ...logData } = payload;

        if (!name) {
            console.warn('Log command missing "name" field');
            return;
        }

        const mode = options?.mode || 'append';
        const format = options?.format || 'json';

        const logDir = path.join(process.cwd(), 'data');
        const extension = format === 'md' ? 'md' : format === 'html' ? 'html' : 'json';
        // NOTE(review): `name` comes from the client and is joined into a path
        // unsanitised — a '../' name could escape logDir; confirm trust model.
        const logFile = path.join(logDir, `${name}.${extension}`);

        // Ensure log directory exists
        if (!fs.existsSync(logDir)) {
            try {
                fs.mkdirSync(logDir, { recursive: true });
            } catch (err) {
                console.error('Failed to create log directory:', err);
                return;
            }
        }

        // Serialize writes using the queue
        this.writeQueue = this.writeQueue.then(async () => {
            try {
                if (format === 'json') {
                    if (mode === 'overwrite') {
                        // For overwrite (state capture), write only the message content if available
                        const content = (logData.message !== undefined) ? logData.message : logData;
                        const contentToWrite = JSON.stringify(content, null, 2);
                        await fs.promises.writeFile(logFile, contentToWrite);
                    } else {
                        // For append (logging), read existing, parse, append to array, write back
                        let records: any[] = [];

                        try {
                            if (fs.existsSync(logFile)) {
                                const fileContent = await fs.promises.readFile(logFile, 'utf-8');
                                if (fileContent.trim()) {
                                    try {
                                        const parsed = JSON.parse(fileContent);
                                        if (Array.isArray(parsed)) {
                                            records = parsed;
                                        } else {
                                            records = [parsed];
                                        }
                                    } catch (e) {
                                        // Attempt to parse as NDJSON (newline delimited JSON)
                                        records = fileContent.split('\n')
                                            .filter(line => line.trim())
                                            .map(line => {
                                                try { return JSON.parse(line); } catch { return null; }
                                            })
                                            .filter(item => item !== null);
                                    }
                                }
                            }
                        } catch (readErr) {
                            console.warn(`Failed to read log file ${logFile}, starting fresh.`, readErr);
                        }

                        const logEntry = {
                            timestamp: new Date().toISOString(),
                            ...logData
                        };
                        records.push(logEntry);

                        await fs.promises.writeFile(logFile, JSON.stringify(records, null, 2));
                    }
                } else {
                    // HTML or MD
                    const message = logData.message;
                    const content = typeof message === 'string' ? message : JSON.stringify(message);

                    if (mode === 'append') {
                        await fs.promises.appendFile(logFile, content + '\n');
                    } else {
                        await fs.promises.writeFile(logFile, content);
                    }
                }
            } catch (err) {
                console.error(`Failed to write log file ${logFile}:`, err);
            }
        });
    }
}
|
||||
10
src/config/blocklist.json
Normal file
10
src/config/blocklist.json
Normal file
@ -0,0 +1,10 @@
|
||||
{
|
||||
"blockedIPs": [],
|
||||
"blockedUserIds": [
|
||||
"user_banned_123",
|
||||
"user_spam_456"
|
||||
],
|
||||
"blockedTokens": [
|
||||
"Bearer malicious_token_xyz"
|
||||
]
|
||||
}
|
||||
137
src/config/products.ts
Normal file
137
src/config/products.ts
Normal file
@ -0,0 +1,137 @@
|
||||
/**
|
||||
* Product and Action Mapping Configuration
|
||||
* Defines all trackable products, their actions, and associated metadata
|
||||
*/
|
||||
|
||||
/** Configuration for a single billable action within a product. */
export interface ProductActionConfig {
    // Route pattern; supports ':param' (Express/Hono) and '{param}' (OpenAPI)
    endpoint: string;
    // HTTP method, upper case (e.g. 'GET')
    method: string;
    // Billing cost in abstract units; 0 means free
    costUnits: number;
    cancellable?: boolean; // Whether this action can be cancelled
    description?: string;
}

/** Map of action name -> configuration for one product. */
export interface ProductConfig {
    [action: string]: ProductActionConfig;
}
|
||||
|
||||
// Master table of billable products/actions consumed by identifyProductAction
// and FunctionRegistry.initialize().
// NOTE(review): the `Record<string, ProductConfig>` annotation widens the
// literal, so `as const` gains nothing here — `satisfies` would keep literal
// keys; changing it would affect string-indexed callers, so left as-is.
// NOTE(review): '/api/competitors/stream' also matches the earlier
// get_details pattern '/api/competitors/:place_id'; iteration order decides
// which wins in identifyProductAction — confirm intended precedence.
export const PRODUCT_ACTIONS: Record<string, ProductConfig> = {
    competitors: {
        search: {
            endpoint: '/api/competitors',
            method: 'GET',
            costUnits: 1.0,
            cancellable: true, // Search can be cancelled
            description: 'Search for competitors in a location',
        },
        get_details: {
            endpoint: '/api/competitors/:place_id',
            method: 'GET',
            costUnits: 0.0,
            cancellable: false, // Quick lookup, not cancellable
            description: 'Get details for a specific competitor',
        },
        stream: {
            endpoint: '/api/competitors/stream',
            method: 'GET',
            costUnits: 1.0, // Same cost as regular search
            cancellable: true,
            description: 'Stream competitors in real-time',
        },
        find_email: {
            endpoint: '/api/find/email/{place_id}',
            method: 'GET',
            costUnits: 2.0, // Higher cost due to Puppeteer usage
            cancellable: true, // Long-running, can be cancelled
            description: 'Find email addresses for a business using Puppeteer',
        },
    },
    images: {
        upload: {
            endpoint: '/api/images',
            method: 'POST',
            costUnits: 2.0,
            cancellable: true,
            description: 'Upload an image',
        },
        get: {
            endpoint: '/api/images/:id',
            method: 'GET',
            costUnits: 0.05,
            cancellable: false,
            description: 'Retrieve an image',
        },
        update: {
            endpoint: '/api/images/:id',
            method: 'PUT',
            costUnits: 1.5,
            cancellable: false,
            description: 'Update image metadata',
        },
    },
    mock: {
        job: {
            endpoint: '/api/mock/job',
            method: 'POST',
            costUnits: 0.0,
            cancellable: true,
            description: 'Mock job for testing',
        },
    },
    // Add more products here as they are developed
} as const;
|
||||
|
||||
/**
|
||||
* Match a request path and method to a product and action
|
||||
*/
|
||||
export function identifyProductAction(path: string, method: string): {
|
||||
product: string | null;
|
||||
action: string | null;
|
||||
config: ProductActionConfig | null;
|
||||
} {
|
||||
for (const [product, actions] of Object.entries(PRODUCT_ACTIONS)) {
|
||||
for (const [action, config] of Object.entries(actions)) {
|
||||
if (matchesRoute(path, config.endpoint) && method === config.method) {
|
||||
return { product, action, config };
|
||||
}
|
||||
}
|
||||
}
|
||||
return { product: null, action: null, config: null };
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a path matches a route pattern (supports :param syntax)
|
||||
*/
|
||||
function matchesRoute(path: string, pattern: string): boolean {
|
||||
// Convert pattern to regex
|
||||
// e.g., '/api/competitors/:place_id' or '/api/competitors/{place_id}' -> /^\/api\/competitors\/[^\/]+$/
|
||||
const regexPattern = pattern
|
||||
.replace(/:[^\/]+/g, '[^/]+') // Replace :param with regex
|
||||
.replace(/\{[^}]+\}/g, '[^/]+') // Replace {param} with regex
|
||||
.replace(/\//g, '\\/'); // Escape slashes
|
||||
|
||||
// Allow optional trailing slash
|
||||
const regex = new RegExp(`^${regexPattern}\\/?$`);
|
||||
return regex.test(path);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all products
|
||||
*/
|
||||
export function getAllProducts(): string[] {
|
||||
return Object.keys(PRODUCT_ACTIONS);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all actions for a product
|
||||
*/
|
||||
export function getProductActions(product: string): string[] {
|
||||
return Object.keys(PRODUCT_ACTIONS[product] || {});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get configuration for a specific product action
|
||||
*/
|
||||
export function getActionConfig(product: string, action: string): ProductActionConfig | null {
|
||||
return PRODUCT_ACTIONS[product]?.[action] || null;
|
||||
}
|
||||
1
src/constants.ts
Normal file
1
src/constants.ts
Normal file
@ -0,0 +1 @@
|
||||
// Stable post id used as a test fixture — presumably seeded in the test database; verify.
export const TEST_POST_ID = '8c1d567a-6909-4e43-b432-bd359bb10fc5';
|
||||
32
src/endpoints/__tests__/admin.test.ts
Normal file
32
src/endpoints/__tests__/admin.test.ts
Normal file
@ -0,0 +1,32 @@
|
||||
|
||||
import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest'
|
||||
import { app } from '../../index.js' // Adjust path if needed
|
||||
import { AdminEndpointRegistry } from '../../commons/registry.js'
|
||||
|
||||
// Integration-ish tests for POST /api/admin/system/restart: registry wiring
// plus the unauthenticated path through the real middleware stack.
describe('Admin Restart Endpoint', () => {
    beforeEach(() => {
        // Mock process.exit to prevent killing the test runner
        vi.spyOn(process, 'exit').mockImplementation((code) => {
            // console.log(`Mock process.exit(${code}) called`)
            return undefined as never
        })
    })

    afterEach(() => {
        vi.restoreAllMocks()
    })

    it('should be registered as an admin endpoint', () => {
        expect(AdminEndpointRegistry.isAdmin('/api/admin/system/restart', 'POST')).toBe(true)
    })

    it('should return 401 if unauthenticated', async () => {
        const res = await app.request('/api/admin/system/restart', { method: 'POST' })
        expect(res.status).toBe(401)
        const body = await res.json()
        expect(body).toEqual({ error: 'Unauthorized - Authentication required' })
    })

    // Mocking a full admin user flow is complex without mocking Supabase,
    // but verifying 401 proves that the middleware is intercepting the request.
})
|
||||
193
src/endpoints/admin.ts
Normal file
193
src/endpoints/admin.ts
Normal file
@ -0,0 +1,193 @@
|
||||
|
||||
import { OpenAPIHono, createRoute, z } from '@hono/zod-openapi'
|
||||
import { logger } from '../commons/logger.js'
|
||||
import { getBanList, unbanIP, unbanUser, getViolationStats } from '../middleware/autoBan.js'
|
||||
|
||||
// OpenAPI spec for POST /api/admin/system/restart — see restartHandler.
export const restartRoute = createRoute({
    method: 'post',
    path: '/api/admin/system/restart',
    tags: ['Admin'],
    summary: 'Restart the server',
    description: 'Exits the process with code 0, relying on systemd to restart it.',
    responses: {
        200: {
            description: 'Restart initiated',
            content: {
                'application/json': {
                    schema: z.object({
                        message: z.string(),
                        pid: z.number()
                    })
                }
            }
        }
    }
})
|
||||
|
||||
export const restartHandler = async (c: any) => {
|
||||
const pid = process.pid
|
||||
// Use a slight delay to allow the response to be sent
|
||||
setTimeout(() => {
|
||||
logger.info('Exiting process for restart...')
|
||||
process.exit(0)
|
||||
}, 1000)
|
||||
|
||||
return c.json({
|
||||
message: 'Server is restarting...',
|
||||
pid
|
||||
})
|
||||
}
|
||||
|
||||
// Ban List Routes

// OpenAPI spec for GET /api/admin/bans — see getBanListHandler.
export const getBanListRoute = createRoute({
    method: 'get',
    path: '/api/admin/bans',
    tags: ['Admin'],
    summary: 'Get current ban list',
    description: 'Returns all auto-banned IPs, users, and tokens',
    responses: {
        200: {
            description: 'Ban list retrieved',
            content: {
                'application/json': {
                    schema: z.object({
                        bannedIPs: z.array(z.string()),
                        bannedUserIds: z.array(z.string()),
                        bannedTokens: z.array(z.string())
                    })
                }
            }
        }
    }
})
|
||||
|
||||
export const getBanListHandler = async (c: any) => {
|
||||
const banList = getBanList()
|
||||
logger.info({ user: c.get('user') }, 'Admin retrieved ban list')
|
||||
return c.json(banList)
|
||||
}
|
||||
|
||||
// OpenAPI spec for POST /api/admin/bans/unban-ip — see unbanIPHandler.
export const unbanIPRoute = createRoute({
    method: 'post',
    path: '/api/admin/bans/unban-ip',
    tags: ['Admin'],
    summary: 'Unban an IP address',
    description: 'Removes an IP from the auto-ban list',
    request: {
        body: {
            content: {
                'application/json': {
                    schema: z.object({
                        ip: z.string()
                    })
                }
            }
        }
    },
    responses: {
        200: {
            // Note: 200 is returned even when the IP wasn't banned; the
            // `success` flag distinguishes the two cases.
            description: 'IP unbanned successfully',
            content: {
                'application/json': {
                    schema: z.object({
                        success: z.boolean(),
                        message: z.string()
                    })
                }
            }
        }
    }
})
|
||||
|
||||
export const unbanIPHandler = async (c: any) => {
|
||||
const { ip } = await c.req.json()
|
||||
const success = unbanIP(ip)
|
||||
logger.info({ user: c.get('user'), ip, success }, 'Admin attempted to unban IP')
|
||||
|
||||
return c.json({
|
||||
success,
|
||||
message: success ? `IP ${ip} has been unbanned` : `IP ${ip} was not found in ban list`
|
||||
})
|
||||
}
|
||||
|
||||
// OpenAPI spec for POST /api/admin/bans/unban-user — see unbanUserHandler.
export const unbanUserRoute = createRoute({
    method: 'post',
    path: '/api/admin/bans/unban-user',
    tags: ['Admin'],
    summary: 'Unban a user',
    description: 'Removes a user from the auto-ban list',
    request: {
        body: {
            content: {
                'application/json': {
                    schema: z.object({
                        userId: z.string()
                    })
                }
            }
        }
    },
    responses: {
        200: {
            // Note: 200 is returned even when the user wasn't banned; the
            // `success` flag distinguishes the two cases.
            description: 'User unbanned successfully',
            content: {
                'application/json': {
                    schema: z.object({
                        success: z.boolean(),
                        message: z.string()
                    })
                }
            }
        }
    }
})
|
||||
|
||||
export const unbanUserHandler = async (c: any) => {
|
||||
const { userId } = await c.req.json()
|
||||
const success = unbanUser(userId)
|
||||
logger.info({ user: c.get('user'), userId, success }, 'Admin attempted to unban user')
|
||||
|
||||
return c.json({
|
||||
success,
|
||||
message: success ? `User ${userId} has been unbanned` : `User ${userId} was not found in ban list`
|
||||
})
|
||||
}
|
||||
|
||||
// OpenAPI spec for GET /api/admin/bans/violations — see getViolationStatsHandler.
export const getViolationStatsRoute = createRoute({
    method: 'get',
    path: '/api/admin/bans/violations',
    tags: ['Admin'],
    summary: 'Get violation statistics',
    description: 'Returns current violation tracking data',
    responses: {
        200: {
            description: 'Violation stats retrieved',
            content: {
                'application/json': {
                    schema: z.object({
                        totalViolations: z.number(),
                        violations: z.array(z.object({
                            key: z.string(),
                            count: z.number(),
                            // Epoch-millisecond timestamps — presumably; confirm
                            // against autoBan's tracking implementation.
                            firstViolation: z.number(),
                            lastViolation: z.number()
                        }))
                    })
                }
            }
        }
    }
})
|
||||
|
||||
export const getViolationStatsHandler = async (c: any) => {
|
||||
const stats = getViolationStats()
|
||||
return c.json(stats)
|
||||
}
|
||||
|
||||
export const registerAdminRoutes = (app: OpenAPIHono) => {
|
||||
app.openapi(restartRoute, restartHandler)
|
||||
app.openapi(getBanListRoute, getBanListHandler)
|
||||
app.openapi(unbanIPRoute, unbanIPHandler)
|
||||
app.openapi(unbanUserRoute, unbanUserHandler)
|
||||
app.openapi(getViolationStatsRoute, getViolationStatsHandler)
|
||||
}
|
||||
304
src/endpoints/boss.ts
Normal file
304
src/endpoints/boss.ts
Normal file
@ -0,0 +1,304 @@
|
||||
import { createRoute, OpenAPIHono, z } from '@hono/zod-openapi';
|
||||
import { RouteHandler } from '@hono/zod-openapi';
|
||||
import { boss } from '../jobs/boss/client.js';
|
||||
import { QUEUE_MOCK_JOB } from '../jobs/boss/workers.js';
|
||||
import { HonoEnv } from '../commons/types.js';
|
||||
|
||||
const tags = ['PgBoss'];
|
||||
|
||||
// OpenAPI spec for POST /api/boss/job — submit a mock job to PgBoss.
export const postBossJobRoute = createRoute({
    method: 'post',
    path: '/api/boss/job',
    tags,
    request: {
        body: {
            content: {
                'application/json': {
                    schema: z.object({
                        // How long the mock job should run before completing
                        delayMs: z.number().default(100),
                        // Force the job to fail (for retry testing)
                        shouldFail: z.boolean().default(false),
                        // Override PgBoss's retry count for this job
                        retryLimit: z.number().optional()
                    }),
                },
            },
        },
    },
    responses: {
        200: {
            content: {
                'application/json': {
                    schema: z.object({
                        // PgBoss may return null (e.g. throttled/deduplicated sends)
                        jobId: z.string().nullable(),
                        message: z.string()
                    }),
                },
            },
            description: 'PgBoss job started',
        },
        500: {
            content: {
                'application/json': {
                    schema: z.object({ error: z.string() }),
                },
            },
            description: 'Server error',
        },
    },
});
|
||||
|
||||
export const postBossJobHandler: RouteHandler<typeof postBossJobRoute, HonoEnv> = async (c) => {
|
||||
if (!boss) {
|
||||
// Check if there was an initialization error we can report
|
||||
const { bossInitError } = await import('../jobs/boss/client.js');
|
||||
return c.json({ error: `PgBoss not initialized. Init error: ${bossInitError}` }, 500);
|
||||
}
|
||||
|
||||
const { delayMs, shouldFail, retryLimit } = c.req.valid('json');
|
||||
const payload = { delayMs, shouldFail };
|
||||
const options = retryLimit !== undefined ? { retryLimit } : {};
|
||||
try {
|
||||
const jobId = await boss.send(QUEUE_MOCK_JOB, payload, options);
|
||||
return c.json({ jobId, message: 'Job submitted to PgBoss' }, 200);
|
||||
} catch (error: any) {
|
||||
return c.json({ error: error.message }, 500);
|
||||
}
|
||||
};
|
||||
|
||||
export const getBossJobRoute = createRoute({
|
||||
method: 'get',
|
||||
path: '/api/boss/job/{id}',
|
||||
tags,
|
||||
request: {
|
||||
params: z.object({
|
||||
id: z.string(),
|
||||
}),
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
data: z.any(),
|
||||
state: z.string(),
|
||||
createdOn: z.string().optional(),
|
||||
startedOn: z.string().optional(),
|
||||
completedOn: z.string().optional(),
|
||||
}),
|
||||
},
|
||||
},
|
||||
description: 'Job status',
|
||||
},
|
||||
404: {
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z.object({ error: z.string() }),
|
||||
},
|
||||
},
|
||||
description: 'Job not found',
|
||||
},
|
||||
500: {
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z.object({ error: z.string() }),
|
||||
},
|
||||
},
|
||||
description: 'Server error',
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
/**
 * Fetch a PgBoss job row by id straight from the pgboss.job table.
 * Uses a raw pg connection per request because the job table isn't exposed
 * through the usual API layer.
 */
export const getBossJobHandler: RouteHandler<typeof getBossJobRoute, HonoEnv> = async (c) => {
    const { id } = c.req.valid('param');

    // Use pg directly to bypass PostgREST schema permissions
    const { Client } = await import('pg');
    // NOTE(review): a new connection per request — fine for an admin/debug
    // endpoint, but a pool would be preferable under load.
    const client = new Client({ connectionString: process.env.DATABASE_URL, });

    try {
        await client.connect();
        // Parameterized query — id is never interpolated into the SQL string.
        const result = await client.query('SELECT * FROM pgboss.job WHERE id = $1', [id]);
        const job = result.rows[0];

        if (!job) {
            return c.json({ error: 'Job not found' }, 404);
        }

        // pg lowercases unquoted column names (createdon etc.); timestamp
        // columns come back as Date objects and are ISO-serialized by c.json.
        return c.json({
            id: job.id,
            name: job.name,
            data: job.data,
            state: job.state,
            createdOn: job.createdon,
            startedOn: job.startedon,
            completedOn: job.completedon,
            output: job.output,
        }, 200);
    } catch (error: any) {
        console.error('Error in getBossJobHandler:', error);
        return c.json({ error: error.message }, 500);
    } finally {
        // Always release the connection; swallow close errors.
        await client.end().catch(() => { });
    }
};
|
||||
|
||||
/** OpenAPI route: POST /api/boss/job/{id}/cancel — cancel a pg-boss job. */
export const cancelBossJobRoute = createRoute({
    method: 'post',
    path: '/api/boss/job/{id}/cancel',
    tags,
    request: {
        // pg-boss job id taken from the URL path
        params: z.object({
            id: z.string(),
        }),
    },
    responses: {
        200: {
            content: {
                'application/json': {
                    schema: z.object({ message: z.string() }),
                },
            },
            description: 'Job cancelled',
        },
        500: {
            content: {
                'application/json': {
                    schema: z.object({ error: z.string() }),
                },
            },
            description: 'Server error',
        },
    },
});
|
||||
|
||||
export const cancelBossJobHandler: RouteHandler<typeof cancelBossJobRoute, HonoEnv> = async (c) => {
|
||||
if (!boss) return c.json({ error: 'PgBoss not initialized' }, 500);
|
||||
const { id } = c.req.valid('param');
|
||||
try {
|
||||
await boss.cancel(QUEUE_MOCK_JOB, id);
|
||||
return c.json({ message: 'Job cancelled' }, 200);
|
||||
} catch (error: any) {
|
||||
return c.json({ error: error.message }, 500);
|
||||
}
|
||||
};
|
||||
|
||||
/** OpenAPI route: POST /api/boss/job/{id}/resume — resume a cancelled pg-boss job. */
export const resumeBossJobRoute = createRoute({
    method: 'post',
    path: '/api/boss/job/{id}/resume',
    tags,
    request: {
        // pg-boss job id taken from the URL path
        params: z.object({
            id: z.string(),
        }),
    },
    responses: {
        200: {
            content: {
                'application/json': {
                    schema: z.object({ message: z.string() }),
                },
            },
            description: 'Job resumed',
        },
        500: {
            content: {
                'application/json': {
                    schema: z.object({ error: z.string() }),
                },
            },
            description: 'Server error',
        },
    },
});
|
||||
|
||||
export const resumeBossJobHandler: RouteHandler<typeof resumeBossJobRoute, HonoEnv> = async (c) => {
|
||||
if (!boss) return c.json({ error: 'PgBoss not initialized' }, 500);
|
||||
const { id } = c.req.valid('param');
|
||||
try {
|
||||
await boss.resume(QUEUE_MOCK_JOB, id);
|
||||
return c.json({ message: 'Job resumed' }, 200);
|
||||
} catch (error: any) {
|
||||
return c.json({ error: error.message }, 500);
|
||||
}
|
||||
};
|
||||
|
||||
/** OpenAPI route: POST /api/boss/job/{id}/complete — mark a pg-boss job complete. */
export const completeBossJobRoute = createRoute({
    method: 'post',
    path: '/api/boss/job/{id}/complete',
    tags,
    request: {
        // pg-boss job id taken from the URL path
        params: z.object({
            id: z.string(),
        }),
    },
    responses: {
        200: {
            content: {
                'application/json': {
                    schema: z.object({ message: z.string() }),
                },
            },
            description: 'Job completed',
        },
        500: {
            content: {
                'application/json': {
                    schema: z.object({ error: z.string() }),
                },
            },
            description: 'Server error',
        },
    },
});
|
||||
|
||||
export const completeBossJobHandler: RouteHandler<typeof completeBossJobRoute, HonoEnv> = async (c) => {
|
||||
if (!boss) return c.json({ error: 'PgBoss not initialized' }, 500);
|
||||
const { id } = c.req.valid('param');
|
||||
try {
|
||||
await boss.complete(QUEUE_MOCK_JOB, id);
|
||||
return c.json({ message: 'Job completed' }, 200);
|
||||
} catch (error: any) {
|
||||
return c.json({ error: error.message }, 500);
|
||||
}
|
||||
};
|
||||
|
||||
/** OpenAPI route: POST /api/boss/job/{id}/fail — mark a pg-boss job failed. */
export const failBossJobRoute = createRoute({
    method: 'post',
    path: '/api/boss/job/{id}/fail',
    tags,
    request: {
        // pg-boss job id taken from the URL path
        params: z.object({
            id: z.string(),
        }),
    },
    responses: {
        200: {
            content: {
                'application/json': {
                    schema: z.object({ message: z.string() }),
                },
            },
            description: 'Job failed',
        },
        500: {
            content: {
                'application/json': {
                    schema: z.object({ error: z.string() }),
                },
            },
            description: 'Server error',
        },
    },
});
|
||||
|
||||
export const failBossJobHandler: RouteHandler<typeof failBossJobRoute, HonoEnv> = async (c) => {
|
||||
if (!boss) return c.json({ error: 'PgBoss not initialized' }, 500);
|
||||
const { id } = c.req.valid('param');
|
||||
try {
|
||||
await boss.fail(QUEUE_MOCK_JOB, id);
|
||||
return c.json({ message: 'Job failed' }, 200);
|
||||
} catch (error: any) {
|
||||
return c.json({ error: error.message }, 500);
|
||||
}
|
||||
};
|
||||
87
src/endpoints/stream.ts
Normal file
87
src/endpoints/stream.ts
Normal file
@ -0,0 +1,87 @@
|
||||
import { createRouteBody } from '../products/serving/routes.js';
|
||||
import { Context } from 'hono';
|
||||
import { streamSSE } from 'hono/streaming';
|
||||
import { z } from '@hono/zod-openapi';
|
||||
import { appEvents, AppEvent } from '../events.js';
|
||||
import { logger } from '../commons/logger.js';
|
||||
|
||||
// OpenAPI route for the public SSE endpoint. createRouteBody positional args:
// (method, path, tags, summary, description, requestBody, responses, isPublic)
export const getStreamRoute = createRouteBody(
    'get',
    '/api/stream',
    ['System'],
    'Stream System Events',
    'Subscribe to real-time updates for categories, posts, and pages.',
    undefined, // no request body for a GET stream
    {
        200: {
            description: 'Event Stream',
            content: {
                'text/event-stream': {
                    schema: z.string()
                }
            }
        }
    },
    true // public
);
|
||||
|
||||
// Track active SSE connections. Entries are added by streamHandler and
// removed by its onAbort callback; broadcastAppUpdate iterates this set.
const connectedClients = new Set<{
    id: string;
    stream: any; // Hono SSEStreamingApi — typed as any; TODO confirm and use the concrete type
}>();
|
||||
|
||||
// Single listener for the entire application
|
||||
const broadcastAppUpdate = async (event: AppEvent) => {
|
||||
const payload = JSON.stringify(event);
|
||||
for (const client of connectedClients) {
|
||||
try {
|
||||
await client.stream.writeSSE({
|
||||
event: event.kind,
|
||||
data: payload
|
||||
});
|
||||
} catch (err) {
|
||||
logger.error({ err, clientId: client.id }, 'Error broadcasting to stream');
|
||||
// Client will be removed by the onAbort handler in the stream handler
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Subscribe exactly once at module load; every 'app-update' event is fanned
// out to all connected SSE clients by broadcastAppUpdate.
appEvents.on('app-update', broadcastAppUpdate);
|
||||
|
||||
export const streamHandler = async (c: Context) => {
|
||||
return streamSSE(c, async (stream) => {
|
||||
const id = crypto.randomUUID();
|
||||
const client = { id, stream };
|
||||
|
||||
connectedClients.add(client);
|
||||
// Send initial connection message
|
||||
await stream.writeSSE({
|
||||
event: 'connected',
|
||||
data: JSON.stringify({ message: 'Connected to event stream', clientId: id })
|
||||
});
|
||||
|
||||
// Keep connection alive & handle cleanup
|
||||
let interval: NodeJS.Timeout;
|
||||
const heartbeatInterval = parseInt(process.env.STREAM_HEARTBEAT_INTERVAL_MS || '30000', 10);
|
||||
|
||||
// Send heartbeat to prevent timeouts
|
||||
interval = setInterval(async () => {
|
||||
try {
|
||||
await stream.writeSSE({ event: 'ping', data: '' });
|
||||
} catch (e) {
|
||||
// connection likely closed
|
||||
}
|
||||
}, heartbeatInterval);
|
||||
|
||||
// Wait until the stream is aborted
|
||||
await new Promise<void>((resolve) => {
|
||||
stream.onAbort(() => {
|
||||
connectedClients.delete(client);
|
||||
clearInterval(interval);
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
});
|
||||
};
|
||||
43
src/events.ts
Normal file
43
src/events.ts
Normal file
@ -0,0 +1,43 @@
|
||||
import { EventEmitter } from 'events';
|
||||
|
||||
|
||||
// Known event types plus arbitrary custom strings.
// `(string & {})` keeps the literal members visible to editor autocomplete;
// a plain `| string` union collapses the literals down to just `string`.
export type EventType = 'category' | 'post' | 'page' | 'system' | (string & {});
|
||||
|
||||
// Envelope published on the app-wide 'app-update' channel (see AppEvents).
export interface AppEvent {
    // Entity/domain the event concerns (e.g. 'post', 'category')
    type: EventType;
    // Routing category; also used as the SSE event name by the broadcaster
    kind: 'cache' | 'system' | 'chat' | 'other';
    action: 'create' | 'update' | 'delete';
    id?: string | null; // Entity ID for per-item invalidation
    // Event payload — shape depends on `type`; TODO confirm and tighten
    data: any;
    // Epoch milliseconds at emit time
    timestamp: number;
}
|
||||
|
||||
class AppEvents extends EventEmitter {
|
||||
private static instance: AppEvents;
|
||||
|
||||
private constructor() {
|
||||
super();
|
||||
// this.setMaxListeners(10); // Default is fine now
|
||||
}
|
||||
|
||||
public static getInstance(): AppEvents {
|
||||
if (!AppEvents.instance) {
|
||||
AppEvents.instance = new AppEvents();
|
||||
}
|
||||
return AppEvents.instance;
|
||||
}
|
||||
|
||||
public emitUpdate(type: EventType, action: 'create' | 'update' | 'delete', data: any, kind: AppEvent['kind'] = 'cache') {
|
||||
const event: AppEvent = {
|
||||
kind,
|
||||
type,
|
||||
action,
|
||||
id: data?.id ?? null,
|
||||
data,
|
||||
timestamp: Date.now()
|
||||
};
|
||||
this.emit('app-update', event);
|
||||
}
|
||||
}
|
||||
|
||||
export const appEvents = AppEvents.getInstance();
|
||||
271
src/index.ts
Normal file
271
src/index.ts
Normal file
@ -0,0 +1,271 @@
|
||||
|
||||
import { z } from './zod-setup.js'
|
||||
import { serve } from '@hono/node-server'
|
||||
import { OpenAPIHono } from '@hono/zod-openapi'
|
||||
import { swaggerUI } from '@hono/swagger-ui'
|
||||
import { Scalar } from '@scalar/hono-api-reference'
|
||||
import { cors } from 'hono/cors'
|
||||
import dotenv from 'dotenv'
|
||||
import path from 'path'
|
||||
|
||||
// Load environment variables based on NODE_ENV (.env.production vs .env).
// NOTE(review): ESM `import` statements are hoisted above this call, so every
// statically imported module (logger, middleware, …) is evaluated BEFORE
// dotenv runs — any module reading process.env at import time will not see
// values from the .env file. Confirm this is intended, or move env loading
// into a side-effect import listed first.
const envFile = process.env.NODE_ENV === 'production' ? '.env.production' : '.env'
dotenv.config({ path: path.resolve(process.cwd(), envFile) })
|
||||
|
||||
import { logger } from './commons/logger.js'
|
||||
import { WebSocketManager } from './commons/websocket.js';
|
||||
|
||||
// Import middleware
|
||||
import { blocklistMiddleware } from './middleware/blocklist.js'
|
||||
import { autoBanMiddleware } from './middleware/autoBan.js'
|
||||
import { optionalAuthMiddleware, adminMiddleware } from './middleware/auth.js'
|
||||
import { analyticsMiddleware } from './middleware/analytics.js'
|
||||
import { apiRateLimiter } from './middleware/rateLimiter.js'
|
||||
|
||||
import { compress } from 'hono/compress'
|
||||
import { secureHeaders } from 'hono/secure-headers'
|
||||
|
||||
// Import endpoints
|
||||
|
||||
import { registerProductRoutes, startProducts } from './products/registry.js'
|
||||
|
||||
const app = new OpenAPIHono()
// Middleware — registration order matters: CORS first, then analytics,
// auth, compression, and finally security headers.
// NOTE(review): `origin: '*'` combined with `credentials: true` is
// contradictory — browsers reject credentialed responses when
// Access-Control-Allow-Origin is the wildcard; confirm whether an explicit
// origin list is needed.
app.use('/*', cors({
    origin: '*',
    allowMethods: ['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'],
    allowHeaders: ['Content-Type', 'Authorization', 'x-stainless-os', 'x-stainless-lang', 'x-stainless-arch', 'x-stainless-package-version', 'x-stainless-runtime', 'x-stainless-runtime-version', 'x-stainless-helper-method', 'x-stainless-retry-count'],
    exposeHeaders: ['Content-Length', 'X-Cache'],
    maxAge: 600,
    credentials: true,
}))

// Apply blocklist to all API routes (before rate limiting) — currently disabled
//app.use('/api/*', blocklistMiddleware)
// Apply auto-ban middleware (checks ban.json for auto-banned IPs/users) — currently disabled
// app.use('/api/*', autoBanMiddleware)
// Apply Analytics (tracks requests to file)
app.use('*', analyticsMiddleware)

// Apply Authentication & Authorization
app.use('/api/*', optionalAuthMiddleware)
app.use('/api/*', adminMiddleware)
// Rate limiting — currently disabled
// app.use('/api/*', apiRateLimiter)

// Apply compression to all routes (API + Static Assets)
app.use('*', compress())
// Relaxed isolation headers so the app can be embedded/framed; CSP still
// sets frame-ancestors explicitly (self plus any origin).
app.use(secureHeaders({
    crossOriginResourcePolicy: false,
    crossOriginOpenerPolicy: false,
    crossOriginEmbedderPolicy: false,
    xFrameOptions: false,
    contentSecurityPolicy: {
        frameAncestors: ["'self'", "*"]
    }
}))
|
||||
|
||||
|
||||
// Register API routes
|
||||
import { createLogRoutes, createLogHandlers } from './commons/log-routes-factory.js'
|
||||
import { registerAssetRoutes } from './serve-assets.js'
|
||||
|
||||
// System Logs — generic log-route factory wired to the root app.log file.
const { getRoute: sysGetLogRoute, streamRoute: sysStreamLogRoute } = createLogRoutes('System', '/api/logs/system');
const { getHandler: sysGetLogHandler, streamHandler: sysStreamLogHandler } = createLogHandlers(path.join(process.cwd(), 'app.log'));

app.openapi(sysGetLogRoute, sysGetLogHandler);
app.openapi(sysStreamLogRoute, sysStreamLogHandler);

// Register Product Routes
await registerProductRoutes(app)
// Products themselves are initialized after the PgBoss check further below.
|
||||
|
||||
|
||||
// API Documentation (Development Only)
const isDevelopment = process.env.NODE_ENV !== 'production';

if (isDevelopment) {
    logger.info('Registering API documentation endpoints (development mode)');

    // OpenAPI 3.1 spec document (consumed by Swagger UI and Scalar below)
    app.doc31('/doc', {
        openapi: '3.1.0',
        info: {
            version: '1.0.0',
            title: 'Images API',
        },
        components: {
            securitySchemes: {
                bearerAuth: {
                    type: 'http',
                    scheme: 'bearer',
                    bearerFormat: 'JWT',
                },
            },
        },
        security: [
            {
                bearerAuth: [],
            },
        ],
    } as any); // NOTE(review): cast hides spec-config type mismatches — TODO tighten

    // Swagger UI
    app.get('/ui', swaggerUI({ url: '/doc' }));

    // Scalar API Reference
    app.get('/reference', Scalar({
        spec: {
            url: '/doc',
        },
        authentication: {
            preferredSecurityScheme: 'bearerAuth',
            httpBearer: {
                // Pre-filled bearer token for the reference UI (dev convenience)
                token: process.env.SCALAR_AUTH_TOKEN || '',
            },
        },
    } as any));

    // Alternative: the same Scalar reference also mounted at /api/reference
    app.get('/api/reference', Scalar({
        spec: {
            url: '/doc',
        },
        authentication: {
            preferredSecurityScheme: 'bearerAuth',
            httpBearer: {
                token: process.env.SCALAR_AUTH_TOKEN || '',
            }
        },
    } as any));
} else {
    logger.info('API documentation endpoints disabled (production mode)');
}
|
||||
|
||||
import {
|
||||
postBossJobRoute, postBossJobHandler,
|
||||
getBossJobRoute, getBossJobHandler,
|
||||
cancelBossJobRoute, cancelBossJobHandler,
|
||||
resumeBossJobRoute, resumeBossJobHandler,
|
||||
completeBossJobRoute, completeBossJobHandler,
|
||||
failBossJobRoute, failBossJobHandler
|
||||
} from './endpoints/boss.js'
|
||||
|
||||
import { startBoss } from './jobs/boss/client.js'
|
||||
import { registerMockWorkers } from './jobs/boss/workers.js'
|
||||
|
||||
|
||||
// Register PgBoss routes.
// NOTE(review): prefer @ts-expect-error over @ts-ignore so the suppression
// fails loudly once the underlying route/handler type mismatch is fixed.
// @ts-ignore - Route type mismatch
app.openapi(postBossJobRoute, postBossJobHandler)
// @ts-ignore - Route type mismatch
app.openapi(getBossJobRoute, getBossJobHandler)
// @ts-ignore - Route type mismatch
app.openapi(cancelBossJobRoute, cancelBossJobHandler)
// @ts-ignore - Route type mismatch
app.openapi(resumeBossJobRoute, resumeBossJobHandler)
// @ts-ignore - Route type mismatch
app.openapi(completeBossJobRoute, completeBossJobHandler)
// @ts-ignore - Route type mismatch
app.openapi(failBossJobRoute, failBossJobHandler)

// Register Streaming Route (public SSE endpoint)
import { getStreamRoute, streamHandler } from './endpoints/stream.js'
app.openapi(getStreamRoute, streamHandler)
|
||||
|
||||
// Register Admin Routes
import { registerAdminRoutes } from './endpoints/admin.js'
import { AdminEndpointRegistry } from './commons/registry.js'

// Mark endpoints admin-only in the registry so the admin middleware can
// enforce access when these paths are hit.
// Restart endpoint
AdminEndpointRegistry.register('/api/admin/system/restart', 'POST')
// Ban management endpoints
AdminEndpointRegistry.register('/api/admin/bans', 'GET')
AdminEndpointRegistry.register('/api/admin/bans/unban-ip', 'POST')
AdminEndpointRegistry.register('/api/admin/bans/unban-user', 'POST')
AdminEndpointRegistry.register('/api/admin/bans/violations', 'GET')
// Analytics endpoints
AdminEndpointRegistry.register('/api/analytics', 'GET')
AdminEndpointRegistry.register('/api/analytics/stream', 'GET')
AdminEndpointRegistry.register('/api/analytics', 'DELETE')

registerAdminRoutes(app)

// Register Asset Routes (Static files, SW, SPA fallback)
// IMPORTANT: This MUST be registered AFTER all API routes to prevent the catch-all from intercepting API calls
registerAssetRoutes(app);
|
||||
|
||||
|
||||
// Initialize PgBoss and Products.
// startProducts() is attempted in every branch so the app still serves
// requests when the job queue is unavailable.
try {
    const boss = await startBoss();
    if (boss) {
        // NOTE(review): not awaited — registration errors become unhandled
        // rejections; confirm fire-and-forget is intentional.
        registerMockWorkers();
        try {
            await startProducts(boss);
        } catch (err) {
            logger.error({ err }, 'Failed to init products with Boss');
        }
    } else {
        // Fallback: Start products without Boss
        logger.info('Starting products without PgBoss');
        await startProducts();
    }
} catch (err) {
    logger.error({ err }, 'Failed to init PgBoss');
    // Fallback: Start products without Boss on error
    logger.info('Starting products without PgBoss (after error)');
    await startProducts();
}
|
||||
|
||||
const port = parseInt(process.env.PORT || '3333', 10)
|
||||
logger.info(`Server is running on port ${port}`)
|
||||
// Only start the server if not in test mode
|
||||
if (process.env.NODE_ENV !== 'test' && !process.env.VITEST) {
|
||||
const server = serve({
|
||||
fetch: app.fetch,
|
||||
port
|
||||
})
|
||||
|
||||
// Initialize WebSocket Server
|
||||
if (process.env.ENABLE_WEBSOCKETS === 'true') {
|
||||
WebSocketManager.getInstance().init(server as any);
|
||||
}
|
||||
|
||||
let isShuttingDown = false;
|
||||
const gracefulShutdown = (signal: string) => {
|
||||
if (isShuttingDown) {
|
||||
logger.warn('Already shutting down...');
|
||||
return;
|
||||
}
|
||||
isShuttingDown = true;
|
||||
|
||||
// Force exit after a timeout
|
||||
const timeout = setTimeout(() => {
|
||||
logger.warn('Shutdown timed out. Forcing exit.');
|
||||
process.exit(1);
|
||||
}, 5000);
|
||||
|
||||
server.close(async (err) => {
|
||||
if (err) {
|
||||
logger.error({ err }, 'Error closing HTTP server');
|
||||
} else {
|
||||
console.log('HTTP server closed.');
|
||||
}
|
||||
|
||||
clearTimeout(timeout);
|
||||
console.log('Gracefully shut down.');
|
||||
process.exit(err ? 1 : 0);
|
||||
});
|
||||
};
|
||||
|
||||
process.on('SIGINT', () => gracefulShutdown('SIGINT'));
|
||||
process.on('SIGTERM', () => gracefulShutdown('SIGTERM'));
|
||||
process.on('SIGBREAK', () => gracefulShutdown('SIGBREAK')); // For Windows
|
||||
}
|
||||
|
||||
export { app }
|
||||
1619
src/integrations/supabase/schemas.ts
Normal file
1619
src/integrations/supabase/schemas.ts
Normal file
File diff suppressed because it is too large
Load Diff
1767
src/integrations/supabase/types.ts
Normal file
1767
src/integrations/supabase/types.ts
Normal file
File diff suppressed because it is too large
Load Diff
58
src/jobs/boss/AbstractWorker.ts
Normal file
58
src/jobs/boss/AbstractWorker.ts
Normal file
@ -0,0 +1,58 @@
|
||||
import { Job } from 'pg-boss';
|
||||
import { supabase } from '../../commons/supabase.js';
|
||||
import { logger } from '../../commons/logger.js';
|
||||
import EventEmitter from 'events';
|
||||
|
||||
|
||||
export abstract class AbstractWorker<TData> {
|
||||
abstract readonly queueName: string;
|
||||
readonly queueOptions?: any; // pg-boss QueueOptions
|
||||
protected emitter?: EventEmitter;
|
||||
|
||||
// Cost calculation can be static or dynamic based on results
|
||||
abstract calculateCost(job: Job<TData>, result?: any): number;
|
||||
|
||||
// The core business logic
|
||||
protected abstract process(job: Job<TData>): Promise<any>;
|
||||
|
||||
// Main entry point for pg-boss
|
||||
public async handler(jobOrJobs: Job<TData> | Job<TData>[]) {
|
||||
|
||||
const job = Array.isArray(jobOrJobs) ? jobOrJobs[0] : jobOrJobs;
|
||||
|
||||
// Safety check
|
||||
if (!job) {
|
||||
logger.error(`[${this.queueName}] Received null or empty job`);
|
||||
return;
|
||||
}
|
||||
|
||||
const jobId = job.id;
|
||||
const usageId = (job.data as any)?.usageId;
|
||||
|
||||
logger.info(`[${this.queueName}] Starting job ${jobId}`);
|
||||
|
||||
try {
|
||||
// 2. Execute Business Logic
|
||||
const result = await this.process(job);
|
||||
|
||||
// 3. Calculate Cost
|
||||
const cost = this.calculateCost(job, result);
|
||||
|
||||
if (this.emitter) {
|
||||
this.emitter.emit('job:complete', {
|
||||
jobId,
|
||||
result
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
return result;
|
||||
|
||||
} catch (error: any) {
|
||||
|
||||
logger.error({ err: error }, `[${this.queueName}] Job failed`);
|
||||
|
||||
throw error; // Let pg-boss handle retry/failure
|
||||
}
|
||||
}
|
||||
}
|
||||
44
src/jobs/boss/client.ts
Normal file
44
src/jobs/boss/client.ts
Normal file
@ -0,0 +1,44 @@
|
||||
import { PgBoss } from 'pg-boss';
|
||||
import { logger } from '../../commons/logger.js';
|
||||
|
||||
const connectionString = process.env.DATABASE_URL;

if (!connectionString) {
    logger.warn('DATABASE_URL not found, PgBoss will not be initialized');
}

// Singleton PgBoss client, or null when no DATABASE_URL is configured.
// NOTE(review): `__test__enableSpies` is an undocumented internal flag cast
// through `any` — confirm the installed pg-boss version still supports it.
export const boss = connectionString ? new PgBoss({
    connectionString,
    __test__enableSpies: true
} as any) : null;
// Populated by startBoss() on failure so callers can inspect the root cause.
export let bossInitError: Error | null = null;
|
||||
|
||||
/**
 * Start the PgBoss instance.
 * No-op (returns undefined) when DATABASE_URL is missing; on start failure,
 * records the error in bossInitError and returns undefined instead of throwing.
 */
export async function startBoss() {
    if (!boss) return;

    boss.on('error', (error: Error) => logger.error({ error }, 'PgBoss error'));

    try {
        await boss.start();
        logger.info('PgBoss started');
        return boss;
    } catch (error: any) {
        bossInitError = error;
        logger.error({ error }, 'Failed to start PgBoss');
        // NOTE(review): debug artifact written into the working directory —
        // consider removing before production or routing through the logger.
        const fs = await import('fs');
        fs.writeFileSync('debug_pgboss_error.txt', JSON.stringify(error, Object.getOwnPropertyNames(error)));
    }
}
|
||||
|
||||
/**
 * Stop PgBoss with a 5s drain timeout. Safe to call when boss was never
 * initialized.
 * NOTE(review): uses console.* while startBoss uses the pino logger — align
 * for consistent log routing; also `console.error({ error }, '…')` prints
 * both arguments but does not merge them like pino does.
 */
export async function stopBoss() {
    if (!boss) {
        console.info('PgBoss not initialized, skipping stop.')
        return
    }
    try {
        await boss.stop({ timeout: 5000 }); // 5s timeout
        console.info('PgBoss stopped');
    } catch (error) {
        console.error({ error }, 'Failed to stop PgBoss');
    }
}
|
||||
25
src/jobs/boss/registry.ts
Normal file
25
src/jobs/boss/registry.ts
Normal file
@ -0,0 +1,25 @@
|
||||
import { Job } from 'pg-boss';
|
||||
|
||||
type WorkerHandler = (job: Job<any>) => Promise<any>;
|
||||
|
||||
interface WorkerConfig {
|
||||
queueName: string;
|
||||
handler: WorkerHandler;
|
||||
options?: any;
|
||||
}
|
||||
|
||||
export class WorkerRegistry {
|
||||
private static workers: Map<string, WorkerConfig> = new Map();
|
||||
|
||||
static register(queueName: string, handler: WorkerHandler, options?: any) {
|
||||
this.workers.set(queueName, { queueName, handler, options });
|
||||
}
|
||||
|
||||
static get(queueName: string): WorkerConfig | undefined {
|
||||
return this.workers.get(queueName);
|
||||
}
|
||||
|
||||
static getAll(): WorkerConfig[] {
|
||||
return Array.from(this.workers.values());
|
||||
}
|
||||
}
|
||||
126
src/jobs/boss/search/SearchWorker.ts
Normal file
126
src/jobs/boss/search/SearchWorker.ts
Normal file
@ -0,0 +1,126 @@
|
||||
import { Job } from 'pg-boss';
|
||||
import { AbstractWorker } from '../AbstractWorker.js';
|
||||
import { googleMaps, ResolveFlags } from '@polymech/search';
|
||||
import { supabase } from '../../../commons/supabase.js';
|
||||
import { logger } from '../../../commons/logger.js';
|
||||
import { Worker } from '../../../commons/decorators.js';
|
||||
|
||||
// Payload for the 'search-worker' queue.
export interface SearchJobData {
    // Free-text search term passed to the maps provider
    query: string;
    // "Search from" location string
    location: string;
    filters?: {
        filterCity?: string;
        filterContinent?: string;
        filterType?: string;
        // Provider request concurrency (worker defaults this to 5)
        concurrency?: number;
    };
    // Owner recorded on upserted locations
    userId: string;
    // NOTE(review): not consumed by SearchWorker.process — confirm still needed
    usageId?: string;
}
|
||||
|
||||
@Worker('search-worker')
export class SearchWorker extends AbstractWorker<SearchJobData> {
    readonly queueName = 'search-worker';

    // NOTE(review): process() returns { count, placeIds } — an object with no
    // .length — so `result?.length` is always undefined and the cost is always
    // 1. Confirm whether `result?.count` was intended.
    calculateCost(job: Job<SearchJobData>, result: any): number {
        // Example: 1 credit per search + 0.1 per result
        return 1 + (result?.length || 0) * 0.1;
    }

    /**
     * Run a Google Maps search, upsert the resulting locations (preserving
     * previously collected meta such as emails), and cache the search keyed
     * by a hash of its input. Returns { count, placeIds }.
     */
    protected async process(job: Job<SearchJobData>) {
        const { query, location, filters, userId } = job.data;

        // Call existing logic (refactored from endpoints/competitors/index.ts)
        const results = await googleMaps({
            query,
            searchFrom: location,
            resolve: [ResolveFlags.PHOTOS],
            filterCity: filters?.filterCity,
            filterContinent: filters?.filterContinent,
            filterType: filters?.filterType,
            concurrency: filters?.concurrency || 5
        });

        // Flatten results (provider may return nested arrays)
        const flatResults = results ? results.flat(Infinity) : [];

        // Map and Upsert Locations — only rows with a place_id are kept
        const locationsToUpsert = flatResults
            .filter((r: any) => r.place_id)
            .map((r: any) => ({
                place_id: r.place_id,
                title: r.title,
                description: r.description,
                address: r.address,
                gps_coordinates: r.gps_coordinates,
                phone: r.phone,
                website: r.website,
                operating_hours: r.operating_hours,
                thumbnail: r.thumbnail,
                types: r.types,
                raw_data: r,
                continent: r.geo?.continent,
                country: r.geo?.countryName,
                city: r.geo?.city,
                updated_at: new Date().toISOString(), // Update timestamp
                user_id: userId
            }));

        // Fetch existing locations to preserve meta (emails)
        const placeIds = locationsToUpsert.map(l => l.place_id);
        if (placeIds.length > 0) {
            const { data: existingLocations } = await supabase
                .from('locations')
                .select('place_id, meta')
                .in('place_id', placeIds);

            if (existingLocations) {
                const metaMap = new Map(existingLocations.map(l => [l.place_id, l.meta]));
                locationsToUpsert.forEach(l => {
                    const existingMeta = metaMap.get(l.place_id);
                    if (existingMeta) {
                        // Merge existing meta into raw_data for the client
                        l.raw_data.meta = {
                            ...(l.raw_data.meta || {}),
                            ...existingMeta
                        };
                    }
                });
            }
        }

        if (locationsToUpsert.length > 0) {
            const { error: upsertError } = await supabase
                .from('locations')
                .upsert(locationsToUpsert, { onConflict: 'place_id' });

            if (upsertError) {
                logger.error(upsertError, 'Error upserting locations');
                throw upsertError;
            }
        }

        // Store Search (for caching)
        // Re-create hash logic from handler; key order is normalized so
        // logically-equal inputs hash identically.
        const { createHash } = await import('crypto');
        const inputParams = { query, location };
        const normalizedInput = JSON.stringify(inputParams, Object.keys(inputParams).sort());
        const inputHash = createHash('sha256').update(normalizedInput).digest('hex');

        const { error: searchStoreError } = await supabase
            .from('searches')
            .upsert({
                input_hash: inputHash,
                input_params: inputParams,
                result_place_ids: placeIds,
                created_at: new Date().toISOString()
            }, { onConflict: 'input_hash' });

        if (searchStoreError) {
            logger.error(searchStoreError, `Error storing search ${searchStoreError.message}`);
            // Don't fail the job just because caching failed
        }

        return { count: locationsToUpsert.length, placeIds };
    }
}
|
||||
40
src/jobs/boss/workers.ts
Normal file
40
src/jobs/boss/workers.ts
Normal file
@ -0,0 +1,40 @@
|
||||
import { boss } from './client.js';
|
||||
import { getAllWorkers } from '@/products/registry.js';
|
||||
import { logger } from '@/commons/logger.js';
|
||||
|
||||
// Queue name used by the /api/boss/* test endpoints and the mock worker.
export const QUEUE_MOCK_JOB = 'mock-job';

// Payload accepted by the mock worker.
interface MockJobData {
    // NOTE(review): declared but never read by the worker — confirm still needed
    subtasks: number;
    // Simulated processing time in milliseconds (worker defaults to 100)
    delayMs: number;
    // When true the job throws, exercising retry/failure paths
    shouldFail: boolean;
}
|
||||
|
||||
export async function registerMockWorkers() {
|
||||
if (!boss) return;
|
||||
|
||||
// Product workers are now registered by the products themselves in AbstractProduct.start()
|
||||
|
||||
await boss.createQueue(QUEUE_MOCK_JOB);
|
||||
await boss.work<MockJobData>(QUEUE_MOCK_JOB, async (jobs: any) => {
|
||||
// PgBoss might pass an array of jobs or a single job depending on config/version
|
||||
const job = Array.isArray(jobs) ? jobs[0] : jobs;
|
||||
|
||||
const data = job.data || {};
|
||||
const { delayMs = 100, shouldFail = false } = data;
|
||||
const jobId = job.id;
|
||||
|
||||
logger.info({ jobId, data }, 'Processing PgBoss mock job');
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, delayMs));
|
||||
|
||||
if (shouldFail) {
|
||||
throw new Error('Simulated PgBoss job failure');
|
||||
}
|
||||
|
||||
logger.info({ jobId }, 'PgBoss mock job completed');
|
||||
return { success: true };
|
||||
});
|
||||
|
||||
logger.info('PgBoss workers registered');
|
||||
}
|
||||
5
src/lib/analytics-emitter.ts
Normal file
5
src/lib/analytics-emitter.ts
Normal file
@ -0,0 +1,5 @@
|
||||
import { EventEmitter } from 'events';

// Dedicated event bus for streaming analytics entries to live subscribers,
// kept separate from the app-wide event bus.
class AnalyticsEmitter extends EventEmitter { }

export const analyticsEmitter = new AnalyticsEmitter();
|
||||
171
src/middleware/analytics.ts
Normal file
171
src/middleware/analytics.ts
Normal file
@ -0,0 +1,171 @@
|
||||
import { Context, Next } from 'hono';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import axios from 'axios'
|
||||
import { analyticsEmitter } from '../lib/analytics-emitter.js';
|
||||
|
||||
// import { isBotRequest, isAIRequest } from '../products/serving/bots.js';
|
||||
|
||||
// Append-only JSONL sink for request analytics.
const ANALYTICS_FILE = path.resolve(process.cwd(), 'logs/analytics.jsonl');

// Extensions to ignore (static assets are not tracked)
const IGNORED_EXTENSIONS = new Set([
    '.js', '.css', '.png', '.jpg', '.jpeg', '.gif', '.ico', '.svg', '.woff', '.woff2', '.ttf', '.eot', '.map'
]);
// Fallback geo record used when reverse lookup is disabled or fails.
export const REVERSE_DEFAULT = { continent: 'unknown', countryName: 'unknown', city: 'unknown' }
|
||||
|
||||
// Response shape of the ip-geolocation API used by `reverse` below.
// Field names intentionally mirror the remote payload.
export interface Geo {
    latitude: number
    lookupSource: string
    longitude: number
    localityLanguageRequested: string
    continent: string
    continentCode: string
    countryName: string
    countryCode: string
    principalSubdivision: string
    principalSubdivisionCode: string
    city: string
    locality: string
    postcode: string
    plusCode: string
    localityInfo: LocalityInfo
}
|
||||
|
||||
// Nested locality detail from the geolocation API response.
export interface LocalityInfo {
    administrative: Administrative[]
    informative: Informative[]
}

// Administrative region entry (country, state, municipality, …).
export interface Administrative {
    name: string
    description: string
    isoName?: string
    order: number
    adminLevel: number
    isoCode?: string
    wikidataId: string
    geonameId: number
}

// Informative (non-administrative) region entry; most fields are optional.
export interface Informative {
    name: string
    description?: string
    isoName?: string
    order: number
    isoCode?: string
    wikidataId?: string
    geonameId?: number
}
|
||||
|
||||
const GEO_CACHE_FILE = path.resolve(process.cwd(), 'cache/geoip.json');
|
||||
|
||||
// Simple in-memory cache to reduce disk I/O, initialized on first use
|
||||
let geoCache: Record<string, any> | null = null;
|
||||
|
||||
const loadGeoCache = () => {
|
||||
if (geoCache) return geoCache;
|
||||
try {
|
||||
if (fs.existsSync(GEO_CACHE_FILE)) {
|
||||
const data = fs.readFileSync(GEO_CACHE_FILE, 'utf-8');
|
||||
geoCache = JSON.parse(data);
|
||||
} else {
|
||||
geoCache = {};
|
||||
}
|
||||
} catch (e) {
|
||||
console.error('Error loading geo cache', e);
|
||||
geoCache = {};
|
||||
}
|
||||
return geoCache;
|
||||
};
|
||||
|
||||
const saveGeoCache = (ip: string, data: any) => {
|
||||
if (!geoCache) geoCache = {};
|
||||
geoCache[ip] = data;
|
||||
|
||||
// Ensure directory exists
|
||||
const dir = path.dirname(GEO_CACHE_FILE);
|
||||
if (!fs.existsSync(dir)) {
|
||||
fs.mkdirSync(dir, { recursive: true });
|
||||
}
|
||||
|
||||
// Write to file (async to not block)
|
||||
fs.promises.writeFile(GEO_CACHE_FILE, JSON.stringify(geoCache, null, 2)).catch(err => {
|
||||
console.error('Error saving geo cache', err);
|
||||
});
|
||||
};
|
||||
|
||||
/**
 * Reverse-geocode an IP address.
 *
 * Currently STUBBED: always returns REVERSE_DEFAULT without calling the
 * external API. The previous BigDataCloud-backed implementation (with
 * file-backed caching) is kept below, commented out, for when the
 * lookup is re-enabled. `opts` is unused while the stub is active.
 */
export const reverse = async (ip: string, opts: any) => {
  return REVERSE_DEFAULT;
  /*
  const cache = loadGeoCache();
  if (cache && cache[ip]) {
    return cache[ip];
  }

  const config = CONFIG_DEFAULT() as any
  try {
    const q = `https://api-bdc.net/data/ip-geolocation?ip=${ip}&localityLanguage=en&key=${config.bigdata.key}`
    const ret = await axios.get(q) || { data: REVERSE_DEFAULT }
    const data = ret.data || REVERSE_DEFAULT
    saveGeoCache(ip, data);
    return data;
  } catch (e: any) {
    logger.error('Error reverse geocoding', e.message)
    return REVERSE_DEFAULT
  }
  */
}
|
||||
|
||||
/**
 * Request analytics logger (Hono middleware).
 *
 * Runs AFTER the downstream handler (`await next()`) so the response
 * status is available; the log write itself is fire-and-forget, so the
 * response is never delayed by disk I/O. Static-asset requests are
 * filtered out by extension and by path prefix.
 */
export async function analyticsMiddleware(c: Context, next: Next) {
  await next(); // let the request finish first so c.res.status is populated

  try {
    const url = new URL(c.req.url);
    const pathname = url.pathname;
    const extension = path.extname(pathname).toLowerCase();

    // Filter static assets by file extension
    if (IGNORED_EXTENSIONS.has(extension)) {
      return;
    }

    // Additional check for common static paths if they don't have extensions
    if (pathname.startsWith('/assets/') || pathname.startsWith('/static/')) {
      return;
    }

    // NOTE(review): the final fallback is a hard-coded public IP — confirm
    // this placeholder is intended for local/dev traffic.
    const ip = c.req.header('x-forwarded-for') || c.req.header('cf-connecting-ip') || '92.176.215.140'
    // Geo lookup currently disabled; reverse() is stubbed (see above).
    const geo = REVERSE_DEFAULT; // || ip !== 'unknown' ? await reverse(ip, CONFIG_DEFAULT()) : REVERSE_DEFAULT
    const userAgent = c.req.header('user-agent');
    const entry: any = {
      timestamp: new Date().toISOString(),
      method: c.req.method,
      path: pathname,
      status: c.res.status,
      ip,
      userAgent,
      // isBot: isBotRequest(userAgent),
      // isAI: isAIRequest(userAgent),
      referer: c.req.header('referer'),
      userId: c.get('userId'),
      geo
    };

    const line = JSON.stringify(entry) + '\n';
    // Emit event for real-time streaming
    analyticsEmitter.emit('log', entry);
    // Fire and forget write
    fs.promises.appendFile(ANALYTICS_FILE, line).catch(err => {
      console.error('Failed to write to analytics file:', err);
    });

  } catch (err) {
    console.error('Error in analytics middleware:', err);
  }
}
|
||||
127
src/middleware/auth.ts
Normal file
127
src/middleware/auth.ts
Normal file
@ -0,0 +1,127 @@
|
||||
import { Context, Next } from 'hono';
|
||||
import { securityLogger as logger } from '../commons/logger.js';
|
||||
import { PublicEndpointRegistry, AdminEndpointRegistry } from '../commons/registry.js';
|
||||
import { getUserCached, supabase } from '../commons/supabase.js';
|
||||
|
||||
|
||||
/**
|
||||
* Strict authentication middleware – requires a valid Bearer token.
|
||||
*/
|
||||
export async function authMiddleware(c: Context, next: Next) {
|
||||
const authHeader = c.req.header('authorization');
|
||||
if (!authHeader?.startsWith('Bearer ')) {
|
||||
return c.json({ error: 'Unauthorized - Missing or invalid authorization header' }, 401);
|
||||
}
|
||||
const token = authHeader.substring(7);
|
||||
try {
|
||||
const user = await getUserCached(token);
|
||||
if (!user) {
|
||||
return c.json({ error: 'Invalid or expired token' }, 401);
|
||||
}
|
||||
c.set('userId', user.id);
|
||||
c.set('user', user);
|
||||
c.set('userEmail', user.email);
|
||||
await next();
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Auth middleware error');
|
||||
return c.json({ error: 'Authentication failed' }, 401);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Optional authentication middleware.
|
||||
* - Public endpoint: GET /api/products (no auth required).
|
||||
* - Otherwise respects REQUIRE_AUTH flag, but skips auth in test/dev environments.
|
||||
*/
|
||||
export async function optionalAuthMiddleware(c: Context, next: Next) {
|
||||
const path = c.req.path;
|
||||
const method = c.req.method;
|
||||
|
||||
// Public endpoint – allow unauthenticated access
|
||||
const isPublicEndpoint = PublicEndpointRegistry.isPublic(path, method);
|
||||
const isProductsEndpoint = method === 'GET' && path === '/api/products';
|
||||
if (isProductsEndpoint || isPublicEndpoint) {
|
||||
return await next();
|
||||
}
|
||||
|
||||
const requireAuth = process.env.REQUIRE_AUTH === 'true';
|
||||
const isTestEnv = false; // process.env.NODE_ENV === 'test' || process.env.NODE_ENV === 'development';
|
||||
const authHeader = c.req.header('authorization');
|
||||
|
||||
// If no auth header, or it's not a Bearer token...
|
||||
let token: string | undefined;
|
||||
|
||||
if (authHeader && authHeader.startsWith('Bearer ')) {
|
||||
token = authHeader.substring(7);
|
||||
} else {
|
||||
// Check for token in query param (for SSE)
|
||||
const queryToken = c.req.query('token');
|
||||
if (queryToken) {
|
||||
token = queryToken;
|
||||
}
|
||||
}
|
||||
|
||||
if (!token) {
|
||||
// ...and we are in test env or auth not required, just continue.
|
||||
if (!requireAuth) {
|
||||
return await next();
|
||||
}
|
||||
// ...otherwise reject
|
||||
return c.json({ error: 'Unauthorized' }, 401);
|
||||
}
|
||||
|
||||
try {
|
||||
const user = await getUserCached(token);
|
||||
if (!user) {
|
||||
logger.warn('[Auth] Token verification failed');
|
||||
if (isTestEnv) {
|
||||
return await next();
|
||||
}
|
||||
return c.json({ error: 'Unauthorized' }, 401);
|
||||
}
|
||||
c.set('userId', user.id);
|
||||
c.set('user', user);
|
||||
c.set('userEmail', user.email);
|
||||
await next();
|
||||
} catch (err) {
|
||||
logger.error({ err }, '[Auth] Optional auth middleware error - REJECTING');
|
||||
return c.json({ error: 'Authentication failed' }, 401);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Admin‑only middleware – requires authentication and admin role.
|
||||
* Checks AdminEndpointRegistry to see if the route requires admin access.
|
||||
*/
|
||||
export async function adminMiddleware(c: Context, next: Next) {
|
||||
const path = c.req.path;
|
||||
const method = c.req.method;
|
||||
|
||||
// Check if this is an admin endpoint
|
||||
if (!AdminEndpointRegistry.isAdmin(path, method)) {
|
||||
return await next();
|
||||
}
|
||||
|
||||
// If it is an admin endpoint, enforce auth and role
|
||||
const userId = c.get('userId');
|
||||
if (!userId) {
|
||||
return c.json({ error: 'Unauthorized - Authentication required' }, 401);
|
||||
}
|
||||
try {
|
||||
const { data: profile, error } = await supabase
|
||||
.from('user_roles')
|
||||
.select('role')
|
||||
.eq('user_id', userId)
|
||||
.single();
|
||||
// @todo : fix db - type | multiple - currently single string
|
||||
if (error || !profile || profile.role !== 'admin') {
|
||||
return c.json({ error: 'Forbidden - Admin access required' }, 403);
|
||||
}
|
||||
c.set('isAdmin', true);
|
||||
await next();
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Admin middleware error');
|
||||
return c.json({ error: 'Authorization check failed' }, 500);
|
||||
}
|
||||
}
|
||||
|
||||
450
src/middleware/autoBan.ts
Normal file
450
src/middleware/autoBan.ts
Normal file
@ -0,0 +1,450 @@
|
||||
import { Context, Next } from 'hono'
|
||||
import { readFileSync, writeFileSync } from 'fs'
|
||||
import { join } from 'path'
|
||||
import { logger, securityLogger } from '../commons/logger.js'
|
||||
|
||||
// Shape of config/ban.json: three independent deny-lists.
interface BanList {
  bannedIPs: string[]
  bannedUserIds: string[]
  bannedTokens: string[]
}

// Per-key ("ip:…" / "user:…") rate-limit violation counters, kept in memory.
interface ViolationRecord {
  count: number           // violations inside the current window
  firstViolation: number  // epoch ms of the first violation in the window
  lastViolation: number   // epoch ms of the most recent violation
}

// Configuration
const BAN_THRESHOLD = parseInt(process.env.AUTO_BAN_THRESHOLD || '5', 10) // Number of violations before ban
const VIOLATION_WINDOW_MS = parseInt(process.env.AUTO_BAN_WINDOW_MS || '10000', 10) // 10 seconds default
const VIOLATION_CLEANUP_INTERVAL = 10000 // Clean up old violations every 10 seconds

// NOTE(review): dividing by 60000 prints fractions of a minute — with the
// defaults these log as 0.166…; confirm the intended display unit.
console.log('Auto-ban configured with:', {
  threshold: BAN_THRESHOLD,
  window: VIOLATION_WINDOW_MS / 60000,
  cleanupInterval: VIOLATION_CLEANUP_INTERVAL / 60000
})

// In-memory violation tracking
const violations = new Map<string, ViolationRecord>()

// Current ban list; loaded from disk at module init and rewritten by saveBanList().
let banList: BanList = {
  bannedIPs: [],
  bannedUserIds: [],
  bannedTokens: [],
}
|
||||
|
||||
/**
|
||||
* Load ban list from JSON file
|
||||
*/
|
||||
export function loadBanList(): BanList {
|
||||
try {
|
||||
const banListPath = join(process.cwd(), 'config', 'ban.json')
|
||||
const data = readFileSync(banListPath, 'utf-8')
|
||||
banList = JSON.parse(data)
|
||||
return banList
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Failed to load ban list')
|
||||
return banList
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Save ban list to JSON file
|
||||
*/
|
||||
function saveBanList(): void {
|
||||
try {
|
||||
const banListPath = join(process.cwd(), 'config', 'ban.json')
|
||||
writeFileSync(banListPath, JSON.stringify(banList, null, 4), 'utf-8')
|
||||
logger.info('Ban list saved')
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Failed to save ban list')
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Get current ban list (live reference, not a copy — callers must not mutate).
 */
export function getBanList(): BanList {
  return banList
}
|
||||
|
||||
/**
|
||||
* Check if an IP is banned
|
||||
*/
|
||||
export function isIPBanned(ip: string): boolean {
|
||||
return banList.bannedIPs.includes(ip)
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a user ID is banned
|
||||
*/
|
||||
export function isUserBanned(userId: string): boolean {
|
||||
return banList.bannedUserIds.includes(userId)
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if an auth token is banned
|
||||
*/
|
||||
export function isTokenBanned(token: string): boolean {
|
||||
return banList.bannedTokens.includes(token)
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract IP address from request
|
||||
*/
|
||||
export function getClientIP(c: Context): string {
|
||||
// Check forwarded headers first (for proxies)
|
||||
const forwarded = c.req.header('x-forwarded-for')
|
||||
if (forwarded) {
|
||||
return forwarded.split(',')[0].trim()
|
||||
}
|
||||
|
||||
const realIp = c.req.header('x-real-ip')
|
||||
if (realIp) {
|
||||
return realIp
|
||||
}
|
||||
|
||||
// Fallback to connection IP (works for localhost)
|
||||
// In Node.js/Hono, we can try to get the remote address
|
||||
try {
|
||||
// @ts-ignore - accessing internal request object
|
||||
const remoteAddress = c.req.raw?.socket?.remoteAddress || c.env?.ip
|
||||
if (remoteAddress) {
|
||||
return remoteAddress
|
||||
}
|
||||
} catch (e) {
|
||||
// Ignore errors
|
||||
}
|
||||
|
||||
// Last resort: use localhost identifier
|
||||
return '127.0.0.1'
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract user ID from authorization header
|
||||
*/
|
||||
function getUserId(c: Context): string | null {
|
||||
const authHeader = c.req.header('authorization')
|
||||
if (!authHeader) return null
|
||||
return authHeader
|
||||
}
|
||||
|
||||
/**
|
||||
* Record a rate limit violation
|
||||
*/
|
||||
export function recordViolation(key: string): void {
|
||||
const now = Date.now()
|
||||
const existing = violations.get(key)
|
||||
|
||||
if (existing) {
|
||||
// Check if violation is within the window
|
||||
if (now - existing.firstViolation <= VIOLATION_WINDOW_MS) {
|
||||
existing.count++
|
||||
existing.lastViolation = now
|
||||
violations.set(key, existing)
|
||||
|
||||
// Check if threshold exceeded
|
||||
if (existing.count >= BAN_THRESHOLD) {
|
||||
banEntity(key)
|
||||
}
|
||||
} else {
|
||||
// Reset violation count if outside window
|
||||
violations.set(key, {
|
||||
count: 1,
|
||||
firstViolation: now,
|
||||
lastViolation: now,
|
||||
})
|
||||
}
|
||||
} else {
|
||||
// First violation
|
||||
violations.set(key, {
|
||||
count: 1,
|
||||
firstViolation: now,
|
||||
lastViolation: now,
|
||||
})
|
||||
}
|
||||
|
||||
logger.debug({ key, violations: violations.get(key) }, 'Violation recorded')
|
||||
}
|
||||
|
||||
/**
|
||||
* Ban an entity (IP, user, or token)
|
||||
*/
|
||||
function banEntity(key: string): void {
|
||||
const [type, value] = key.split(':', 2)
|
||||
const violationRecord = violations.get(key)
|
||||
|
||||
let added = false
|
||||
if (type === 'ip' && !banList.bannedIPs.includes(value)) {
|
||||
banList.bannedIPs.push(value)
|
||||
added = true
|
||||
|
||||
// Log to security.json
|
||||
securityLogger.warn({
|
||||
event: 'auto_ban',
|
||||
type: 'ip',
|
||||
ip: value,
|
||||
violations: violationRecord?.count,
|
||||
firstViolation: violationRecord?.firstViolation,
|
||||
lastViolation: violationRecord?.lastViolation
|
||||
}, 'IP auto-banned for excessive requests')
|
||||
|
||||
// Also log to console
|
||||
logger.info({ ip: value, violations: violationRecord?.count }, '🚫 IP auto-banned for excessive requests')
|
||||
|
||||
} else if (type === 'user' && !banList.bannedUserIds.includes(value)) {
|
||||
banList.bannedUserIds.push(value)
|
||||
added = true
|
||||
|
||||
// Log to security.json
|
||||
securityLogger.warn({
|
||||
event: 'auto_ban',
|
||||
type: 'user',
|
||||
userId: value,
|
||||
violations: violationRecord?.count,
|
||||
firstViolation: violationRecord?.firstViolation,
|
||||
lastViolation: violationRecord?.lastViolation
|
||||
}, 'User auto-banned for excessive requests')
|
||||
|
||||
// Also log to console
|
||||
logger.info({ userId: value, violations: violationRecord?.count }, '🚫 User auto-banned for excessive requests')
|
||||
|
||||
} else if (type === 'token' && !banList.bannedTokens.includes(value)) {
|
||||
banList.bannedTokens.push(value)
|
||||
added = true
|
||||
|
||||
// Log to security.json
|
||||
securityLogger.warn({
|
||||
event: 'auto_ban',
|
||||
type: 'token',
|
||||
token: value.substring(0, 20) + '...',
|
||||
violations: violationRecord?.count,
|
||||
firstViolation: violationRecord?.firstViolation,
|
||||
lastViolation: violationRecord?.lastViolation
|
||||
}, 'Token auto-banned for excessive requests')
|
||||
|
||||
// Also log to console
|
||||
logger.info({ token: value.substring(0, 20) + '...', violations: violationRecord?.count }, '🚫 Token auto-banned for excessive requests')
|
||||
}
|
||||
|
||||
if (added) {
|
||||
saveBanList()
|
||||
// Clear violation record after ban
|
||||
violations.delete(key)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean up old violation records
|
||||
*/
|
||||
function cleanupViolations(): void {
|
||||
const now = Date.now()
|
||||
let cleaned = 0
|
||||
|
||||
for (const [key, record] of violations.entries()) {
|
||||
if (now - record.lastViolation > VIOLATION_WINDOW_MS) {
|
||||
violations.delete(key)
|
||||
cleaned++
|
||||
}
|
||||
}
|
||||
|
||||
if (cleaned > 0) {
|
||||
logger.debug({ cleaned }, 'Cleaned up old violation records')
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Auto-ban middleware support state — the middleware itself is below.
 */

// Simple in-memory rate limiting.
// NOTE(review): entries in requestCounts are never removed (the periodic
// cleanupViolations() only prunes `violations`), so this map grows with the
// number of distinct client keys ever seen — consider pruning expired records.
const requestCounts = new Map<string, { count: number, resetTime: number }>()
const RATE_LIMIT_MAX = parseInt(process.env.RATE_LIMIT_MAX || '20', 10)
const RATE_LIMIT_WINDOW_MS = parseInt(process.env.RATE_LIMIT_WINDOW_MS || '1000', 10)
|
||||
|
||||
/**
 * Auto-ban middleware.
 *
 * Order of checks per request:
 *  1. Localhost traffic bypasses everything (dev & e2e tests).
 *  2. Banned IP / token / user → 403.
 *  3. Built-in fixed-window rate limit keyed by auth header (preferred)
 *     or client IP; exceeding RATE_LIMIT_MAX inside RATE_LIMIT_WINDOW_MS
 *     returns 429 and records a violation (which can escalate to a ban).
 *
 * NOTE(review): requestCounts entries are never deleted, only reset when a
 * key recurs — the map grows with distinct keys seen (see note above).
 */
export async function autoBanMiddleware(c: Context, next: Next) {
  const ip = getClientIP(c)
  const path = c.req.path
  const method = c.req.method

  // Skip ban/rate-limit checks for local requests (dev & e2e tests)
  if (ip === '127.0.0.1' || ip === 'localhost' || ip === '::1' || ip === '::ffff:127.0.0.1') {
    return next()
  }

  const authHeader = c.req.header('authorization')
  const userId = getUserId(c)

  // Rate-limit bucket key: auth header when present, otherwise client IP.
  let key: string
  if (authHeader) {
    key = `user:${authHeader}`
  } else {
    key = `ip:${ip}`
  }

  // Check if IP is banned (logging currently disabled for this branch).
  if (isIPBanned(ip)) {
    /*
    securityLogger.info({
      event: 'blocked_request',
      type: 'ip',
      ip,
      path,
      method
    }, 'Blocked request from banned IP')
    */

    // logger.info({ ip, path }, '🚫 Blocked request from banned IP')

    return c.json(
      {
        error: 'Forbidden',
        message: 'Your IP address has been banned for excessive requests',
      },
      403
    )
  }

  // Check if auth token is banned
  if (authHeader && isTokenBanned(authHeader)) {
    securityLogger.info({
      event: 'blocked_request',
      type: 'token',
      token: authHeader.substring(0, 20) + '...',
      path,
      method
    }, 'Blocked request from banned token')

    logger.info({ token: authHeader.substring(0, 20) + '...', path }, '🚫 Blocked request from banned token')

    return c.json(
      {
        error: 'Forbidden',
        message: 'Your access token has been banned for excessive requests',
      },
      403
    )
  }

  // Check if user ID is banned
  if (userId && isUserBanned(userId)) {
    securityLogger.info({
      event: 'blocked_request',
      type: 'user',
      userId,
      path,
      method
    }, 'Blocked request from banned user')

    logger.info({ userId, path }, '🚫 Blocked request from banned user')

    return c.json(
      {
        error: 'Forbidden',
        message: 'Your account has been banned for excessive requests',
      },
      403
    )
  }

  // Built-in rate limiting (since hono-rate-limiter isn't working)
  const now = Date.now()
  const record = requestCounts.get(key)

  if (record) {
    if (now < record.resetTime) {
      // Within the window
      record.count++

      if (record.count > RATE_LIMIT_MAX) {
        // Rate limit exceeded — record a violation (may trigger auto-ban).
        console.log(`⚠️ Rate limit exceeded for ${key} (${record.count}/${RATE_LIMIT_MAX})`)
        recordViolation(key)

        return c.json(
          {
            error: 'Too many requests',
            message: `Rate limit exceeded. Maximum ${RATE_LIMIT_MAX} requests per ${RATE_LIMIT_WINDOW_MS}ms`,
          },
          429
        )
      }
    } else {
      // Window expired, reset
      record.count = 1
      record.resetTime = now + RATE_LIMIT_WINDOW_MS
    }
  } else {
    // First request
    requestCounts.set(key, {
      count: 1,
      resetTime: now + RATE_LIMIT_WINDOW_MS
    })
  }
  await next()
}
|
||||
|
||||
/**
|
||||
* Manually unban an IP
|
||||
*/
|
||||
export function unbanIP(ip: string): boolean {
|
||||
const index = banList.bannedIPs.indexOf(ip)
|
||||
if (index > -1) {
|
||||
banList.bannedIPs.splice(index, 1)
|
||||
saveBanList()
|
||||
|
||||
securityLogger.info({
|
||||
event: 'unban',
|
||||
type: 'ip',
|
||||
ip
|
||||
}, 'IP unbanned')
|
||||
|
||||
logger.info({ ip }, 'IP unbanned')
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* Manually unban a user
|
||||
*/
|
||||
export function unbanUser(userId: string): boolean {
|
||||
const index = banList.bannedUserIds.indexOf(userId)
|
||||
if (index > -1) {
|
||||
banList.bannedUserIds.splice(index, 1)
|
||||
saveBanList()
|
||||
|
||||
securityLogger.info({
|
||||
event: 'unban',
|
||||
type: 'user',
|
||||
userId
|
||||
}, 'User unbanned')
|
||||
|
||||
logger.info({ userId }, 'User unbanned')
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current violation stats
|
||||
*/
|
||||
export function getViolationStats() {
|
||||
return {
|
||||
totalViolations: violations.size,
|
||||
violations: Array.from(violations.entries()).map(([key, record]) => ({
|
||||
key,
|
||||
...record,
|
||||
})),
|
||||
}
|
||||
}
|
||||
|
||||
// Load ban list on module initialization
loadBanList()

// Start cleanup interval. NOTE(review): no .unref() — this timer keeps the
// process alive; confirm that is intended for short-lived tooling runs.
setInterval(cleanupViolations, VIOLATION_CLEANUP_INTERVAL)
|
||||
134
src/middleware/blocklist.ts
Normal file
134
src/middleware/blocklist.ts
Normal file
@ -0,0 +1,134 @@
|
||||
import { Context, Next } from 'hono'
|
||||
import { readFileSync } from 'fs'
|
||||
import { join, dirname } from 'path'
|
||||
import { fileURLToPath } from 'url'
|
||||
|
||||
// Module-relative paths. NOTE(review): __filename/__dirname are not used
// anywhere in this file's visible code — possibly left over from an earlier
// version that resolved the blocklist relative to the module.
const __filename = fileURLToPath(import.meta.url)
const __dirname = dirname(__filename)

// Shape of config/blocklist.json: three independent deny-lists.
interface Blocklist {
  blockedIPs: string[]
  blockedUserIds: string[]
  blockedTokens: string[]
}

// In-memory blocklist, populated by loadBlocklist() at module init.
let blocklist: Blocklist = {
  blockedIPs: [],
  blockedUserIds: [],
  blockedTokens: [],
}
|
||||
|
||||
/**
|
||||
* Load blocklist from JSON file
|
||||
*/
|
||||
export function loadBlocklist(): Blocklist {
|
||||
try {
|
||||
const blocklistPath = join(process.cwd(), 'config', 'blocklist.json')
|
||||
const data = readFileSync(blocklistPath, 'utf-8')
|
||||
blocklist = JSON.parse(data)
|
||||
return blocklist
|
||||
} catch (error) {
|
||||
console.error('Failed to load blocklist:', error)
|
||||
return blocklist
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Get current blocklist (live reference, not a copy — callers must not mutate).
 */
export function getBlocklist(): Blocklist {
  return blocklist
}
|
||||
|
||||
/**
|
||||
* Check if an IP is blocked
|
||||
*/
|
||||
export function isIPBlocked(ip: string): boolean {
|
||||
return blocklist.blockedIPs.includes(ip)
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a user ID is blocked
|
||||
*/
|
||||
export function isUserBlocked(userId: string): boolean {
|
||||
return blocklist.blockedUserIds.includes(userId)
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if an auth token is blocked
|
||||
*/
|
||||
export function isTokenBlocked(token: string): boolean {
|
||||
return blocklist.blockedTokens.includes(token)
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract IP address from request
|
||||
*/
|
||||
function getClientIP(c: Context): string {
|
||||
const forwarded = c.req.header('x-forwarded-for')
|
||||
if (forwarded) {
|
||||
return forwarded.split(',')[0].trim()
|
||||
}
|
||||
return c.req.header('x-real-ip') || 'unknown'
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract user ID from authorization header
|
||||
* This is a simple implementation - adjust based on your auth strategy
|
||||
*/
|
||||
function getUserId(c: Context): string | null {
|
||||
const authHeader = c.req.header('authorization')
|
||||
if (!authHeader) return null
|
||||
|
||||
// Simple extraction - in production, you'd decode JWT or validate token
|
||||
// For now, we'll use the auth header as-is for blocklist checking
|
||||
return authHeader
|
||||
}
|
||||
|
||||
/**
|
||||
* Blocklist middleware
|
||||
* Blocks requests from blacklisted IPs, users, or tokens
|
||||
*/
|
||||
export async function blocklistMiddleware(c: Context, next: Next) {
|
||||
const ip = getClientIP(c)
|
||||
const authHeader = c.req.header('authorization')
|
||||
const userId = getUserId(c)
|
||||
|
||||
// Check if IP is blocked
|
||||
if (isIPBlocked(ip)) {
|
||||
return c.json(
|
||||
{
|
||||
error: 'Forbidden',
|
||||
message: 'Your IP address has been blocked',
|
||||
},
|
||||
403
|
||||
)
|
||||
}
|
||||
|
||||
// Check if auth token is blocked
|
||||
if (authHeader && isTokenBlocked(authHeader)) {
|
||||
return c.json(
|
||||
{
|
||||
error: 'Forbidden',
|
||||
message: 'Your access token has been blocked',
|
||||
},
|
||||
403
|
||||
)
|
||||
}
|
||||
|
||||
// Check if user ID is blocked
|
||||
if (userId && isUserBlocked(userId)) {
|
||||
return c.json(
|
||||
{
|
||||
error: 'Forbidden',
|
||||
message: 'Your account has been blocked',
|
||||
},
|
||||
403
|
||||
)
|
||||
}
|
||||
|
||||
await next()
|
||||
}
|
||||
|
||||
// Load blocklist on module initialization. NOTE(review): it is never
// reloaded afterwards — edits to blocklist.json require a restart.
loadBlocklist()
|
||||
106
src/middleware/rateLimiter.ts
Normal file
106
src/middleware/rateLimiter.ts
Normal file
@ -0,0 +1,106 @@
|
||||
import { Context, Next } from 'hono'
|
||||
import { rateLimiter } from 'hono-rate-limiter'
|
||||
import { recordViolation } from './autoBan.js'
|
||||
|
||||
// Rate limit configuration from environment variables
const RATE_LIMIT_MAX = parseInt(process.env.RATE_LIMIT_MAX || '1', 10)
const RATE_LIMIT_WINDOW_MS = parseInt(process.env.RATE_LIMIT_WINDOW_MS || '50', 10)

// NOTE(review): autoBan.ts defaults AUTO_BAN_THRESHOLD to '5'; the fallback
// printed below (10) disagrees — confirm which default is authoritative.
console.log('🔒 Rate Limiter Configuration:')
console.log(` Max: ${RATE_LIMIT_MAX} requests per ${RATE_LIMIT_WINDOW_MS}ms`)
console.log(` Auto-ban threshold: ${process.env.AUTO_BAN_THRESHOLD || 10} violations`)
|
||||
|
||||
|
||||
/**
|
||||
* Rate limiter middleware configuration
|
||||
* Limits requests per user/IP address
|
||||
*/
|
||||
export const apiRateLimiter = rateLimiter({
|
||||
windowMs: RATE_LIMIT_WINDOW_MS, // Time window in milliseconds
|
||||
limit: RATE_LIMIT_MAX, // Max requests per window
|
||||
standardHeaders: 'draft-6', // Return rate limit info in headers
|
||||
keyGenerator: (c: Context) => {
|
||||
// Try to get user ID from auth header, fallback to IP
|
||||
const authHeader = c.req.header('authorization')
|
||||
if (authHeader) {
|
||||
// Extract user ID from JWT or auth token if available
|
||||
// For now, use the auth header as key
|
||||
return `user:${authHeader}`
|
||||
}
|
||||
|
||||
// Fallback to IP address
|
||||
const forwarded = c.req.header('x-forwarded-for')
|
||||
const ip = forwarded ? forwarded.split(',')[0] : c.req.header('x-real-ip') || 'unknown'
|
||||
return `ip:${ip}`
|
||||
},
|
||||
handler: (c: Context) => {
|
||||
// Record violation for auto-ban tracking
|
||||
const authHeader = c.req.header('authorization')
|
||||
let key: string
|
||||
if (authHeader) {
|
||||
key = `user:${authHeader}`
|
||||
} else {
|
||||
const forwarded = c.req.header('x-forwarded-for')
|
||||
const ip = forwarded ? forwarded.split(',')[0] : c.req.header('x-real-ip') || 'unknown'
|
||||
key = `ip:${ip}`
|
||||
}
|
||||
|
||||
console.log(`⚠️ Rate limit exceeded for ${key}`)
|
||||
recordViolation(key)
|
||||
|
||||
return c.json(
|
||||
{
|
||||
error: 'Too many requests',
|
||||
message: `Rate limit exceeded. Maximum ${RATE_LIMIT_MAX} requests per ${RATE_LIMIT_WINDOW_MS}ms`,
|
||||
},
|
||||
429
|
||||
)
|
||||
},
|
||||
})
|
||||
|
||||
/**
|
||||
* Custom rate limiter for specific endpoints with different limits
|
||||
*/
|
||||
export function createCustomRateLimiter(limit: number, windowMs: number) {
|
||||
return rateLimiter({
|
||||
windowMs,
|
||||
limit,
|
||||
standardHeaders: 'draft-6',
|
||||
keyGenerator: (c: Context) => {
|
||||
const authHeader = c.req.header('authorization')
|
||||
if (authHeader) {
|
||||
return `user:${authHeader}`
|
||||
}
|
||||
const forwarded = c.req.header('x-forwarded-for')
|
||||
const ip = forwarded ? forwarded.split(',')[0] : c.req.header('x-real-ip') || 'unknown'
|
||||
return `ip:${ip}`
|
||||
},
|
||||
handler: (c: Context) => {
|
||||
// Record violation for auto-ban tracking
|
||||
const authHeader = c.req.header('authorization')
|
||||
let key: string
|
||||
if (authHeader) {
|
||||
key = `user:${authHeader}`
|
||||
} else {
|
||||
const forwarded = c.req.header('x-forwarded-for')
|
||||
const ip = forwarded ? forwarded.split(',')[0] : c.req.header('x-real-ip') || 'unknown'
|
||||
key = `ip:${ip}`
|
||||
}
|
||||
recordViolation(key)
|
||||
|
||||
return c.json(
|
||||
{
|
||||
error: 'Too many requests',
|
||||
message: `Rate limit exceeded. Maximum ${limit} requests per ${windowMs}ms`,
|
||||
},
|
||||
429
|
||||
)
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
// Export the effective configuration so tests can assert against it.
export const rateLimitConfig = {
  max: RATE_LIMIT_MAX,
  windowMs: RATE_LIMIT_WINDOW_MS,
}
|
||||
305
src/middleware/usageTracking.ts
Normal file
305
src/middleware/usageTracking.ts
Normal file
@ -0,0 +1,305 @@
|
||||
import { Context, Next } from 'hono';
|
||||
import { supabase } from '../commons/supabase.js';
|
||||
import { logger } from '../commons/logger.js';
|
||||
import { FunctionRegistry } from '../commons/registry.js';
|
||||
|
||||
/** One record of the api_usage table, tracked per billable request. */
export interface UsageData {
  userId: string;
  endpoint: string;        // request path
  method: string;          // HTTP verb
  product: string;         // product id from FunctionRegistry
  action: string;          // action id from FunctionRegistry
  responseStatus?: number; // HTTP status once the request completed
  responseTimeMs?: number; // wall-clock handler duration
  costUnits: number;       // billing cost of this action
  cancellable: boolean;    // whether the underlying job may be cancelled
  metadata?: Record<string, any>;
  apiKeyId?: string;
  jobId?: string;          // correlates the usage row with a background job
}
|
||||
|
||||
/**
|
||||
* Middleware to track API usage for billing and monitoring
|
||||
* Tracks request start and updates with completion status
|
||||
*/
|
||||
export async function usageTrackingMiddleware(c: Context, next: Next) {
|
||||
const startTime = Date.now();
|
||||
|
||||
// Extract user ID from context (set by auth middleware)
|
||||
const userId = c.get('userId');
|
||||
// Skip tracking for unauthenticated requests
|
||||
if (!userId) {
|
||||
logger.trace('[UsageTracking] Skipping - No userId');
|
||||
await next();
|
||||
return;
|
||||
}
|
||||
|
||||
// Determine product and action
|
||||
const path = c.req.path;
|
||||
const method = c.req.method;
|
||||
|
||||
// Use Registry to find config
|
||||
const config = FunctionRegistry.findByRoute(path, method);
|
||||
const product = config?.productId;
|
||||
const action = config?.actionId;
|
||||
|
||||
logger.trace(`[UsageTracking] Identified: product=${product}, action=${action}`);
|
||||
|
||||
// Skip if not a tracked endpoint
|
||||
if (!product || !action || !config) {
|
||||
logger.info('[UsageTracking] Skipping - Not a tracked endpoint');
|
||||
await next();
|
||||
return;
|
||||
}
|
||||
|
||||
// Generate a job ID for this request
|
||||
const jobId = `${product}_${action}_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
|
||||
|
||||
// Create initial usage record with 'processing' status
|
||||
let usageId: string | null = null;
|
||||
try {
|
||||
const { data, error } = await supabase
|
||||
.from('api_usage')
|
||||
.insert({
|
||||
user_id: userId,
|
||||
endpoint: path,
|
||||
method,
|
||||
product,
|
||||
action,
|
||||
status: 'processing',
|
||||
job_id: jobId,
|
||||
cancellable: config.cancellable || false,
|
||||
cost_units: config.costUnits,
|
||||
metadata: {
|
||||
query: c.req.query(),
|
||||
userAgent: c.req.header('user-agent'),
|
||||
ip: c.req.header('x-forwarded-for') || c.req.header('x-real-ip'),
|
||||
},
|
||||
})
|
||||
.select('id')
|
||||
.single();
|
||||
|
||||
if (error) {
|
||||
logger.error({ err: error }, '[UsageTracking] Error creating usage record');
|
||||
} else if (data) {
|
||||
logger.trace(`[UsageTracking] Created usage record: ${data.id}`);
|
||||
usageId = data.id;
|
||||
// Store usage ID in context for potential use in handlers
|
||||
c.set('usageId', usageId);
|
||||
c.set('jobId', jobId);
|
||||
} else {
|
||||
logger.trace('[UsageTracking] No data returned from insert');
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Failed to create usage record');
|
||||
}
|
||||
|
||||
// Execute the request
|
||||
let requestError: Error | null = null;
|
||||
try {
|
||||
await next();
|
||||
} catch (err) {
|
||||
requestError = err as Error;
|
||||
throw err; // Re-throw to let error handler deal with it
|
||||
} finally {
|
||||
// Update usage record with completion status
|
||||
const endTime = Date.now();
|
||||
const responseTime = endTime - startTime;
|
||||
|
||||
if (usageId) {
|
||||
// Check if handler requested to skip status update (e.g. for background jobs)
|
||||
const skipUpdate = c.get('skipUsageStatusUpdate');
|
||||
|
||||
if (!skipUpdate) {
|
||||
updateUsageRecord({
|
||||
usageId,
|
||||
responseStatus: c.res.status,
|
||||
responseTimeMs: responseTime,
|
||||
error: requestError,
|
||||
}).catch(err => {
|
||||
logger.error({ err }, 'Failed to update usage record');
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update usage record with completion status
|
||||
*/
|
||||
export async function updateUsageRecord(data: {
|
||||
usageId: string;
|
||||
responseStatus: number;
|
||||
responseTimeMs: number;
|
||||
error?: Error | null;
|
||||
}) {
|
||||
const status = data.error
|
||||
? 'failed'
|
||||
: (data.responseStatus >= 200 && data.responseStatus < 300)
|
||||
? 'completed'
|
||||
: 'failed';
|
||||
|
||||
const updateData: any = {
|
||||
status,
|
||||
response_status: data.responseStatus,
|
||||
response_time_ms: data.responseTimeMs,
|
||||
};
|
||||
|
||||
if (data.error) {
|
||||
updateData.error_message = data.error.message;
|
||||
}
|
||||
|
||||
const { error } = await supabase
|
||||
.from('api_usage')
|
||||
.update(updateData)
|
||||
.eq('id', data.usageId);
|
||||
|
||||
if (error) {
|
||||
logger.error({ err: error }, 'Error updating usage record');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper function to manually track usage (for non-middleware scenarios)
|
||||
*/
|
||||
export async function trackUsage(data: UsageData): Promise<string | null> {
|
||||
try {
|
||||
const { data: record, error } = await supabase
|
||||
.from('api_usage')
|
||||
.insert({
|
||||
user_id: data.userId,
|
||||
endpoint: data.endpoint,
|
||||
method: data.method,
|
||||
product: data.product,
|
||||
action: data.action,
|
||||
status: data.responseStatus ? 'completed' : 'processing',
|
||||
job_id: data.jobId,
|
||||
cancellable: data.cancellable,
|
||||
response_status: data.responseStatus,
|
||||
response_time_ms: data.responseTimeMs,
|
||||
cost_units: data.costUnits,
|
||||
metadata: data.metadata,
|
||||
api_key_id: data.apiKeyId,
|
||||
})
|
||||
.select('id')
|
||||
.single();
|
||||
|
||||
if (error) {
|
||||
logger.error({ err: error }, 'Error tracking usage');
|
||||
return null;
|
||||
}
|
||||
|
||||
return record?.id || null;
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Failed to track usage');
|
||||
return null;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Cancel a job by job ID
|
||||
*/
|
||||
export async function cancelJob(userId: string, jobId: string): Promise<boolean> {
|
||||
try {
|
||||
const { data, error } = await supabase
|
||||
.from('api_usage')
|
||||
.update({
|
||||
status: 'cancelled',
|
||||
})
|
||||
.eq('user_id', userId)
|
||||
.eq('job_id', jobId)
|
||||
.eq('cancellable', true)
|
||||
.in('status', ['pending', 'processing'])
|
||||
.select('id');
|
||||
|
||||
if (error) {
|
||||
logger.error({ err: error }, 'Error cancelling job');
|
||||
return false;
|
||||
}
|
||||
|
||||
return !!data && data.length > 0;
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Failed to cancel job');
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get active (cancellable) jobs for a user
|
||||
*/
|
||||
export async function getActiveJobs(userId: string) {
|
||||
try {
|
||||
const { data, error } = await supabase
|
||||
.from('api_usage')
|
||||
.select('id, job_id, product, action, status, created_at, metadata')
|
||||
.eq('user_id', userId)
|
||||
.eq('cancellable', true)
|
||||
.in('status', ['pending', 'processing'])
|
||||
.order('created_at', { ascending: false });
|
||||
|
||||
if (error) {
|
||||
logger.error({ err: error }, 'Error fetching active jobs');
|
||||
return [];
|
||||
}
|
||||
|
||||
return data || [];
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Failed to fetch active jobs');
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Pause a job by job ID
|
||||
*/
|
||||
export async function pauseJob(userId: string, jobId: string): Promise<boolean> {
|
||||
try {
|
||||
const { data, error } = await supabase
|
||||
.from('api_usage')
|
||||
.update({
|
||||
status: 'paused',
|
||||
})
|
||||
.eq('user_id', userId)
|
||||
.eq('job_id', jobId)
|
||||
.eq('cancellable', true)
|
||||
.eq('status', 'processing') // Only processing jobs can be paused
|
||||
.select('id');
|
||||
|
||||
if (error) {
|
||||
logger.error({ err: error }, 'Error pausing job');
|
||||
return false;
|
||||
}
|
||||
|
||||
return !!data && data.length > 0;
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Failed to pause job');
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Resume a paused job by job ID
|
||||
*/
|
||||
export async function resumeJob(userId: string, jobId: string): Promise<boolean> {
|
||||
try {
|
||||
const { data, error } = await supabase
|
||||
.from('api_usage')
|
||||
.update({
|
||||
status: 'processing',
|
||||
})
|
||||
.eq('user_id', userId)
|
||||
.eq('job_id', jobId)
|
||||
.eq('cancellable', true)
|
||||
.eq('status', 'paused') // Only paused jobs can be resumed
|
||||
.select('id');
|
||||
|
||||
if (error) {
|
||||
logger.error({ err: error }, 'Error resuming job');
|
||||
return false;
|
||||
}
|
||||
|
||||
return !!data && data.length > 0;
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Failed to resume job');
|
||||
return false;
|
||||
}
|
||||
}
|
||||
139
src/products/AbstractProduct.ts
Normal file
139
src/products/AbstractProduct.ts
Normal file
@ -0,0 +1,139 @@
|
||||
import EventEmitter from 'events';
|
||||
import { createHash } from 'crypto';
|
||||
import { streamSSE } from 'hono/streaming';
|
||||
import { EventBus } from './EventBus.js';
|
||||
import { ProductErrorCode } from './enums.js';
|
||||
import { ProductError } from './errors.js';
|
||||
import { logger } from '../commons/logger.js';
|
||||
|
||||
/** Event payload describing a background job a product wants enqueued. */
export interface JobCreationEvent {
  queue: string;
  data: any;
  options: any;
}

/** Options accepted by AbstractProduct.handleStream(). */
export interface StreamOptions<TData = any> {
  // Input payload; hashed (see generateHash) to key the cache lookup.
  data: TData;
  userId: string;
  // When true, bypass cacheChecker and always call fetcher.
  forceRefresh?: boolean;
  // Produces the result items streamed to the client one SSE event each.
  fetcher: (data: TData, userId: string) => Promise<any[]>;
  // Optional cache lookup for a given input hash; null/undefined = miss.
  cacheChecker?: (hash: string) => Promise<any[] | null>;
}
|
||||
|
||||
export abstract class AbstractProduct<TJobData = any> extends EventEmitter {
|
||||
abstract readonly id: string;
|
||||
abstract readonly jobOptions: any;
|
||||
abstract readonly actions: Record<string, any>;
|
||||
abstract readonly workers: any[];
|
||||
abstract readonly routes: any[];
|
||||
|
||||
async start(boss?: any) {
|
||||
try {
|
||||
await this.onStart(boss);
|
||||
} catch (error: any) {
|
||||
throw new ProductError(ProductErrorCode.START_FAILED, {
|
||||
message: `Failed to start product ${this.id}: ${error.message}`,
|
||||
originalError: error
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
protected async onStart(boss?: any) {
|
||||
// Optional hook for subclasses
|
||||
}
|
||||
|
||||
async stop() {
|
||||
try {
|
||||
await this.onStop();
|
||||
} catch (error: any) {
|
||||
throw new ProductError(ProductErrorCode.STOP_FAILED, {
|
||||
message: `Failed to stop product ${this.id}: ${error.message}`,
|
||||
originalError: error
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
protected async onStop() {
|
||||
// Optional hook
|
||||
}
|
||||
|
||||
async pause() {
|
||||
// No-op for now as we removed pgboss
|
||||
}
|
||||
|
||||
async resume() {
|
||||
// No-op for now as we removed pgboss
|
||||
}
|
||||
|
||||
protected async handleStream(c: any, options: StreamOptions) {
|
||||
const { data, userId, forceRefresh, fetcher, cacheChecker } = options;
|
||||
|
||||
const inputHash = this.generateHash(data);
|
||||
|
||||
return streamSSE(c, async (stream) => {
|
||||
try {
|
||||
await stream.writeSSE({
|
||||
event: 'progress',
|
||||
data: JSON.stringify({ stage: 'starting', percent: 0 })
|
||||
});
|
||||
|
||||
if (!forceRefresh && cacheChecker) {
|
||||
await stream.writeSSE({
|
||||
event: 'progress',
|
||||
data: JSON.stringify({ stage: 'checking_cache', percent: 10 })
|
||||
});
|
||||
|
||||
const cached = await cacheChecker(inputHash);
|
||||
if (cached) {
|
||||
for (let i = 0; i < cached.length; i++) {
|
||||
await stream.writeSSE({
|
||||
event: 'result',
|
||||
data: JSON.stringify(cached[i])
|
||||
});
|
||||
}
|
||||
await stream.writeSSE({
|
||||
event: 'complete',
|
||||
data: JSON.stringify({ total: cached.length, cached: true })
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
await stream.writeSSE({
|
||||
event: 'progress',
|
||||
data: JSON.stringify({ stage: 'fetching_from_api', percent: 20 })
|
||||
});
|
||||
|
||||
const results = await fetcher(data, userId);
|
||||
|
||||
for (let i = 0; i < results.length; i++) {
|
||||
await stream.writeSSE({
|
||||
event: 'result',
|
||||
data: JSON.stringify(results[i])
|
||||
});
|
||||
}
|
||||
|
||||
await stream.writeSSE({
|
||||
event: 'complete',
|
||||
data: JSON.stringify({ total: results.length, cached: false })
|
||||
});
|
||||
|
||||
} catch (error: any) {
|
||||
logger.error(error, `[${this.id}] Stream error`);
|
||||
await stream.writeSSE({
|
||||
event: 'error',
|
||||
data: JSON.stringify({ error: error.message || 'Internal Server Error' })
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Helper for hashing
|
||||
protected generateHash(params: any) {
|
||||
const normalizedInput = JSON.stringify(params, Object.keys(params).sort());
|
||||
return createHash('sha256').update(normalizedInput).digest('hex');
|
||||
}
|
||||
|
||||
abstract hash(data: TJobData): string;
|
||||
abstract meta(userId: string): any;
|
||||
}
|
||||
3
src/products/EventBus.ts
Normal file
3
src/products/EventBus.ts
Normal file
@ -0,0 +1,3 @@
|
||||
import EventEmitter from 'events';

// Process-wide singleton event bus shared by all products
// (imported for cross-product pub/sub without direct coupling).
export const EventBus = new EventEmitter();
|
||||
141
src/products/analytics/index.ts
Normal file
141
src/products/analytics/index.ts
Normal file
@ -0,0 +1,141 @@
|
||||
import { Context } from 'hono';
|
||||
import { streamSSE } from 'hono/streaming';
|
||||
import { AbstractProduct } from '../AbstractProduct.js';
|
||||
import { getAnalyticsRoute, getAnalyticsStreamRoute, deleteAnalyticsRoute } from './routes.js';
|
||||
import { analyticsEmitter } from '../../lib/analytics-emitter.js';
|
||||
import { CONFIG_DEFAULT } from '@polymech/commons';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import readline from 'readline';
|
||||
|
||||
const ANALYTICS_FILE = path.resolve(process.cwd(), 'logs/analytics.jsonl');
|
||||
|
||||
export class AnalyticsProduct extends AbstractProduct<any> {
|
||||
id = 'analytics';
|
||||
jobOptions = {};
|
||||
actions = {}; // Optional: Add actions if needed for jobs
|
||||
workers: any[] = [];
|
||||
routes: any[] = [];
|
||||
hash = () => 'analytics-hash';
|
||||
meta = () => ({});
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
this.initializeRoutes();
|
||||
}
|
||||
|
||||
initializeRoutes() {
|
||||
this.routes.push({
|
||||
definition: getAnalyticsRoute,
|
||||
handler: this.handleGetAnalytics.bind(this)
|
||||
});
|
||||
this.routes.push({
|
||||
definition: getAnalyticsStreamRoute,
|
||||
handler: this.handleGetAnalyticsStream.bind(this)
|
||||
});
|
||||
this.routes.push({
|
||||
definition: deleteAnalyticsRoute,
|
||||
handler: this.handleDeleteAnalytics.bind(this)
|
||||
});
|
||||
}
|
||||
|
||||
// ... existing handlers
|
||||
|
||||
async handleDeleteAnalytics(c: Context) {
|
||||
try {
|
||||
if (fs.existsSync(ANALYTICS_FILE)) {
|
||||
// Truncate file
|
||||
await fs.promises.truncate(ANALYTICS_FILE, 0);
|
||||
}
|
||||
return c.json({ success: true });
|
||||
} catch (err: any) {
|
||||
console.error('Error clearing analytics:', err);
|
||||
return c.json({ error: 'Internal Server Error' }, 500);
|
||||
}
|
||||
}
|
||||
|
||||
async handleGetAnalyticsStream(c: Context) {
|
||||
return streamSSE(c, async (stream) => {
|
||||
const listener = async (entry: any) => {
|
||||
await stream.writeSSE({
|
||||
data: JSON.stringify(entry),
|
||||
event: 'log',
|
||||
});
|
||||
};
|
||||
|
||||
analyticsEmitter.on('log', listener);
|
||||
|
||||
// Keep connection alive or handle disconnect
|
||||
// Hono's streamSSE handles closing the stream when the connection drops,
|
||||
// but we need to remove the listener to avoid leaks.
|
||||
stream.onAbort(() => {
|
||||
analyticsEmitter.off('log', listener);
|
||||
});
|
||||
|
||||
// Wait forever (or until client disconnects)
|
||||
while (true) {
|
||||
await new Promise(resolve => setTimeout(resolve, 1000));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async handleGetAnalytics(c: Context) {
|
||||
try {
|
||||
const limit = parseInt(c.req.query('limit') || '100', 10);
|
||||
const startDateStr = c.req.query('startDate');
|
||||
const endDateStr = c.req.query('endDate');
|
||||
|
||||
const startDate = startDateStr ? new Date(startDateStr).getTime() : 0;
|
||||
const endDate = endDateStr ? new Date(endDateStr).getTime() : Date.now();
|
||||
|
||||
if (!fs.existsSync(ANALYTICS_FILE)) {
|
||||
return c.json([]);
|
||||
}
|
||||
|
||||
// Efficiently read last N lines would be better, but for "filtered" queries we generally need to scan.
|
||||
// If file is huge, this is slow.
|
||||
// However, typical usage for "analytics middleware... for now" implies simple logging.
|
||||
// We will stream the file from the beginning (or end if we could) and collect matching entries.
|
||||
// To respect 'limit' effectively with date filters, we ideally want the *latest* entries.
|
||||
// So reading from end or collecting all and sorting/slicing is needed.
|
||||
// Collecting all in memory is dangerous for large files.
|
||||
// But implementing reverse line reading is complex without a library.
|
||||
// Compromise: Read all, parse, filter, take last N.
|
||||
// Optimization: If no date filter, reasonable to assume we want latest.
|
||||
|
||||
const logs: any[] = [];
|
||||
|
||||
const fileStream = fs.createReadStream(ANALYTICS_FILE);
|
||||
const rl = readline.createInterface({
|
||||
input: fileStream,
|
||||
crlfDelay: Infinity
|
||||
});
|
||||
|
||||
for await (const line of rl) {
|
||||
if (!line.trim()) continue;
|
||||
try {
|
||||
const entry = JSON.parse(line);
|
||||
const timestamp = new Date(entry.timestamp).getTime();
|
||||
|
||||
if (timestamp >= startDate && timestamp <= endDate) {
|
||||
logs.push(entry);
|
||||
}
|
||||
} catch (e) {
|
||||
// Ignore bad lines
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by timestamp desc
|
||||
logs.sort((a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime());
|
||||
|
||||
// Limit
|
||||
const result = logs.slice(0, limit);
|
||||
|
||||
return c.json(result);
|
||||
|
||||
} catch (err: any) {
|
||||
console.error('Error reading analytics:', err);
|
||||
return c.json({ error: 'Internal Server Error' }, 500);
|
||||
}
|
||||
}
|
||||
}
|
||||
84
src/products/analytics/routes.ts
Normal file
84
src/products/analytics/routes.ts
Normal file
@ -0,0 +1,84 @@
|
||||
import { createRoute, z } from '@hono/zod-openapi';
|
||||
|
||||
// OpenAPI definition: GET /api/analytics
// Returns log entries from the analytics JSONL file, optionally filtered
// by an ISO date range; `limit` caps the number of entries returned.
export const getAnalyticsRoute = createRoute({
  method: 'get',
  path: '/api/analytics',
  tags: ['Analytics'],
  summary: 'Get Analytics Data',
  description: 'Retrieve analytics data from the log file, optionally filtered by date.',
  security: [{ bearerAuth: [] }],
  request: {
    query: z.object({
      limit: z.string().optional().default('100').openapi({ description: 'Number of entries to return (default 100)' }),
      startDate: z.string().optional().openapi({ description: 'Filter entries after this date (ISO string)' }),
      endDate: z.string().optional().openapi({ description: 'Filter entries before this date (ISO string)' }),
    }),
  },
  responses: {
    200: {
      description: 'Analytics Data',
      content: {
        'application/json': {
          // Shape of a single analytics log entry as written by the
          // analytics middleware.
          schema: z.array(z.object({
            timestamp: z.string(),
            method: z.string(),
            path: z.string(),
            status: z.number(),
            ip: z.string(),
            userAgent: z.string().optional(),
            referer: z.string().optional(),
            userId: z.string().optional()
          })),
        },
      },
    },
    401: {
      description: 'Unauthorized',
    },
  },
});
|
||||
|
||||
// OpenAPI definition: GET /api/analytics/stream
// Live feed of analytics entries via Server-Sent Events ('log' events).
export const getAnalyticsStreamRoute = createRoute({
  method: 'get',
  path: '/api/analytics/stream',
  tags: ['Analytics'],
  summary: 'Stream Analytics Data',
  description: 'Stream real-time analytics data via Server-Sent Events (SSE).',
  security: [{ bearerAuth: [] }],
  responses: {
    200: {
      description: 'Analytics Event Stream',
      content: {
        'text/event-stream': {
          schema: z.string(),
        },
      },
    },
    401: {
      description: 'Unauthorized',
    },
  },

});
|
||||
|
||||
// OpenAPI definition: DELETE /api/analytics
// Truncates the analytics log file; responds { success: true }.
export const deleteAnalyticsRoute = createRoute({
  method: 'delete',
  path: '/api/analytics',
  tags: ['Analytics'],
  summary: 'Clear Analytics Data',
  description: 'Clear all analytics data from the log file.',
  security: [{ bearerAuth: [] }],
  responses: {
    200: {
      description: 'Analytics Data Cleared',
      content: {
        'application/json': {
          schema: z.object({ success: z.boolean() }),
        },
      },
    },
    401: {
      description: 'Unauthorized',
    },
  },
});
|
||||
22
src/products/enums.ts
Normal file
22
src/products/enums.ts
Normal file
@ -0,0 +1,22 @@
|
||||
/**
 * Machine-readable error codes carried by ProductError.
 * String enum: values are stable identifiers safe to log and serialize.
 */
export enum ProductErrorCode {
  // Lifecycle Errors
  START_FAILED = 'PRODUCT_START_FAILED',
  STOP_FAILED = 'PRODUCT_STOP_FAILED',
  PAUSE_FAILED = 'PRODUCT_PAUSE_FAILED',
  RESUME_FAILED = 'PRODUCT_RESUME_FAILED',

  // Worker Errors
  WORKER_REGISTRATION_FAILED = 'WORKER_REGISTRATION_FAILED',
  WORKER_NOT_FOUND = 'WORKER_NOT_FOUND',

  // Job Errors
  JOB_SUBMISSION_FAILED = 'JOB_SUBMISSION_FAILED',
  JOB_TIMEOUT = 'JOB_TIMEOUT',

  // Configuration Errors
  INVALID_CONFIG = 'INVALID_CONFIG',
  MISSING_DEPENDENCY = 'MISSING_DEPENDENCY',

  // Generic
  UNKNOWN_ERROR = 'UNKNOWN_ERROR'
}
|
||||
29
src/products/errors.ts
Normal file
29
src/products/errors.ts
Normal file
@ -0,0 +1,29 @@
|
||||
import { ProductErrorCode } from './enums.js';
|
||||
|
||||
/** Structured data attached to a ProductError; `message` is mandatory,
 *  any additional context keys are allowed. */
export interface ProductErrorPayload {
  message: string;
  [key: string]: any;
}
|
||||
|
||||
export class ProductError extends Error {
|
||||
public readonly code: ProductErrorCode;
|
||||
public readonly payload: ProductErrorPayload;
|
||||
|
||||
constructor(code: ProductErrorCode, payload: ProductErrorPayload | string) {
|
||||
const message = typeof payload === 'string' ? payload : payload.message;
|
||||
super(message);
|
||||
this.code = code;
|
||||
this.payload = typeof payload === 'string' ? { message: payload } : payload;
|
||||
|
||||
// Restore prototype chain
|
||||
Object.setPrototypeOf(this, new.target.prototype);
|
||||
}
|
||||
|
||||
toJSON() {
|
||||
return {
|
||||
code: this.code,
|
||||
message: this.message,
|
||||
payload: this.payload
|
||||
};
|
||||
}
|
||||
}
|
||||
99
src/products/openai/handlers.ts
Normal file
99
src/products/openai/handlers.ts
Normal file
@ -0,0 +1,99 @@
|
||||
import { Context } from 'hono';
|
||||
import { logger } from '../../commons/logger.js';
|
||||
import { createClient } from '@supabase/supabase-js';
|
||||
|
||||
// Helper to get Supabase credentials (copied from auth middleware logic)
|
||||
const getSupabaseCredentials = () => {
|
||||
const url = process.env.SUPABASE_URL;
|
||||
const key = process.env.SUPABASE_SERVICE_KEY;
|
||||
if (!url || !key) {
|
||||
throw new Error('Supabase credentials missing via process.env');
|
||||
}
|
||||
return { url, key };
|
||||
};
|
||||
|
||||
/**
 * Proxy a chat-completions request to OpenAI on behalf of the
 * authenticated user.
 *
 * Flow: resolve the user's own OpenAI key from the `user_secrets` table,
 * forward the JSON body to api.openai.com, and relay the response —
 * either as plain JSON or, when `body.stream` is set, as a pass-through
 * SSE stream. OpenAI errors are relayed with their original status code.
 */
export async function handleChatCompletions(c: Context) {
  const userId = c.get('userId');
  if (!userId) {
    return c.json({ error: 'Unauthorized' }, 401);
  }

  try {
    // 1. Fetch User API Key
    const { url, key } = getSupabaseCredentials();
    const supabase = createClient(url, key);

    // maybeSingle(): missing row is not an error, just null data.
    const { data: userSecrets, error: secretsError } = await supabase
      .from('user_secrets')
      .select('settings')
      .eq('user_id', userId)
      .maybeSingle();

    if (secretsError) {
      logger.error({ err: secretsError, userId }, 'Failed to fetch user secrets');
      return c.json({ error: 'Internal Server Error' }, 500);
    }

    // Add debug logging
    logger.debug({ userId, hasSecrets: !!userSecrets, settings: userSecrets?.settings }, 'Checking for OpenAI API key');

    // NOTE(review): assumes settings shape { api_keys: { openai_api_key } } —
    // confirm against the profile-settings writer.
    const apiKey = (userSecrets?.settings as any)?.api_keys?.openai_api_key;

    if (!apiKey) {
      logger.warn({ userId }, 'Missing OpenAI API key in user_secrets');
      return c.json({ error: 'OpenAI API key not found. Please add it to your profile settings.' }, 400);
    }

    // 2. Prepare Request to OpenAI
    const body = await c.req.json();

    // Log request (sanitize sensitive data)
    logger.info({ userId, model: body.model }, 'Proxying OpenAI request');

    const headers: Record<string, string> = {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${apiKey}`,
    };

    // 3. Make Request to OpenAI
    const response = await fetch('https://api.openai.com/v1/chat/completions', {
      method: 'POST',
      headers,
      body: JSON.stringify(body),
    });

    // 4. Handle Response
    if (!response.ok) {
      const errorText = await response.text();
      logger.error({ status: response.status, errorText, userId }, 'OpenAI API error');
      // Try to parse error as JSON to return proper error object
      try {
        const errorJson = JSON.parse(errorText);
        return c.json(errorJson, response.status as any);
      } catch (e) {
        return c.text(errorText, response.status as any);
      }
    }

    // 5. Stream Response if requested
    if (body.stream) {
      // Need to handle streaming response properly in Hono/Node
      // We can return the body stream directly

      return new Response(response.body, {
        headers: {
          'Content-Type': 'text/event-stream',
          'Cache-Control': 'no-cache',
          'Connection': 'keep-alive',
        }
      });
    }

    const data = await response.json();
    return c.json(data);

  } catch (err: any) {
    logger.error({ err, userId }, 'OpenAI Proxy handler failed');
    return c.json({ error: 'Internal Server Error' }, 500);
  }
}
|
||||
37
src/products/openai/index.ts
Normal file
37
src/products/openai/index.ts
Normal file
@ -0,0 +1,37 @@
|
||||
import { AbstractProduct } from '../AbstractProduct.js';
|
||||
import { postChatCompletionsRoute } from './routes.js';
|
||||
import { handleChatCompletions } from './handlers.js';
|
||||
|
||||
export class OpenAIProduct extends AbstractProduct<any> {
|
||||
id = 'openai';
|
||||
jobOptions = {};
|
||||
actions = {};
|
||||
workers = [];
|
||||
routes: any[] = [];
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
this.initializeRoutes();
|
||||
}
|
||||
|
||||
initializeRoutes() {
|
||||
// Register the chat completion route
|
||||
// We use CachedHandler here just to wrap it properly, but we probably don't want to actually cache LLM responses aggressively
|
||||
// unless we implement specific caching logic. For now, let's use the handler directly or create a simple wrapper if needed.
|
||||
// Actually, AbstractProduct expects { definition, handler } objects.
|
||||
// And `registry.ts` does: app.openapi(route.definition, route.handler);
|
||||
|
||||
this.routes.push({
|
||||
definition: postChatCompletionsRoute,
|
||||
handler: handleChatCompletions
|
||||
});
|
||||
}
|
||||
|
||||
hash(data: any): string {
|
||||
return 'openai-hash';
|
||||
}
|
||||
|
||||
meta(userId: string): any {
|
||||
return { userId };
|
||||
}
|
||||
}
|
||||
58
src/products/openai/routes.ts
Normal file
58
src/products/openai/routes.ts
Normal file
@ -0,0 +1,58 @@
|
||||
import { createRoute, z } from '@hono/zod-openapi';
|
||||
|
||||
// OpenAPI definition: POST /api/openai/v1/chat/completions
// Mirrors the OpenAI chat-completions request shape; passthrough()
// forwards any additional OpenAI parameters unvalidated.
export const postChatCompletionsRoute = createRoute({
  method: 'post',
  path: '/api/openai/v1/chat/completions',
  tags: ['OpenAI'],
  summary: 'Chat Completions Proxy',
  description: 'Proxies chat completion requests to OpenAI, injecting user API key.',
  request: {
    body: {
      content: {
        'application/json': {
          schema: z.object({
            model: z.string(),
            messages: z.array(z.object({
              role: z.string(),
              content: z.any() // string or array (for multimodal)
            })),
            stream: z.boolean().optional(),
            temperature: z.number().optional(),
            top_p: z.number().optional(),
            n: z.number().optional(),
            presence_penalty: z.number().optional(),
            frequency_penalty: z.number().optional(),
            logit_bias: z.record(z.string(), z.number()).optional(),
            user: z.string().optional(),
            max_tokens: z.number().optional(),
            response_format: z.any().optional(),
            tools: z.array(z.any()).optional(),
            tool_choice: z.any().optional(),
          }).passthrough() // Allow other OpenAI params
        }
      }
    }
  },
  responses: {
    200: {
      description: 'Chat completion response',
      content: {
        'application/json': {
          schema: z.any()
        },
        // Returned when the request sets `stream: true`
        'text/event-stream': {
          schema: z.string()
        }
      }
    },
    400: {
      description: 'Bad Request'
    },
    401: {
      description: 'Unauthorized'
    },
    500: {
      description: 'Internal Server Error'
    }
  }
});
|
||||
57
src/products/registry.ts
Normal file
57
src/products/registry.ts
Normal file
@ -0,0 +1,57 @@
|
||||
|
||||
import './subscriber.js';
|
||||
|
||||
import { OpenAIProduct } from './openai/index.js';
|
||||
import { AnalyticsProduct } from './analytics/index.js';
|
||||
|
||||
// import './subscriber.js';
|
||||
|
||||
let instances: any[] = [];
|
||||
export const ALL_PRODUCTS = instances;
|
||||
|
||||
export const registerProductRoutes = async (app: any) => {
|
||||
console.log('Registering product routes');
|
||||
// Instantiate all products
|
||||
instances = [
|
||||
new OpenAIProduct(),
|
||||
new AnalyticsProduct(),
|
||||
];
|
||||
|
||||
instances.forEach(product => {
|
||||
console.log(`Registering routes for product ${product.id}`);
|
||||
product.routes.forEach((route: any) => {
|
||||
// @ts-ignore
|
||||
app.openapi(route.definition, route.handler);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
export const getAllWorkers = () => {
|
||||
return instances.flatMap(p => p.workers || []);
|
||||
};
|
||||
|
||||
export const startProducts = async (boss?: any) => {
|
||||
for (const product of instances) {
|
||||
try {
|
||||
// Create a timeout promise
|
||||
const timeoutPromise = new Promise((_, reject) => {
|
||||
const id = setTimeout(() => {
|
||||
clearTimeout(id);
|
||||
// @ts-ignore
|
||||
reject(new Error(`Product ${product?.id || 'unknown'} startup timed out`));
|
||||
}, 20000); // 5 seconds timeout
|
||||
});
|
||||
|
||||
// Race the product start against the timeout
|
||||
await Promise.race([
|
||||
product.start(boss),
|
||||
timeoutPromise
|
||||
]);
|
||||
|
||||
} catch (err) {
|
||||
// @ts-ignore
|
||||
console.error(`Failed to start product ${product.id}`, err);
|
||||
// Continue with other products even if one fails
|
||||
}
|
||||
}
|
||||
};
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user