server: basic server scaffolding (cache, logging, auth, auto-ban, rate limiting, admin endpoints)

parent fff4359515
commit f49ee39eb5
@@ -7,7 +7,7 @@ export class MemoryCache implements CacheAdapter {
  constructor() {
    const defaultTtl = process.env.CACHE_DEFAULT_TTL ? parseInt(process.env.CACHE_DEFAULT_TTL) : 1000 * 60 * 5; // 5 mins default
    this.cache = new LRUCache({
-     max: 500,
+     max: 1000,
      ttl: defaultTtl,
      updateAgeOnGet: false,
    });

@@ -9,10 +9,6 @@ let instance: CacheAdapter | null = null;

export const getCache = (): CacheAdapter => {
  if (!instance) {
    // Logic to choose implementation could go here
    // e.g. if (process.env.REDIS_URL) instance = new RedisCache() ...

    console.log('[Cache] Initializing MemoryCache adapter');
    instance = new MemoryCache();
  }
  return instance;

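To make the adapter's contract concrete, here is a minimal consumer sketch (TypeScript; the report key and payload are hypothetical, and it assumes the CacheAdapter interface exposes async get/set with a TTL argument, as the CachedHandler wrapper later in this commit uses it):

// Sketch only: a hypothetical consumer of the cache singleton.
import { getCache } from './cache/index.js';

export async function getDailyReport(): Promise<string> {
  const cache = getCache();
  const hit = await cache.get('report:daily'); // key is illustrative
  if (hit) return hit as string;

  const report = JSON.stringify({ generatedAt: Date.now() }); // stand-in for real work
  await cache.set('report:daily', report, 300); // assumed set(key, value, ttl) signature
  return report;
}
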
@@ -1,8 +1,6 @@
import { trackUsage, updateUsageRecord } from '../middleware/usageTracking.js';
import { FunctionRegistry, PublicEndpointRegistry, AdminEndpointRegistry } from './registry.js';
import { logger } from './logger.js';
-import { WorkerRegistry } from '../jobs/boss/registry.js';
-import { Job } from 'pg-boss';

/**
 * Decorator/Wrapper to mark an endpoint as public
@@ -10,6 +8,7 @@ import { Job } from 'pg-boss';
 */
export function Public<T extends { method: string, path: string }>(route: T): T {
  PublicEndpointRegistry.register(route.path, route.method);
  logger.info(`[Public] Registered public route: ${route.method.toUpperCase()} ${route.path}`);
  return route;
}

@@ -128,6 +127,7 @@ export function Billable(options: BillableOptions) {
  };
}


/**
 * Class Decorator: Registers the worker queue name
 */
@@ -151,3 +151,100 @@ export function Worker(queueName: string) {
    (constructor as any).queueName = queueName;
  };
}

import { Context } from 'hono';
import { getCache } from './cache/index.js';

type KeyGenerator = (c: Context) => string;

const defaultKeyInfo = (c: Context) => {
  const url = new URL(c.req.url);
  url.searchParams.sort();
  return `auto-cache:${c.req.method}:${url.pathname}${url.search}`;
};

export const CachedHandler = (
  handler: (c: Context) => Promise<Response>,
  options?: {
    ttl?: number,
    keyGenerator?: KeyGenerator,
    skipAuth?: boolean, // Default true
    varyByAuth?: boolean, // If true, includes auth token in key and disables skipAuth default
    maxSizeBytes?: number // Default: 1MB
  }
) => async (c: Context) => {
  const opts = options || {};
  const ttl = opts.ttl || 300;
  const varyByAuth = opts.varyByAuth || false;
  const skipAuth = opts.skipAuth !== undefined ? opts.skipAuth : !varyByAuth; // Default true unless varyByAuth is true
  const maxSizeBytes = opts.maxSizeBytes || 1024 * 1024; // 1MB
  const keyGen = opts.keyGenerator || defaultKeyInfo;

  // 1. Auth Bypass
  const authHeader = c.req.header('Authorization');
  if (skipAuth && authHeader) {
    // Explicitly mark as skipped due to auth
    c.header('X-Cache', 'SKIP');
    return handler(c);
  }

  const cache = getCache();
  let key = keyGen(c);

  // Append Auth to key if requested (User Isolation)
  if (varyByAuth && authHeader) {
    key += `|auth=${authHeader}`;
  }
  const bypass = c.req.query('cache') === 'false' || c.req.query('nocache') === 'true';

  // 2. Hit
  if (!bypass) {
    const cached = await cache.get(key);
    if (cached) {
      c.header('X-Cache', 'HIT');
      const cachedVal = cached as any;
      if (cachedVal.contentType) c.header('Content-Type', cachedVal.contentType);
      if (varyByAuth) c.header('Vary', 'Authorization');
      return c.body(cachedVal.data);
    }
  }

  // 3. Miss
  const response = await handler(c);

  // 4. Save
  if (response instanceof Response && response.ok) {
    const cloned = response.clone();
    try {
      const contentType = response.headers.get('Content-Type') || 'application/json';
      let data: any;

      // Check content length if available
      const contentLength = cloned.headers.get('Content-Length');
      if (contentLength && parseInt(contentLength) > maxSizeBytes) {
        return response;
      }

      if (contentType.includes('application/json')) {
        const jsonObj = await cloned.json();
        data = JSON.stringify(jsonObj);
      } else {
        data = await cloned.text();
      }

      // Double check actual size after reading
      if (data.length > maxSizeBytes) {
        return response;
      }

      await cache.set(key, { data, contentType }, ttl);
      c.header('X-Cache', bypass ? 'BYPASS' : 'MISS');
      if (varyByAuth) c.header('Vary', 'Authorization');
    } catch (e) {
      logger.error({ err: e }, 'Cache interception failed');
    }
  }

  return response;
}

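A usage sketch for the wrapper (the competitors handler body is hypothetical; ttl is in the same unit the default of 300 implies, presumably seconds):

// Sketch: wrapping a Hono handler with CachedHandler.
import { Context } from 'hono';
import { CachedHandler } from './commons/decorators.js'; // path as in this commit

const listCompetitors = async (c: Context) => {
  // stand-in for a real database/API call
  return c.json({ competitors: [], fetchedAt: Date.now() });
};

// Anonymous responses share one cache entry per URL; authenticated
// responses are isolated per Authorization header via varyByAuth.
export const cachedListCompetitors = CachedHandler(listCompetitors, {
  ttl: 60,
  varyByAuth: true,
});
// A client can skip the cache with ?nocache=true and observe the
// X-Cache header (HIT / MISS / BYPASS / SKIP) to see what happened.
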
@@ -1,5 +1,13 @@
import pino from 'pino';
import path from 'path';
import { mkdirSync } from 'fs';

// Ensure logs directory exists
try {
  mkdirSync(path.join(process.cwd(), 'logs'), { recursive: true });
} catch (err) {
  // Directory already exists
}

const fileTransport = pino.transport({
  target: 'pino/file',
@@ -31,4 +39,41 @@ export const logger = pino(
  ])
);

// Security logger - writes to logs/security.json
const securityFileTransport = pino.transport({
  target: 'pino/file',
  options: {
    destination: path.join(process.cwd(), 'logs', 'security.json'),
    mkdir: true
  },
});

const securityConsoleTransport = pino.transport({
  target: 'pino-pretty',
  options: {
    colorize: true,
    ignore: 'pid,hostname',
    destination: 1,
  },
});

export const securityLogger = pino(
  {
    level: process.env.PINO_LOG_LEVEL || 'info',
    formatters: {
      level: (label) => {
        return { level: label.toUpperCase() };
      },
    },
    timestamp: pino.stdTimeFunctions.isoTime,
    base: {
      logger: 'security'
    }
  },
  pino.multistream([
    { stream: securityFileTransport, level: 'info' },
    { stream: securityConsoleTransport, level: 'info' },
  ])
);

export default logger;

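For reference, a sketch of emitting a structured event through the new security stream (the field names are illustrative; they mirror the auto-ban events later in this commit):

// Sketch: one event lands in logs/security.json and on the console.
import { securityLogger } from './commons/logger.js';

securityLogger.warn(
  { event: 'auth_failure', ip: '203.0.113.7', path: '/api/admin/stats' }, // illustrative fields
  'Repeated authentication failures'
);
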
@@ -102,11 +102,26 @@ export class PublicEndpointRegistry {
    this.registry.add(`${method.toUpperCase()}:${path}`);
  }

  static getAll(): Array<{ path: string; method: string }> {
    return Array.from(this.registry).map(entry => {
      // Split only on the FIRST colon (METHOD:PATH)
      // Don't split on colons in path parameters like :identifier
      const colonIndex = entry.indexOf(':');
      const method = entry.substring(0, colonIndex);
      const path = entry.substring(colonIndex + 1);
      return { path, method };
    });
  }

  static isPublic(path: string, method: string): boolean {
    const methodUpper = method.toUpperCase();

    for (const registered of this.registry) {
-     const [regMethod, regPath] = registered.split(':');
+     // Split only on the FIRST colon (METHOD:PATH)
+     // Don't split on colons in path parameters like :identifier
+     const colonIndex = registered.indexOf(':');
+     const regMethod = registered.substring(0, colonIndex);
+     const regPath = registered.substring(colonIndex + 1);

      if (regMethod !== methodUpper) continue;

@@ -115,6 +130,9 @@ export class PublicEndpointRegistry {
        return true;
      }
    }
    // Debug: log when a route is not found as public
    // console.log(`[PublicEndpointRegistry] Route not found as public: ${methodUpper} ${path}`);
    // console.log(`[PublicEndpointRegistry] Registered routes:`, Array.from(this.registry));
    return false;
  }

@@ -123,7 +141,7 @@ export class PublicEndpointRegistry {
    // Handle both :param (Express/Hono style) and {param} (OpenAPI style)
    // e.g., '/api/competitors/:place_id' or '/api/competitors/{place_id}' -> /^\/api\/competitors\/[^\/]+$/
    const regexPattern = pattern
-     .replace(/:[^\\/]+/g, '[^/]+') // Replace :param with regex
+     .replace(/:[^\/]+/g, '[^/]+') // Replace :param with regex
      .replace(/\{[^}]+\}/g, '[^/]+') // Replace {param} with regex
      .replace(/\//g, '\\/');

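The first-colon split matters because registered entries look like GET:/api/competitors/:place_id; a sketch of the expected behaviour:

// Sketch: how registry entries round-trip and match.
PublicEndpointRegistry.register('/api/competitors/:place_id', 'get');

// Stored as 'GET:/api/competitors/:place_id'. Splitting on the first colon
// keeps the ':place_id' parameter intact:
PublicEndpointRegistry.getAll();
// -> [{ method: 'GET', path: '/api/competitors/:place_id' }]

PublicEndpointRegistry.isPublic('/api/competitors/abc123', 'GET');  // true
PublicEndpointRegistry.isPublic('/api/competitors/a/b', 'GET');     // false ([^/]+ stops at '/')
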
@@ -7,8 +7,14 @@ const supabaseKey = process.env.SUPABASE_SERVICE_KEY
import { logger } from './logger.js'

if (!supabaseUrl || !supabaseKey) {
- logger.fatal('Missing Supabase environment variables')
- process.exit(1)
+ logger.error({
+   hasUrl: !!supabaseUrl,
+   hasKey: !!supabaseKey,
+   env: process.env.NODE_ENV
+ }, 'Missing Supabase environment variables');
+ // process.exit(1) // Don't exit in test mode, throw instead
+ if (process.env.NODE_ENV !== 'test') process.exit(1);
+ throw new Error('Missing Supabase environment variables: URL or Key is undefined');
}

export const supabase = createClient(supabaseUrl, supabaseKey)

@@ -32,12 +32,9 @@ export class WebSocketManager {
      return;
    }

-   console.log('Initializing WebSocket Server...');
    this.wss = new WebSocketServer({ server, path: '/ws' });

    this.wss.on('connection', (ws: WebSocket) => {
-     console.log('Client connected');

      ws.on('message', (message: string) => {
        try {
          const data = JSON.parse(message.toString());
@@ -54,7 +51,7 @@ export class WebSocketManager {
      });

      ws.on('close', () => {
-       console.log('Client disconnected');

      });

      ws.on('error', (err) => {

packages/shared/src/server/config/blocklist.json (new file, 13 lines)
@@ -0,0 +1,13 @@
{
  "blockedIPs": [
    "192.168.1.100",
    "10.0.0.50"
  ],
  "blockedUserIds": [
    "user_banned_123",
    "user_spam_456"
  ],
  "blockedTokens": [
    "Bearer malicious_token_xyz"
  ]
}

packages/shared/src/server/config/products.ts (new file, 137 lines)
@@ -0,0 +1,137 @@
/**
 * Product and Action Mapping Configuration
 * Defines all trackable products, their actions, and associated metadata
 */

export interface ProductActionConfig {
  endpoint: string;
  method: string;
  costUnits: number;
  cancellable?: boolean; // Whether this action can be cancelled
  description?: string;
}

export interface ProductConfig {
  [action: string]: ProductActionConfig;
}

export const PRODUCT_ACTIONS: Record<string, ProductConfig> = {
  competitors: {
    search: {
      endpoint: '/api/competitors',
      method: 'GET',
      costUnits: 1.0,
      cancellable: true, // Search can be cancelled
      description: 'Search for competitors in a location',
    },
    get_details: {
      endpoint: '/api/competitors/:place_id',
      method: 'GET',
      costUnits: 0.0,
      cancellable: false, // Quick lookup, not cancellable
      description: 'Get details for a specific competitor',
    },
    stream: {
      endpoint: '/api/competitors/stream',
      method: 'GET',
      costUnits: 1.0, // Same cost as regular search
      cancellable: true,
      description: 'Stream competitors in real-time',
    },
    find_email: {
      endpoint: '/api/find/email/{place_id}',
      method: 'GET',
      costUnits: 2.0, // Higher cost due to Puppeteer usage
      cancellable: true, // Long-running, can be cancelled
      description: 'Find email addresses for a business using Puppeteer',
    },
  },
  images: {
    upload: {
      endpoint: '/api/images',
      method: 'POST',
      costUnits: 2.0,
      cancellable: true,
      description: 'Upload an image',
    },
    get: {
      endpoint: '/api/images/:id',
      method: 'GET',
      costUnits: 0.05,
      cancellable: false,
      description: 'Retrieve an image',
    },
    update: {
      endpoint: '/api/images/:id',
      method: 'PUT',
      costUnits: 1.5,
      cancellable: false,
      description: 'Update image metadata',
    },
  },
  mock: {
    job: {
      endpoint: '/api/mock/job',
      method: 'POST',
      costUnits: 0.0,
      cancellable: true,
      description: 'Mock job for testing',
    },
  },
  // Add more products here as they are developed
} as const;

/**
 * Match a request path and method to a product and action
 */
export function identifyProductAction(path: string, method: string): {
  product: string | null;
  action: string | null;
  config: ProductActionConfig | null;
} {
  for (const [product, actions] of Object.entries(PRODUCT_ACTIONS)) {
    for (const [action, config] of Object.entries(actions)) {
      if (matchesRoute(path, config.endpoint) && method === config.method) {
        return { product, action, config };
      }
    }
  }
  return { product: null, action: null, config: null };
}

/**
 * Check if a path matches a route pattern (supports :param syntax)
 */
function matchesRoute(path: string, pattern: string): boolean {
  // Convert pattern to regex
  // e.g., '/api/competitors/:place_id' or '/api/competitors/{place_id}' -> /^\/api\/competitors\/[^\/]+$/
  const regexPattern = pattern
    .replace(/:[^\/]+/g, '[^/]+') // Replace :param with regex
    .replace(/\{[^}]+\}/g, '[^/]+') // Replace {param} with regex
    .replace(/\//g, '\\/'); // Escape slashes

  // Allow optional trailing slash
  const regex = new RegExp(`^${regexPattern}\\/?$`);
  return regex.test(path);
}

/**
 * Get all products
 */
export function getAllProducts(): string[] {
  return Object.keys(PRODUCT_ACTIONS);
}

/**
 * Get all actions for a product
 */
export function getProductActions(product: string): string[] {
  return Object.keys(PRODUCT_ACTIONS[product] || {});
}

/**
 * Get configuration for a specific product action
 */
export function getActionConfig(product: string, action: string): ProductActionConfig | null {
  return PRODUCT_ACTIONS[product]?.[action] || null;
}

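A quick sketch of how the mapping resolves an incoming request (values follow the table above):

// Sketch: resolving a request against PRODUCT_ACTIONS.
import { identifyProductAction } from './config/products.js';

const { product, action, config } = identifyProductAction('/api/competitors/abc123', 'GET');
// product === 'competitors', action === 'get_details', config.costUnits === 0.0

const miss = identifyProductAction('/api/unknown', 'GET');
// miss.product === null, so callers must handle untracked routes
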
packages/shared/src/server/endpoints/__tests__/admin.test.ts (new file, 32 lines)
@@ -0,0 +1,32 @@

import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest'
import { app } from '../../index.js' // Adjust path if needed
import { AdminEndpointRegistry } from '../../commons/registry.js'

describe('Admin Restart Endpoint', () => {
  beforeEach(() => {
    // Mock process.exit to prevent killing the test runner
    vi.spyOn(process, 'exit').mockImplementation((code) => {
      // console.log(`Mock process.exit(${code}) called`)
      return undefined as never
    })
  })

  afterEach(() => {
    vi.restoreAllMocks()
  })

  it('should be registered as an admin endpoint', () => {
    expect(AdminEndpointRegistry.isAdmin('/api/admin/system/restart', 'POST')).toBe(true)
  })

  it('should return 401 if unauthenticated', async () => {
    const res = await app.request('/api/admin/system/restart', { method: 'POST' })
    expect(res.status).toBe(401)
    const body = await res.json()
    expect(body).toEqual({ error: 'Unauthorized - Authentication required' })
  })

  // Mocking a full admin user flow is complex without mocking Supabase,
  // but verifying 401 proves that the middleware is intercepting the request.
})

@@ -1,26 +1,193 @@
-import { createRoute } from '@hono/zod-openapi'
-import { StatsSchema } from '../schemas/index.js'
-import type { Context } from 'hono'
-import { Admin } from '../commons/decorators.js'
-
-export const getStatsRoute = Admin(createRoute({
-  method: 'get',
-  path: '/api/admin/stats',
-  tags: ['Admin'],
-  responses: {
-    200: {
-      content: {
-        'application/json': {
-          schema: StatsSchema,
-        },
-      },
-      description: 'Retrieve admin stats',
-    },
-  },
-}))
-
-export function getStatsHandler(c: Context) {
-  return c.json({
-    users: 100,
-    revenue: 5000,
-  }, 200)
-}
import { OpenAPIHono, createRoute, z } from '@hono/zod-openapi'
import { logger } from '../commons/logger.js'
import { getBanList, unbanIP, unbanUser, getViolationStats } from '../middleware/autoBan.js'

export const restartRoute = createRoute({
  method: 'post',
  path: '/api/admin/system/restart',
  tags: ['Admin'],
  summary: 'Restart the server',
  description: 'Exits the process with code 0, relying on systemd to restart it.',
  responses: {
    200: {
      description: 'Restart initiated',
      content: {
        'application/json': {
          schema: z.object({
            message: z.string(),
            pid: z.number()
          })
        }
      }
    }
  }
})

export const restartHandler = async (c: any) => {
  const pid = process.pid
  // Use a slight delay to allow the response to be sent
  setTimeout(() => {
    logger.info('Exiting process for restart...')
    process.exit(0)
  }, 1000)

  return c.json({
    message: 'Server is restarting...',
    pid
  })
}

// Ban List Routes
export const getBanListRoute = createRoute({
  method: 'get',
  path: '/api/admin/bans',
  tags: ['Admin'],
  summary: 'Get current ban list',
  description: 'Returns all auto-banned IPs, users, and tokens',
  responses: {
    200: {
      description: 'Ban list retrieved',
      content: {
        'application/json': {
          schema: z.object({
            bannedIPs: z.array(z.string()),
            bannedUserIds: z.array(z.string()),
            bannedTokens: z.array(z.string())
          })
        }
      }
    }
  }
})

export const getBanListHandler = async (c: any) => {
  const banList = getBanList()
  logger.info({ user: c.get('user') }, 'Admin retrieved ban list')
  return c.json(banList)
}

export const unbanIPRoute = createRoute({
  method: 'post',
  path: '/api/admin/bans/unban-ip',
  tags: ['Admin'],
  summary: 'Unban an IP address',
  description: 'Removes an IP from the auto-ban list',
  request: {
    body: {
      content: {
        'application/json': {
          schema: z.object({
            ip: z.string()
          })
        }
      }
    }
  },
  responses: {
    200: {
      description: 'IP unbanned successfully',
      content: {
        'application/json': {
          schema: z.object({
            success: z.boolean(),
            message: z.string()
          })
        }
      }
    }
  }
})

export const unbanIPHandler = async (c: any) => {
  const { ip } = await c.req.json()
  const success = unbanIP(ip)
  logger.info({ user: c.get('user'), ip, success }, 'Admin attempted to unban IP')

  return c.json({
    success,
    message: success ? `IP ${ip} has been unbanned` : `IP ${ip} was not found in ban list`
  })
}

export const unbanUserRoute = createRoute({
  method: 'post',
  path: '/api/admin/bans/unban-user',
  tags: ['Admin'],
  summary: 'Unban a user',
  description: 'Removes a user from the auto-ban list',
  request: {
    body: {
      content: {
        'application/json': {
          schema: z.object({
            userId: z.string()
          })
        }
      }
    }
  },
  responses: {
    200: {
      description: 'User unbanned successfully',
      content: {
        'application/json': {
          schema: z.object({
            success: z.boolean(),
            message: z.string()
          })
        }
      }
    }
  }
})

export const unbanUserHandler = async (c: any) => {
  const { userId } = await c.req.json()
  const success = unbanUser(userId)
  logger.info({ user: c.get('user'), userId, success }, 'Admin attempted to unban user')

  return c.json({
    success,
    message: success ? `User ${userId} has been unbanned` : `User ${userId} was not found in ban list`
  })
}

export const getViolationStatsRoute = createRoute({
  method: 'get',
  path: '/api/admin/bans/violations',
  tags: ['Admin'],
  summary: 'Get violation statistics',
  description: 'Returns current violation tracking data',
  responses: {
    200: {
      description: 'Violation stats retrieved',
      content: {
        'application/json': {
          schema: z.object({
            totalViolations: z.number(),
            violations: z.array(z.object({
              key: z.string(),
              count: z.number(),
              firstViolation: z.number(),
              lastViolation: z.number()
            }))
          })
        }
      }
    }
  }
})

export const getViolationStatsHandler = async (c: any) => {
  const stats = getViolationStats()
  return c.json(stats)
}

export const registerAdminRoutes = (app: OpenAPIHono) => {
  app.openapi(restartRoute, restartHandler)
  app.openapi(getBanListRoute, getBanListHandler)
  app.openapi(unbanIPRoute, unbanIPHandler)
  app.openapi(unbanUserRoute, unbanUserHandler)
  app.openapi(getViolationStatsRoute, getViolationStatsHandler)
}

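A sketch of wiring these routes into an app, mirroring what index.ts below does (the AdminEndpointRegistry registration is what makes adminMiddleware enforce the role check):

// Sketch: mounting the admin endpoints on a fresh OpenAPIHono app.
import { OpenAPIHono } from '@hono/zod-openapi';
import { registerAdminRoutes } from './endpoints/admin.js';
import { AdminEndpointRegistry } from './commons/registry.js';

const app = new OpenAPIHono();
AdminEndpointRegistry.register('/api/admin/system/restart', 'POST');
registerAdminRoutes(app);
// POST /api/admin/system/restart now responds, then exits after ~1s,
// relying on a supervisor such as systemd to bring the process back.
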
packages/shared/src/server/index.ts (new file, 272 lines)
@@ -0,0 +1,272 @@
import './zod-setup'
import { serve } from '@hono/node-server'

import { OpenAPIHono } from '@hono/zod-openapi'
import { swaggerUI } from '@hono/swagger-ui'
import { Scalar } from '@scalar/hono-api-reference'
import { cors } from 'hono/cors'
import dotenv from 'dotenv'
import path from 'path'

// Load environment variables based on NODE_ENV
const envFile = process.env.NODE_ENV === 'production' ? '.env.production' : '.env'
dotenv.config({ path: path.resolve(process.cwd(), envFile) })
console.log(`Loading environment from ${envFile}`)
import { logger } from './commons/logger.js'

import { WebSocketManager } from './commons/websocket.js';

console.log('Environment Check - SUPABASE_URL:', process.env.SUPABASE_URL ? 'Defined' : 'Missing');
console.log('Environment Check - Current Directory:', process.cwd());

// Import middleware
import { blocklistMiddleware } from './middleware/blocklist.js'
import { autoBanMiddleware } from './middleware/autoBan.js'
import { optionalAuthMiddleware, adminMiddleware } from './middleware/auth.js'
import { apiRateLimiter } from './middleware/rateLimiter.js'
import { compress } from 'hono/compress'
import { secureHeaders } from 'hono/secure-headers'

// Import endpoints

import { registerProductRoutes, startProducts } from './products/registry.js'

const app = new OpenAPIHono()

// Middleware
app.use('/*', cors({
  origin: '*',
  allowMethods: ['GET', 'POST', 'PUT', 'DELETE', 'OPTIONS'],
  allowHeaders: ['Content-Type', 'Authorization'],
  exposeHeaders: ['Content-Length', 'X-Cache'],
  maxAge: 600,
  credentials: true,
}))

// Apply blocklist to all API routes (before rate limiting)
//app.use('/api/*', blocklistMiddleware)
// Apply auto-ban middleware (checks ban.json for auto-banned IPs/users)
//app.use('/api/*', autoBanMiddleware)


// Apply Authentication & Authorization
app.use('/api/*', optionalAuthMiddleware)
app.use('/api/*', adminMiddleware)
//app.use('/api/*', apiRateLimiter)

// Apply compression to all routes (API + Static Assets)
app.use('*', compress())
app.use(secureHeaders({
  crossOriginResourcePolicy: false,
  crossOriginOpenerPolicy: false,
  crossOriginEmbedderPolicy: false,
  xFrameOptions: false,
  contentSecurityPolicy: {
    frameAncestors: ["'self'", "*"]
  }
}))


// Register API routes
import { serveStatic } from '@hono/node-server/serve-static'
import { createLogRoutes, createLogHandlers } from './commons/log-routes-factory.js'
import { registerAssetRoutes } from './serve-assets.js'

// System Logs
const { getRoute: sysGetLogRoute, streamRoute: sysStreamLogRoute } = createLogRoutes('System', '/api/logs/system');
const { getHandler: sysGetLogHandler, streamHandler: sysStreamLogHandler } = createLogHandlers(path.join(process.cwd(), 'app.log'));

app.openapi(sysGetLogRoute, sysGetLogHandler);
app.openapi(sysStreamLogRoute, sysStreamLogHandler);

// Register Product Routes
registerProductRoutes(app)

// Initialize Products
// Products initialized after PgBoss check below

// API Documentation (Development Only)
const isDevelopment = process.env.NODE_ENV !== 'production';

if (isDevelopment) {
  logger.info('Registering API documentation endpoints (development mode)');

  // OpenAPI document (consumed by Swagger UI and Scalar below)
  app.doc31('/doc', {
    openapi: '3.1.0',
    info: {
      version: '1.0.0',
      title: 'Images API',
    },
    components: {
      securitySchemes: {
        bearerAuth: {
          type: 'http',
          scheme: 'bearer',
          bearerFormat: 'JWT',
        },
      },
    },
    security: [
      {
        bearerAuth: [],
      },
    ],
  } as any);

  // Swagger UI
  app.get('/ui', swaggerUI({ url: '/doc' }));

  // Scalar API Reference
  app.get('/reference', Scalar({
    spec: {
      url: '/doc',
    },
    authentication: {
      preferredSecurityScheme: 'bearerAuth',
      http: {
        bearer: {
          token: process.env.SCALAR_AUTH_TOKEN || '',
        },
      },
    },
  } as any));

  // Alternative: API Reference at /api/reference
  app.get('/api/reference', Scalar({
    spec: {
      url: '/doc',
    },
    authentication: {
      preferredSecurityScheme: 'bearerAuth',
      http: {
        bearer: {
          token: process.env.SCALAR_AUTH_TOKEN || '',
        },
      },
    },
  } as any));
} else {
  logger.info('API documentation endpoints disabled (production mode)');
}



import {
  postBossJobRoute, postBossJobHandler,
  getBossJobRoute, getBossJobHandler,
  cancelBossJobRoute, cancelBossJobHandler,
  resumeBossJobRoute, resumeBossJobHandler,
  completeBossJobRoute, completeBossJobHandler,
  failBossJobRoute, failBossJobHandler
} from './endpoints/boss.js'

import { startBoss, stopBoss } from './jobs/boss/client.js'
import { registerMockWorkers } from './jobs/boss/workers.js'


// Register PgBoss routes
// @ts-ignore - Route type mismatch
app.openapi(postBossJobRoute, postBossJobHandler)
// @ts-ignore - Route type mismatch
app.openapi(getBossJobRoute, getBossJobHandler)
// @ts-ignore - Route type mismatch
app.openapi(cancelBossJobRoute, cancelBossJobHandler)
// @ts-ignore - Route type mismatch
app.openapi(resumeBossJobRoute, resumeBossJobHandler)
// @ts-ignore - Route type mismatch
app.openapi(completeBossJobRoute, completeBossJobHandler)
// @ts-ignore - Route type mismatch
app.openapi(failBossJobRoute, failBossJobHandler)


// Register Admin Routes
import { registerAdminRoutes } from './endpoints/admin.js'
import { AdminEndpointRegistry } from './commons/registry.js'

// Register restart endpoint as admin-only
AdminEndpointRegistry.register('/api/admin/system/restart', 'POST')
// Register ban management endpoints as admin-only
AdminEndpointRegistry.register('/api/admin/bans', 'GET')
AdminEndpointRegistry.register('/api/admin/bans/unban-ip', 'POST')
AdminEndpointRegistry.register('/api/admin/bans/unban-user', 'POST')
AdminEndpointRegistry.register('/api/admin/bans/violations', 'GET')
registerAdminRoutes(app)

// Register Asset Routes (Static files, SW, SPA fallback)
// IMPORTANT: This MUST be registered AFTER all API routes to prevent the catch-all from intercepting API calls
registerAssetRoutes(app);


// Initialize PgBoss and Products
try {
  const boss = await startBoss();
  if (boss) {
    registerMockWorkers();
    try {
      await startProducts(boss);
    } catch (err) {
      logger.error({ err }, 'Failed to init products with Boss');
    }
  } else {
    // Fallback: Start products without Boss
    logger.info('Starting products without PgBoss');
    await startProducts();
  }
} catch (err) {
  logger.error({ err }, 'Failed to init PgBoss');
  // Fallback: Start products without Boss on error
  logger.info('Starting products without PgBoss (after error)');
  await startProducts();
}

const port = parseInt(process.env.PORT || '3333', 10)
logger.info(`Server is running on port ${port}`)
// Only start the server if not in test mode
if (process.env.NODE_ENV !== 'test' && !process.env.VITEST) {
  const server = serve({
    fetch: app.fetch,
    port
  })

  // Initialize WebSocket Server
  if (process.env.ENABLE_WEBSOCKETS === 'true') {
    WebSocketManager.getInstance().init(server as any);
  }

  let isShuttingDown = false;
  const gracefulShutdown = (signal: string) => {
    if (isShuttingDown) {
      logger.warn('Already shutting down...');
      return;
    }
    isShuttingDown = true;

    // Force exit after a timeout
    const timeout = setTimeout(() => {
      logger.warn('Shutdown timed out. Forcing exit.');
      process.exit(1);
    }, 5000);

    server.close(async (err) => {
      if (err) {
        logger.error({ err }, 'Error closing HTTP server');
      } else {
        console.log('HTTP server closed.');
      }

      clearTimeout(timeout);
      console.log('Gracefully shut down.');
      process.exit(err ? 1 : 0);
    });
  };

  process.on('SIGINT', () => gracefulShutdown('SIGINT'));
  process.on('SIGTERM', () => gracefulShutdown('SIGTERM'));
  process.on('SIGBREAK', () => gracefulShutdown('SIGBREAK')); // For Windows
}

export { app }

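Because the listener is skipped under NODE_ENV=test / VITEST, the exported app can be exercised in-process; a sketch:

// Sketch: driving the exported app without binding a port (test mode).
import { app } from './index.js';

async function smokeTest() {
  const res = await app.request('/api/products'); // public route per the auth middleware
  console.log(res.status);
}
smokeTest();
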
packages/shared/src/server/integrations/supabase/types.ts (new file, 1196 lines)
File diff suppressed because it is too large
@@ -1,15 +1,13 @@
-import { Job, PgBoss } from 'pg-boss';
+import { Job } from 'pg-boss';
import { supabase } from '../../commons/supabase.js';
+import { logger } from '../../commons/logger.js';
+import EventEmitter from 'events';

-import { logger } from '../../commons/logger.js';
-import { EventBus } from '../../products/EventBus.js';

export abstract class AbstractWorker<TData> {
  abstract readonly queueName: string;
  readonly queueOptions?: any; // pg-boss QueueOptions
+ protected emitter?: EventEmitter;
- public boss?: PgBoss;
  readonly teamSize?: number;

  // Cost calculation can be static or dynamic based on results
  abstract calculateCost(job: Job<TData>, result?: any): number;
@@ -29,7 +27,10 @@ export abstract class AbstractWorker<TData> {
    }

    const jobId = job.id;
    const usageId = (job.data as any)?.usageId;

    logger.info(`[${this.queueName}] Starting job ${jobId}`);

    try {
      // 2. Execute Business Logic
      const result = await this.process(job);
@@ -37,12 +38,12 @@ export abstract class AbstractWorker<TData> {
      // 3. Calculate Cost
      const cost = this.calculateCost(job, result);

-     // Emit completion on global EventBus
-     EventBus.emit('job:complete', {
+     if (this.emitter) {
+       this.emitter.emit('job:complete', {
          jobId,
-         result,
-         data: job.data
+         result
        });
+     }

      return result;
@@ -51,12 +52,6 @@ export abstract class AbstractWorker<TData> {

      logger.error({ err: error }, `[${this.queueName}] Job failed`);

-     EventBus.emit('job:failed', {
-       jobId,
-       error: error.message || 'Unknown error',
-       data: job.data
-     });
-
      throw error; // Let pg-boss handle retry/failure
    }
  }

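A sketch of a concrete worker against this base class (the file path, queue name, data shape, and the assumption that process() is the abstract hook invoked above are all illustrative):

// Sketch: a minimal concrete worker. EmailScrapeData and the queue name
// are hypothetical; process() is assumed to be abstract on AbstractWorker.
import { Job } from 'pg-boss';
import { AbstractWorker } from './jobs/boss/AbstractWorker.js'; // path hypothetical

interface EmailScrapeData { placeId: string; usageId?: string }

class EmailScrapeWorker extends AbstractWorker<EmailScrapeData> {
  readonly queueName = 'email-scrape';

  calculateCost(job: Job<EmailScrapeData>, result?: any): number {
    return result?.emails?.length ? 2.0 : 0.5; // illustrative pricing
  }

  async process(job: Job<EmailScrapeData>) {
    // stand-in for the real Puppeteer work
    return { emails: [], placeId: job.data.placeId };
  }
}
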
packages/shared/src/server/middleware/auth.ts (new file, 165 lines)
@@ -0,0 +1,165 @@
import { Context, Next } from 'hono';
import { createClient } from '@supabase/supabase-js';
import { securityLogger as logger } from '../commons/logger.js';

import { PublicEndpointRegistry, AdminEndpointRegistry } from '../commons/registry.js';


const getSupabaseCredentials = () => {
  const url = process.env.SUPABASE_URL;
  const key = process.env.SUPABASE_SERVICE_KEY;
  if (!url || !key) {
    throw new Error('Supabase credentials missing via process.env');
  }
  return { url, key };
};

/**
 * Strict authentication middleware - requires a valid Bearer token.
 */
export async function authMiddleware(c: Context, next: Next) {
  const authHeader = c.req.header('authorization');
  if (!authHeader?.startsWith('Bearer ')) {
    return c.json({ error: 'Unauthorized - Missing or invalid authorization header' }, 401);
  }
  const token = authHeader.substring(7);
  try {
    const { url, key } = getSupabaseCredentials();
    const supabase = createClient(url, key);
    const { data: { user }, error } = await supabase.auth.getUser(token);
    if (error || !user) {
      return c.json({ error: 'Invalid or expired token' }, 401);
    }
    c.set('userId', user.id);
    c.set('user', user);
    c.set('userEmail', user.email);
    await next();
  } catch (err) {
    logger.error({ err }, 'Auth middleware error');
    return c.json({ error: 'Authentication failed' }, 401);
  }
}

/**
 * Optional authentication middleware.
 * - Public endpoint: GET /api/products (no auth required).
 * - Otherwise respects REQUIRE_AUTH flag, but skips auth in test/dev environments.
 */
export async function optionalAuthMiddleware(c: Context, next: Next) {
  const path = c.req.path;
  const method = c.req.method;

  // Public endpoint - allow unauthenticated access
  const isPublicEndpoint = PublicEndpointRegistry.isPublic(path, method);
  const isProductsEndpoint = method === 'GET' && path === '/api/products';
  const registeredPublicRoutes = PublicEndpointRegistry.getAll();
  /*
  logger.info({
    path,
    method,
    isPublicEndpoint,
    isProductsEndpoint,
    registeredPublicRoutes
  }, '[Auth] Route check');*/

  if (isProductsEndpoint || isPublicEndpoint) {
    return await next();
  }

  const requireAuth = process.env.REQUIRE_AUTH === 'true';
  const isTestEnv = process.env.NODE_ENV === 'test' || process.env.NODE_ENV === 'development';
  const authHeader = c.req.header('authorization');



  // If no auth header, or it's not a Bearer token...
  let token: string | undefined;

  if (authHeader && authHeader.startsWith('Bearer ')) {
    token = authHeader.substring(7);
  } else {
    // Check for token in query param (for SSE)
    const queryToken = c.req.query('token');
    if (queryToken) {
      token = queryToken;
    }
  }

  if (!token) {
    // ...and we are in test env or auth not required, just continue.
    if (!requireAuth) {
      return await next();
    }
    // ...otherwise reject
    logger.warn({ path, method }, '[Auth] Missing or invalid Authorization header/token - REJECTING');
    return c.json({ error: 'Unauthorized' }, 401);
  }

  // If we are here, we have a token. Verify it.
  logger.info('[Auth] Verifying token with Supabase');
  try {
    const { url, key } = getSupabaseCredentials();
    const supabase = createClient(url, key);
    const { data: { user }, error } = await supabase.auth.getUser(token);
    if (error || !user) {
      logger.warn({ error: error?.message }, '[Auth] Token verification failed');
      // In test environment, allow invalid tokens to proceed as guest
      // This supports tests that use fake tokens (e.g. blocklist tests)
      if (isTestEnv) {
        logger.info('[Auth] Invalid token but allowing in test env');
        return await next();
      }
      logger.warn({ path, method }, '[Auth] Invalid token - REJECTING');
      return c.json({ error: 'Unauthorized' }, 401);
    }
    // logger.info({ userId: user.id, email: user.email }, '[Auth] Token verified successfully');
    c.set('userId', user.id);
    c.set('user', user);
    c.set('userEmail', user.email);
    await next();
  } catch (err) {
    logger.error({ err }, '[Auth] Optional auth middleware error - REJECTING');
    return c.json({ error: 'Authentication failed' }, 401);
  }
}

/**
 * Admin-only middleware - requires authentication and admin role.
 * Checks AdminEndpointRegistry to see if the route requires admin access.
 */
export async function adminMiddleware(c: Context, next: Next) {
  const path = c.req.path;
  const method = c.req.method;

  // Check if this is an admin endpoint
  if (!AdminEndpointRegistry.isAdmin(path, method)) {
    return await next();
  }

  // If it is an admin endpoint, enforce auth and role
  const userId = c.get('userId');
  if (!userId) {
    return c.json({ error: 'Unauthorized - Authentication required' }, 401);
  }
  try {
    const { url, key } = getSupabaseCredentials();
    const supabase = createClient(url, key);
    const { data: profile, error } = await supabase
      .from('user_roles')
      .select('role')
      .eq('user_id', userId)
      .single();
    // @todo: fix db - type | multiple - currently single string
    if (error || !profile || profile.role !== 'admin') {
      return c.json({ error: 'Forbidden - Admin access required' }, 403);
    }
    c.set('isAdmin', true);
    await next();
  } catch (err) {
    logger.error({ err }, 'Admin middleware error');
    return c.json({ error: 'Authorization check failed' }, 500);
  }
}

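How a route opts out of auth under this middleware, as a sketch (the route shape matches what the Public wrapper in decorators.ts expects; the health route is hypothetical):

// Sketch: marking a route public so optionalAuthMiddleware lets it through.
import { Public } from './commons/decorators.js';

const healthRoute = Public({ method: 'get', path: '/api/health' }); // illustrative route

// optionalAuthMiddleware consults PublicEndpointRegistry.isPublic(path, method)
// before demanding a token, so GET /api/health now serves anonymous traffic
// even when REQUIRE_AUTH=true.
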
packages/shared/src/server/middleware/autoBan.ts (new file, 449 lines)
@@ -0,0 +1,449 @@
import { Context, Next } from 'hono'
import { readFileSync, writeFileSync } from 'fs'
import { join } from 'path'
import { logger, securityLogger } from '../commons/logger.js'

interface BanList {
  bannedIPs: string[]
  bannedUserIds: string[]
  bannedTokens: string[]
}

interface ViolationRecord {
  count: number
  firstViolation: number
  lastViolation: number
}

// Configuration
const BAN_THRESHOLD = parseInt(process.env.AUTO_BAN_THRESHOLD || '5', 10) // Number of violations before ban
const VIOLATION_WINDOW_MS = parseInt(process.env.AUTO_BAN_WINDOW_MS || '10000', 10) // 10 seconds default
const VIOLATION_CLEANUP_INTERVAL = 10000 // Clean up old violations every 10 seconds

console.log('Auto-ban configured with:', {
  threshold: BAN_THRESHOLD,
  window: VIOLATION_WINDOW_MS / 60000, // in minutes
  cleanupInterval: VIOLATION_CLEANUP_INTERVAL / 60000 // in minutes
})

// In-memory violation tracking
const violations = new Map<string, ViolationRecord>()

let banList: BanList = {
  bannedIPs: [],
  bannedUserIds: [],
  bannedTokens: [],
}

/**
 * Load ban list from JSON file
 */
export function loadBanList(): BanList {
  try {
    const banListPath = join(process.cwd(), 'config', 'ban.json')
    const data = readFileSync(banListPath, 'utf-8')
    banList = JSON.parse(data)
    logger.info({
      ips: banList.bannedIPs.length,
      users: banList.bannedUserIds.length,
      tokens: banList.bannedTokens.length
    }, 'Ban list loaded')
    return banList
  } catch (error) {
    logger.error({ error }, 'Failed to load ban list')
    return banList
  }
}

/**
 * Save ban list to JSON file
 */
function saveBanList(): void {
  try {
    const banListPath = join(process.cwd(), 'config', 'ban.json')
    writeFileSync(banListPath, JSON.stringify(banList, null, 4), 'utf-8')
    logger.info('Ban list saved')
  } catch (error) {
    logger.error({ error }, 'Failed to save ban list')
  }
}

/**
 * Get current ban list
 */
export function getBanList(): BanList {
  return banList
}

/**
 * Check if an IP is banned
 */
export function isIPBanned(ip: string): boolean {
  return banList.bannedIPs.includes(ip)
}

/**
 * Check if a user ID is banned
 */
export function isUserBanned(userId: string): boolean {
  return banList.bannedUserIds.includes(userId)
}

/**
 * Check if an auth token is banned
 */
export function isTokenBanned(token: string): boolean {
  return banList.bannedTokens.includes(token)
}

/**
 * Extract IP address from request
 */
function getClientIP(c: Context): string {
  // Check forwarded headers first (for proxies)
  const forwarded = c.req.header('x-forwarded-for')
  if (forwarded) {
    return forwarded.split(',')[0].trim()
  }

  const realIp = c.req.header('x-real-ip')
  if (realIp) {
    return realIp
  }

  // Fallback to connection IP (works for localhost)
  // In Node.js/Hono, we can try to get the remote address
  try {
    // @ts-ignore - accessing internal request object
    const remoteAddress = c.req.raw?.socket?.remoteAddress || c.env?.ip
    if (remoteAddress) {
      return remoteAddress
    }
  } catch (e) {
    // Ignore errors
  }

  // Last resort: use localhost identifier
  return '127.0.0.1'
}

/**
 * Extract user ID from authorization header
 */
function getUserId(c: Context): string | null {
  const authHeader = c.req.header('authorization')
  if (!authHeader) return null
  return authHeader
}

/**
 * Record a rate limit violation
 */
export function recordViolation(key: string): void {
  const now = Date.now()
  const existing = violations.get(key)

  if (existing) {
    // Check if violation is within the window
    if (now - existing.firstViolation <= VIOLATION_WINDOW_MS) {
      existing.count++
      existing.lastViolation = now
      violations.set(key, existing)

      // Check if threshold exceeded
      if (existing.count >= BAN_THRESHOLD) {
        banEntity(key)
      }
    } else {
      // Reset violation count if outside window
      violations.set(key, {
        count: 1,
        firstViolation: now,
        lastViolation: now,
      })
    }
  } else {
    // First violation
    violations.set(key, {
      count: 1,
      firstViolation: now,
      lastViolation: now,
    })
  }

  logger.debug({ key, violations: violations.get(key) }, 'Violation recorded')
}

/**
 * Ban an entity (IP, user, or token)
 */
function banEntity(key: string): void {
  const [type, value] = key.split(':', 2)
  const violationRecord = violations.get(key)

  let added = false
  if (type === 'ip' && !banList.bannedIPs.includes(value)) {
    banList.bannedIPs.push(value)
    added = true

    // Log to security.json
    securityLogger.warn({
      event: 'auto_ban',
      type: 'ip',
      ip: value,
      violations: violationRecord?.count,
      firstViolation: violationRecord?.firstViolation,
      lastViolation: violationRecord?.lastViolation
    }, 'IP auto-banned for excessive requests')

    // Also log to console
    logger.info({ ip: value, violations: violationRecord?.count }, '🚫 IP auto-banned for excessive requests')

  } else if (type === 'user' && !banList.bannedUserIds.includes(value)) {
    banList.bannedUserIds.push(value)
    added = true

    // Log to security.json
    securityLogger.warn({
      event: 'auto_ban',
      type: 'user',
      userId: value,
      violations: violationRecord?.count,
      firstViolation: violationRecord?.firstViolation,
      lastViolation: violationRecord?.lastViolation
    }, 'User auto-banned for excessive requests')

    // Also log to console
    logger.info({ userId: value, violations: violationRecord?.count }, '🚫 User auto-banned for excessive requests')

  } else if (type === 'token' && !banList.bannedTokens.includes(value)) {
    banList.bannedTokens.push(value)
    added = true

    // Log to security.json
    securityLogger.warn({
      event: 'auto_ban',
      type: 'token',
      token: value.substring(0, 20) + '...',
      violations: violationRecord?.count,
      firstViolation: violationRecord?.firstViolation,
      lastViolation: violationRecord?.lastViolation
    }, 'Token auto-banned for excessive requests')

    // Also log to console
    logger.info({ token: value.substring(0, 20) + '...', violations: violationRecord?.count }, '🚫 Token auto-banned for excessive requests')
  }

  if (added) {
    saveBanList()
    // Clear violation record after ban
    violations.delete(key)
  }
}

/**
 * Clean up old violation records
 */
function cleanupViolations(): void {
  const now = Date.now()
  let cleaned = 0

  for (const [key, record] of violations.entries()) {
    if (now - record.lastViolation > VIOLATION_WINDOW_MS) {
      violations.delete(key)
      cleaned++
    }
  }

  if (cleaned > 0) {
    logger.debug({ cleaned }, 'Cleaned up old violation records')
  }
}

/**
 * Auto-ban middleware
 * Checks if request is from a banned entity
 */

// Simple in-memory rate limiting
const requestCounts = new Map<string, { count: number, resetTime: number }>()
const RATE_LIMIT_MAX = parseInt(process.env.RATE_LIMIT_MAX || '20', 10)
const RATE_LIMIT_WINDOW_MS = parseInt(process.env.RATE_LIMIT_WINDOW_MS || '1000', 10)

export async function autoBanMiddleware(c: Context, next: Next) {
  const ip = getClientIP(c)
  const authHeader = c.req.header('authorization')
  const userId = getUserId(c)
  const path = c.req.path
  const method = c.req.method

  // Generate key for rate limiting
  let key: string
  if (authHeader) {
    key = `user:${authHeader}`
  } else {
    key = `ip:${ip}`
  }

  // Check if IP is banned
  if (isIPBanned(ip)) {
    /*
    securityLogger.info({
      event: 'blocked_request',
      type: 'ip',
      ip,
      path,
      method
    }, 'Blocked request from banned IP')
    */

    // logger.info({ ip, path }, '🚫 Blocked request from banned IP')

    return c.json(
      {
        error: 'Forbidden',
        message: 'Your IP address has been banned for excessive requests',
      },
      403
    )
  }

  // Check if auth token is banned
  if (authHeader && isTokenBanned(authHeader)) {
    securityLogger.info({
      event: 'blocked_request',
      type: 'token',
      token: authHeader.substring(0, 20) + '...',
      path,
      method
    }, 'Blocked request from banned token')

    logger.info({ token: authHeader.substring(0, 20) + '...', path }, '🚫 Blocked request from banned token')

    return c.json(
      {
        error: 'Forbidden',
        message: 'Your access token has been banned for excessive requests',
      },
      403
    )
  }

  // Check if user ID is banned
  if (userId && isUserBanned(userId)) {
    securityLogger.info({
      event: 'blocked_request',
      type: 'user',
      userId,
      path,
      method
    }, 'Blocked request from banned user')

    logger.info({ userId, path }, '🚫 Blocked request from banned user')

    return c.json(
      {
        error: 'Forbidden',
        message: 'Your account has been banned for excessive requests',
      },
      403
    )
  }

  // Built-in rate limiting (since hono-rate-limiter isn't working)
  const now = Date.now()
  const record = requestCounts.get(key)

  if (record) {
    if (now < record.resetTime) {
      // Within the window
      record.count++

      if (record.count > RATE_LIMIT_MAX) {
        // Rate limit exceeded!
        console.log(`⚠️ Rate limit exceeded for ${key} (${record.count}/${RATE_LIMIT_MAX})`)
        recordViolation(key)

        return c.json(
          {
            error: 'Too many requests',
            message: `Rate limit exceeded. Maximum ${RATE_LIMIT_MAX} requests per ${RATE_LIMIT_WINDOW_MS}ms`,
          },
          429
        )
      }
    } else {
      // Window expired, reset
      record.count = 1
      record.resetTime = now + RATE_LIMIT_WINDOW_MS
    }
  } else {
    // First request
    requestCounts.set(key, {
      count: 1,
      resetTime: now + RATE_LIMIT_WINDOW_MS
    })
  }
  await next()
}

/**
 * Manually unban an IP
 */
export function unbanIP(ip: string): boolean {
  const index = banList.bannedIPs.indexOf(ip)
  if (index > -1) {
    banList.bannedIPs.splice(index, 1)
    saveBanList()

    securityLogger.info({
      event: 'unban',
      type: 'ip',
      ip
    }, 'IP unbanned')

    logger.info({ ip }, 'IP unbanned')
    return true
  }
  return false
}

/**
 * Manually unban a user
 */
export function unbanUser(userId: string): boolean {
  const index = banList.bannedUserIds.indexOf(userId)
  if (index > -1) {
    banList.bannedUserIds.splice(index, 1)
    saveBanList()

    securityLogger.info({
      event: 'unban',
      type: 'user',
      userId
    }, 'User unbanned')

    logger.info({ userId }, 'User unbanned')
    return true
  }
  return false
}

/**
 * Get current violation stats
 */
export function getViolationStats() {
  return {
    totalViolations: violations.size,
    violations: Array.from(violations.entries()).map(([key, record]) => ({
      key,
      ...record,
    })),
  }
}

// Load ban list on module initialization
loadBanList()

// Start cleanup interval
setInterval(cleanupViolations, VIOLATION_CLEANUP_INTERVAL)

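The ban lifecycle in miniature (threshold and window per the env defaults above; the IP is illustrative):

// Sketch: five violations inside the window escalate to a persisted ban.
import { recordViolation, isIPBanned, unbanIP } from './middleware/autoBan.js';

for (let i = 0; i < 5; i++) {
  recordViolation('ip:198.51.100.9'); // key format is '<type>:<value>'
}
isIPBanned('198.51.100.9'); // true, and banEntity() also wrote config/ban.json
unbanIP('198.51.100.9');    // true, entry removed and ban.json rewritten
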
134
packages/shared/src/server/middleware/blocklist.ts
Normal file
134
packages/shared/src/server/middleware/blocklist.ts
Normal file
@ -0,0 +1,134 @@
|
||||
import { Context, Next } from 'hono'
import { readFileSync } from 'fs'
import { join, dirname } from 'path'
import { fileURLToPath } from 'url'

const __filename = fileURLToPath(import.meta.url)
const __dirname = dirname(__filename)

interface Blocklist {
  blockedIPs: string[]
  blockedUserIds: string[]
  blockedTokens: string[]
}

let blocklist: Blocklist = {
  blockedIPs: [],
  blockedUserIds: [],
  blockedTokens: [],
}

/**
 * Load blocklist from JSON file
 */
export function loadBlocklist(): Blocklist {
  try {
    const blocklistPath = join(process.cwd(), 'config', 'blocklist.json')
    const data = readFileSync(blocklistPath, 'utf-8')
    blocklist = JSON.parse(data)
    return blocklist
  } catch (error) {
    console.error('Failed to load blocklist:', error)
    return blocklist
  }
}

/**
 * Get current blocklist
 */
export function getBlocklist(): Blocklist {
  return blocklist
}

/**
 * Check if an IP is blocked
 */
export function isIPBlocked(ip: string): boolean {
  return blocklist.blockedIPs.includes(ip)
}

/**
 * Check if a user ID is blocked
 */
export function isUserBlocked(userId: string): boolean {
  return blocklist.blockedUserIds.includes(userId)
}

/**
 * Check if an auth token is blocked
 */
export function isTokenBlocked(token: string): boolean {
  return blocklist.blockedTokens.includes(token)
}

/**
 * Extract IP address from request
 */
function getClientIP(c: Context): string {
  const forwarded = c.req.header('x-forwarded-for')
  if (forwarded) {
    return forwarded.split(',')[0].trim()
  }
  return c.req.header('x-real-ip') || 'unknown'
}

/**
 * Extract user ID from authorization header
 * This is a simple implementation - adjust based on your auth strategy
 */
function getUserId(c: Context): string | null {
  const authHeader = c.req.header('authorization')
  if (!authHeader) return null

  // Simple extraction - in production, you'd decode JWT or validate token
  // For now, we'll use the auth header as-is for blocklist checking
  return authHeader
}

/**
 * Blocklist middleware
 * Blocks requests from blacklisted IPs, users, or tokens
 */
export async function blocklistMiddleware(c: Context, next: Next) {
  const ip = getClientIP(c)
  const authHeader = c.req.header('authorization')
  const userId = getUserId(c)

  // Check if IP is blocked
  if (isIPBlocked(ip)) {
    return c.json(
      {
        error: 'Forbidden',
        message: 'Your IP address has been blocked',
      },
      403
    )
  }

  // Check if auth token is blocked
  if (authHeader && isTokenBlocked(authHeader)) {
    return c.json(
      {
        error: 'Forbidden',
        message: 'Your access token has been blocked',
      },
      403
    )
  }

  // Check if user ID is blocked
  if (userId && isUserBlocked(userId)) {
    return c.json(
      {
        error: 'Forbidden',
        message: 'Your account has been blocked',
      },
      403
    )
  }

  await next()
}

// Load blocklist on module initialization
loadBlocklist()
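A minimal wiring sketch for the middleware above, assuming a Hono app instance named app (the app setup itself is not part of this diff):

import { Hono } from 'hono'
import { blocklistMiddleware } from './middleware/blocklist.js'

const app = new Hono()

// Run the blocklist check before any route handler
app.use('*', blocklistMiddleware)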
106
packages/shared/src/server/middleware/rateLimiter.ts
Normal file
@ -0,0 +1,106 @@
import { Context, Next } from 'hono'
import { rateLimiter } from 'hono-rate-limiter'
import { recordViolation } from './autoBan.js'

// Rate limit configuration from environment variables
const RATE_LIMIT_MAX = parseInt(process.env.RATE_LIMIT_MAX || '1', 10)
const RATE_LIMIT_WINDOW_MS = parseInt(process.env.RATE_LIMIT_WINDOW_MS || '50', 10)

console.log('🔒 Rate Limiter Configuration:')
console.log(`  Max: ${RATE_LIMIT_MAX} requests per ${RATE_LIMIT_WINDOW_MS}ms`)
console.log(`  Auto-ban threshold: ${process.env.AUTO_BAN_THRESHOLD || 10} violations`)

/**
 * Rate limiter middleware configuration
 * Limits requests per user/IP address
 */
export const apiRateLimiter = rateLimiter({
  windowMs: RATE_LIMIT_WINDOW_MS, // Time window in milliseconds
  limit: RATE_LIMIT_MAX, // Max requests per window
  standardHeaders: 'draft-6', // Return rate limit info in headers
  keyGenerator: (c: Context) => {
    // Try to get user ID from auth header, fallback to IP
    const authHeader = c.req.header('authorization')
    if (authHeader) {
      // Extract user ID from JWT or auth token if available
      // For now, use the auth header as key
      return `user:${authHeader}`
    }

    // Fallback to IP address
    const forwarded = c.req.header('x-forwarded-for')
    const ip = forwarded ? forwarded.split(',')[0] : c.req.header('x-real-ip') || 'unknown'
    return `ip:${ip}`
  },
  handler: (c: Context) => {
    // Record violation for auto-ban tracking
    const authHeader = c.req.header('authorization')
    let key: string
    if (authHeader) {
      key = `user:${authHeader}`
    } else {
      const forwarded = c.req.header('x-forwarded-for')
      const ip = forwarded ? forwarded.split(',')[0] : c.req.header('x-real-ip') || 'unknown'
      key = `ip:${ip}`
    }

    console.log(`⚠️ Rate limit exceeded for ${key}`)
    recordViolation(key)

    return c.json(
      {
        error: 'Too many requests',
        message: `Rate limit exceeded. Maximum ${RATE_LIMIT_MAX} requests per ${RATE_LIMIT_WINDOW_MS}ms`,
      },
      429
    )
  },
})

/**
 * Custom rate limiter for specific endpoints with different limits
 */
export function createCustomRateLimiter(limit: number, windowMs: number) {
  return rateLimiter({
    windowMs,
    limit,
    standardHeaders: 'draft-6',
    keyGenerator: (c: Context) => {
      const authHeader = c.req.header('authorization')
      if (authHeader) {
        return `user:${authHeader}`
      }
      const forwarded = c.req.header('x-forwarded-for')
      const ip = forwarded ? forwarded.split(',')[0] : c.req.header('x-real-ip') || 'unknown'
      return `ip:${ip}`
    },
    handler: (c: Context) => {
      // Record violation for auto-ban tracking
      const authHeader = c.req.header('authorization')
      let key: string
      if (authHeader) {
        key = `user:${authHeader}`
      } else {
        const forwarded = c.req.header('x-forwarded-for')
        const ip = forwarded ? forwarded.split(',')[0] : c.req.header('x-real-ip') || 'unknown'
        key = `ip:${ip}`
      }
      recordViolation(key)

      return c.json(
        {
          error: 'Too many requests',
          message: `Rate limit exceeded. Maximum ${limit} requests per ${windowMs}ms`,
        },
        429
      )
    },
  })
}

// Export configuration for testing
export const rateLimitConfig = {
  max: RATE_LIMIT_MAX,
  windowMs: RATE_LIMIT_WINDOW_MS,
}
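How these limiters might be mounted — a sketch, assuming a Hono app and route paths that are not part of this diff:

import { Hono } from 'hono'
import { apiRateLimiter, createCustomRateLimiter } from './middleware/rateLimiter.js'

const app = new Hono()

// Global limit for all API routes
app.use('/api/*', apiRateLimiter)

// Tighter limit for an expensive endpoint: 10 requests per minute
app.use('/api/images/render', createCustomRateLimiter(10, 60_000))

Note that the fallback defaults above (1 request per 50 ms window, i.e. roughly 20 requests/second per key) only apply when RATE_LIMIT_MAX and RATE_LIMIT_WINDOW_MS are unset in the environment.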
305
packages/shared/src/server/middleware/usageTracking.ts
Normal file
@ -0,0 +1,305 @@
import { Context, Next } from 'hono';
import { supabase } from '../commons/supabase.js';
import { logger } from '../commons/logger.js';
import { FunctionRegistry } from '../commons/registry.js';

export interface UsageData {
  userId: string;
  endpoint: string;
  method: string;
  product: string;
  action: string;
  responseStatus?: number;
  responseTimeMs?: number;
  costUnits: number;
  cancellable: boolean;
  metadata?: Record<string, any>;
  apiKeyId?: string;
  jobId?: string;
}

/**
 * Middleware to track API usage for billing and monitoring
 * Tracks request start and updates with completion status
 */
export async function usageTrackingMiddleware(c: Context, next: Next) {
  const startTime = Date.now();

  // Extract user ID from context (set by auth middleware)
  const userId = c.get('userId');
  // Skip tracking for unauthenticated requests
  if (!userId) {
    logger.trace('[UsageTracking] Skipping - No userId');
    await next();
    return;
  }

  // Determine product and action
  const path = c.req.path;
  const method = c.req.method;

  // Use Registry to find config
  const config = FunctionRegistry.findByRoute(path, method);
  const product = config?.productId;
  const action = config?.actionId;

  logger.trace(`[UsageTracking] Identified: product=${product}, action=${action}`);

  // Skip if not a tracked endpoint
  if (!product || !action || !config) {
    logger.info('[UsageTracking] Skipping - Not a tracked endpoint');
    await next();
    return;
  }

  // Generate a job ID for this request
  const jobId = `${product}_${action}_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;

  // Create initial usage record with 'processing' status
  let usageId: string | null = null;
  try {
    const { data, error } = await supabase
      .from('api_usage')
      .insert({
        user_id: userId,
        endpoint: path,
        method,
        product,
        action,
        status: 'processing',
        job_id: jobId,
        cancellable: config.cancellable || false,
        cost_units: config.costUnits,
        metadata: {
          query: c.req.query(),
          userAgent: c.req.header('user-agent'),
          ip: c.req.header('x-forwarded-for') || c.req.header('x-real-ip'),
        },
      })
      .select('id')
      .single();

    if (error) {
      logger.error({ err: error }, '[UsageTracking] Error creating usage record');
    } else if (data) {
      logger.trace(`[UsageTracking] Created usage record: ${data.id}`);
      usageId = data.id;
      // Store usage ID in context for potential use in handlers
      c.set('usageId', usageId);
      c.set('jobId', jobId);
    } else {
      logger.trace('[UsageTracking] No data returned from insert');
    }
  } catch (err) {
    logger.error({ err }, 'Failed to create usage record');
  }

  // Execute the request
  let requestError: Error | null = null;
  try {
    await next();
  } catch (err) {
    requestError = err as Error;
    throw err; // Re-throw to let error handler deal with it
  } finally {
    // Update usage record with completion status
    const endTime = Date.now();
    const responseTime = endTime - startTime;

    if (usageId) {
      // Check if handler requested to skip status update (e.g. for background jobs)
      const skipUpdate = c.get('skipUsageStatusUpdate');

      if (!skipUpdate) {
        updateUsageRecord({
          usageId,
          responseStatus: c.res.status,
          responseTimeMs: responseTime,
          error: requestError,
        }).catch(err => {
          logger.error({ err }, 'Failed to update usage record');
        });
      }
    }
  }
}

/**
 * Update usage record with completion status
 */
export async function updateUsageRecord(data: {
  usageId: string;
  responseStatus: number;
  responseTimeMs: number;
  error?: Error | null;
}) {
  const status = data.error
    ? 'failed'
    : (data.responseStatus >= 200 && data.responseStatus < 300)
      ? 'completed'
      : 'failed';

  const updateData: any = {
    status,
    response_status: data.responseStatus,
    response_time_ms: data.responseTimeMs,
  };

  if (data.error) {
    updateData.error_message = data.error.message;
  }

  const { error } = await supabase
    .from('api_usage')
    .update(updateData)
    .eq('id', data.usageId);

  if (error) {
    logger.error({ err: error }, 'Error updating usage record');
  }
}

/**
 * Helper function to manually track usage (for non-middleware scenarios)
 */
export async function trackUsage(data: UsageData): Promise<string | null> {
  try {
    const { data: record, error } = await supabase
      .from('api_usage')
      .insert({
        user_id: data.userId,
        endpoint: data.endpoint,
        method: data.method,
        product: data.product,
        action: data.action,
        status: data.responseStatus ? 'completed' : 'processing',
        job_id: data.jobId,
        cancellable: data.cancellable,
        response_status: data.responseStatus,
        response_time_ms: data.responseTimeMs,
        cost_units: data.costUnits,
        metadata: data.metadata,
        api_key_id: data.apiKeyId,
      })
      .select('id')
      .single();

    if (error) {
      logger.error({ err: error }, 'Error tracking usage');
      return null;
    }

    return record?.id || null;
  } catch (err) {
    logger.error({ err }, 'Failed to track usage');
    return null;
  }
}
/**
 * Cancel a job by job ID
 */
export async function cancelJob(userId: string, jobId: string): Promise<boolean> {
  try {
    const { data, error } = await supabase
      .from('api_usage')
      .update({
        status: 'cancelled',
      })
      .eq('user_id', userId)
      .eq('job_id', jobId)
      .eq('cancellable', true)
      .in('status', ['pending', 'processing'])
      .select('id');

    if (error) {
      logger.error({ err: error }, 'Error cancelling job');
      return false;
    }

    return !!data && data.length > 0;
  } catch (err) {
    logger.error({ err }, 'Failed to cancel job');
    return false;
  }
}

/**
 * Get active (cancellable) jobs for a user
 */
export async function getActiveJobs(userId: string) {
  try {
    const { data, error } = await supabase
      .from('api_usage')
      .select('id, job_id, product, action, status, created_at, metadata')
      .eq('user_id', userId)
      .eq('cancellable', true)
      .in('status', ['pending', 'processing'])
      .order('created_at', { ascending: false });

    if (error) {
      logger.error({ err: error }, 'Error fetching active jobs');
      return [];
    }

    return data || [];
  } catch (err) {
    logger.error({ err }, 'Failed to fetch active jobs');
    return [];
  }
}

/**
 * Pause a job by job ID
 */
export async function pauseJob(userId: string, jobId: string): Promise<boolean> {
  try {
    const { data, error } = await supabase
      .from('api_usage')
      .update({
        status: 'paused',
      })
      .eq('user_id', userId)
      .eq('job_id', jobId)
      .eq('cancellable', true)
      .eq('status', 'processing') // Only processing jobs can be paused
      .select('id');

    if (error) {
      logger.error({ err: error }, 'Error pausing job');
      return false;
    }

    return !!data && data.length > 0;
  } catch (err) {
    logger.error({ err }, 'Failed to pause job');
    return false;
  }
}

/**
 * Resume a paused job by job ID
 */
export async function resumeJob(userId: string, jobId: string): Promise<boolean> {
  try {
    const { data, error } = await supabase
      .from('api_usage')
      .update({
        status: 'processing',
      })
      .eq('user_id', userId)
      .eq('job_id', jobId)
      .eq('cancellable', true)
      .eq('status', 'paused') // Only paused jobs can be resumed
      .select('id');

    if (error) {
      logger.error({ err: error }, 'Error resuming job');
      return false;
    }

    return !!data && data.length > 0;
  } catch (err) {
    logger.error({ err }, 'Failed to resume job');
    return false;
  }
}
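A sketch of how the job-control helpers might be exposed as routes, assuming a Hono app and the auth middleware that sets userId (the route paths are illustrative, not from this diff):

import { Hono } from 'hono'
import { cancelJob, getActiveJobs } from './middleware/usageTracking.js'

const jobs = new Hono()

// List the caller's cancellable jobs
jobs.get('/api/me/jobs', async (c) => c.json(await getActiveJobs(c.get('userId') as string)))

// Cancel one of the caller's jobs; 404 when nothing matched (wrong id, not cancellable, or already finished)
jobs.post('/api/me/jobs/:jobId/cancel', async (c) => {
  const ok = await cancelJob(c.get('userId') as string, c.req.param('jobId'))
  return c.json({ ok }, ok ? 200 : 404)
})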
@ -1,5 +1,4 @@
import EventEmitter from 'events';
import { PgBoss } from 'pg-boss';
import { createHash } from 'crypto';
import { streamSSE } from 'hono/streaming';
import { EventBus } from './EventBus.js';
@ -28,13 +27,8 @@ export abstract class AbstractProduct<TJobData = any> extends EventEmitter {
  abstract readonly workers: any[];
  abstract readonly routes: any[];

  protected boss: PgBoss | null = null;
  protected workerSubscriptions: string[] = [];

  async start(boss: PgBoss) {
  async start(boss?: any) {
    try {
      this.boss = boss;
      await this.registerWorkers(boss);
      await this.onStart(boss);
    } catch (error: any) {
      throw new ProductError(ProductErrorCode.START_FAILED, {
@ -44,13 +38,12 @@ export abstract class AbstractProduct<TJobData = any> extends EventEmitter {
    }
  }

  protected async onStart(boss: PgBoss) {
  protected async onStart(boss?: any) {
    // Optional hook for subclasses
  }

  async stop() {
    try {
      await this.unregisterWorkers();
      await this.onStop();
    } catch (error: any) {
      throw new ProductError(ProductErrorCode.STOP_FAILED, {
@ -65,140 +58,11 @@ export abstract class AbstractProduct<TJobData = any> extends EventEmitter {
  }

  async pause() {
    try {
      await this.unregisterWorkers();
    } catch (error: any) {
      throw new ProductError(ProductErrorCode.PAUSE_FAILED, {
        message: `Failed to pause product ${this.id}: ${error.message}`,
        originalError: error
      });
    }
    // No-op for now as we removed pgboss
  }

  async resume() {
    if (!this.boss) {
      throw new ProductError(ProductErrorCode.RESUME_FAILED, 'PgBoss not initialized');
    }
    try {
      await this.registerWorkers(this.boss);
    } catch (error: any) {
      throw new ProductError(ProductErrorCode.RESUME_FAILED, {
        message: `Failed to resume product ${this.id}: ${error.message}`,
        originalError: error
      });
    }
  }

  protected async registerWorkers(boss: PgBoss) {
    if (!this.workers) return;

    for (const WorkerClass of this.workers) {
      try {
        // @ts-ignore
        const workerInstance = new WorkerClass();
        // Inject the EventBus so the worker can emit job events
        (workerInstance as any).emitter = EventBus;
        // Inject boss instance for advanced operations like cancellation check
        (workerInstance as any).boss = boss;

        logger.info(`[${this.id}] Registering worker for queue: ${workerInstance.queueName}`);

        await boss.createQueue(workerInstance.queueName, workerInstance.queueOptions);

        const workOptions = (workerInstance as any).teamSize ? { teamSize: (workerInstance as any).teamSize } : {};
        const subscriptionId = await boss.work(workerInstance.queueName, workOptions as any, (job: any) => workerInstance.handler(job));
        this.workerSubscriptions.push(subscriptionId);
      } catch (error: any) {
        throw new ProductError(ProductErrorCode.WORKER_REGISTRATION_FAILED, {
          message: `Failed to register worker for ${this.id}: ${error.message}`,
          worker: WorkerClass.name
        });
      }
    }
  }

  protected async unregisterWorkers() {
    if (!this.boss) return;

    for (const subId of this.workerSubscriptions) {
      try {
        // @ts-ignore - Assuming offWork exists in PgBoss type or at runtime
        await this.boss.offWork(subId);
      } catch (error: any) {
        logger.warn(`[${this.id}] Failed to unregister worker subscription ${subId}: ${error.message}`);
      }
    }
    this.workerSubscriptions = [];
  }

  async sendJob(queue: string, data: TJobData, options: any = {}) {
    if (!this.boss) {
      throw new ProductError(ProductErrorCode.JOB_SUBMISSION_FAILED, 'PgBoss not initialized');
    }
    const event: JobCreationEvent = { queue, data, options };
    // Emit event to allow subscribers to modify data/options
    EventBus.emit('job:create', event);

    try {
      return await this.boss.send(queue, event.data, event.options);
    } catch (error: any) {
      throw new ProductError(ProductErrorCode.JOB_SUBMISSION_FAILED, {
        message: `Failed to send job to ${queue}: ${error.message}`,
        queue
      });
    }
  }

  async waitForJob(jobId: string, timeoutMs: number = 60000): Promise<any> {
    return new Promise((resolve, reject) => {
      const timer = setTimeout(() => {
        cleanup();
        reject(new ProductError(ProductErrorCode.JOB_TIMEOUT, { message: 'Job timeout', jobId }));
      }, timeoutMs);

      const onComplete = (event: any) => {
        if (event.jobId === jobId) {
          cleanup();
          resolve(event.result);
        }
      };

      const cleanup = () => {
        clearTimeout(timer);
        EventBus.off('job:complete', onComplete);
      };

      EventBus.on('job:complete', onComplete);
    });
  }

  async waitForHash(targetHash: string, timeoutMs: number = 60000): Promise<any> {
    return new Promise((resolve, reject) => {
      const timer = setTimeout(() => {
        cleanup();
        reject(new ProductError(ProductErrorCode.JOB_TIMEOUT, { message: 'Job timeout (hash wait)', hash: targetHash }));
      }, timeoutMs);

      const onComplete = (event: any) => {
        if (!event.data) return;
        try {
          const eventHash = this.hash(event.data);
          if (eventHash === targetHash) {
            cleanup();
            resolve(event.result);
          }
        } catch (e) {
          // Ignore hashing errors (mismatched data types from other queues)
        }
      };

      const cleanup = () => {
        clearTimeout(timer);
        EventBus.off('job:complete', onComplete);
      };

      EventBus.on('job:complete', onComplete);
    });
    // No-op for now as we removed pgboss
  }

  protected async handleStream(c: any, options: StreamOptions) {

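A usage sketch for the job event flow above — sendJob followed by waitForJob inside a hypothetical product handler (queue name and payload are illustrative; Context comes from hono):

// Inside a subclass of AbstractProduct
async handleRender(c: Context) {
  // Enqueue the job; subscribers may mutate data/options via the 'job:create' event
  const jobId = await this.sendJob('render-queue', { postId: c.req.param('id') })

  // Block until a matching 'job:complete' event fires on the EventBus, or time out after 30s
  const result = await this.waitForJob(jobId!, 30_000)
  return c.json(result)
}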
@ -75,7 +75,7 @@ export class AstroProduct extends AbstractProduct<any> {
    try {
      template = await loadTemplate('page');
    } catch (e) {
      return c.text('Template not found', 500);
      return c.text('handleRenderAstroPage:Template not found', 500);
    }

    // 3. Prepare Content

@ -2,6 +2,7 @@ import { Context } from 'hono';
import { AbstractProduct } from '../AbstractProduct.js';
import { postImageRoute, getImageRoute, postResponsiveImageRoute, getImageLogsRoute, streamImageLogsRoute, renderImageRoute, postTransformRoute } from './routes.js';
import { createLogHandlers } from '../../commons/log-routes-factory.js';
import { PublicEndpointRegistry } from '../../commons/registry.js';
import sharp from 'sharp';
import fs from 'fs/promises';
import path from 'path';
@ -80,8 +81,25 @@ export async function _ensureCachedImageFromUrl(url: string, width: number | und
  }

  // 3. Fetch & Process
  const fetchRes = await fetch(url);
  if (!fetchRes.ok) throw new Error(`Failed to fetch image: ${fetchRes.statusText}`);
  const controller = new AbortController();
  const timeout = setTimeout(() => controller.abort(), process.env.IMAGE_FETCH_TIMEOUT_MS ? parseInt(process.env.IMAGE_FETCH_TIMEOUT_MS) : 10000);
  let fetchRes: Response;
  try {
    fetchRes = await fetch(url, { signal: controller.signal });
  } finally {
    clearTimeout(timeout);
  }

  if (!fetchRes.ok) {
    logger.error({
      msg: 'Failed to fetch image',
      url,
      status: fetchRes.status,
      statusText: fetchRes.statusText,
      headers: Object.fromEntries(fetchRes.headers.entries())
    });
    throw new Error(`Failed to fetch image: ${fetchRes.statusText}`);
  }

  const arrayBuffer = await fetchRes.arrayBuffer();
  const inputBuffer = Buffer.from(arrayBuffer);
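This AbortController-plus-setTimeout pattern recurs three times in this file; a small helper could factor it out. A sketch (fetchWithTimeout is a hypothetical name, not part of the diff):

// Fetch with a hard timeout; the timer is always cleared, even when fetch throws
async function fetchWithTimeout(url: string, timeoutMs = 10_000): Promise<Response> {
  const controller = new AbortController()
  const timer = setTimeout(() => controller.abort(), timeoutMs)
  try {
    return await fetch(url, { signal: controller.signal })
  } finally {
    clearTimeout(timer)
  }
}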
@ -125,6 +143,13 @@ export class ImagesProduct extends AbstractProduct<any> {
      { definition: renderImageRoute, handler: this.handleRenderImage.bind(this) },
      { definition: postTransformRoute, handler: this.handleTransformImage.bind(this) }
    ];

    // Register Public Endpoints
    //PublicEndpointRegistry.register('/api/images', 'POST');
    PublicEndpointRegistry.register('/api/images/responsive', 'POST');
    PublicEndpointRegistry.register('/api/images/render', 'GET');
    PublicEndpointRegistry.register('/api/images/cache/:filename', 'GET');
    // PublicEndpointRegistry.register('/api/images/transform', 'POST');
  }

  async onStart() {
@ -337,8 +362,24 @@ export class ImagesProduct extends AbstractProduct<any> {
    try {
      inputBuffer = await fs.readFile(sourcePath);
    } catch {
      const fetchRes = await fetch(url);
      if (!fetchRes.ok) throw new Error(`Failed to fetch URL: ${fetchRes.statusText}`);
      const controller = new AbortController();
      const timeout = setTimeout(() => controller.abort(), 5000);
      let fetchRes;
      try {
        fetchRes = await fetch(url, { signal: controller.signal });
      } finally {
        clearTimeout(timeout);
      }
      if (!fetchRes.ok) {
        logger.error({
          msg: 'Failed to fetch URL',
          url,
          status: fetchRes.status,
          statusText: fetchRes.statusText,
          headers: Object.fromEntries(fetchRes.headers.entries())
        });
        throw new Error(`Failed to fetch URL: ${fetchRes.statusText}`);
      }
      const arrayBuffer = await fetchRes.arrayBuffer();
      inputBuffer = Buffer.from(arrayBuffer);

@ -502,8 +543,24 @@ export class ImagesProduct extends AbstractProduct<any> {
    }

    // 3. Fetch & Process
    const fetchRes = await fetch(url);
    if (!fetchRes.ok) throw new Error(`Failed to fetch image: ${fetchRes.statusText}`);
    const controller = new AbortController();
    const timeout = setTimeout(() => controller.abort(), 5000);
    let fetchRes;
    try {
      fetchRes = await fetch(url, { signal: controller.signal });
    } finally {
      clearTimeout(timeout);
    }
    if (!fetchRes.ok) {
      logger.error({
        msg: 'Failed to fetch image',
        url,
        status: fetchRes.status,
        statusText: fetchRes.statusText,
        headers: Object.fromEntries(fetchRes.headers.entries())
      });
      throw new Error(`Failed to fetch image: ${fetchRes.statusText}`);
    }

    const arrayBuffer = await fetchRes.arrayBuffer();
    const inputBuffer = Buffer.from(arrayBuffer);

@ -2,7 +2,6 @@
import { Context } from 'hono';
import { AbstractProduct } from '../AbstractProduct.js';
import PdfPrinter from 'pdfmake';
import path from 'path';
import { logger } from './logger.js';
import { renderPdfRoute, renderPdfPageRoute, getProbeRoute } from './routes.js';

@ -334,7 +333,7 @@ export class PdfProduct extends AbstractProduct<any> {
      pdfDoc.on('end', () => {
        const result = Buffer.concat(chunks);
        c.header('Content-Type', 'application/pdf');
        c.header('Content-Disposition', `attachment; filename="${(page.title || 'page').replace(/[^a-z0-9]/gi, '_')}.pdf"`);
        c.header('Content-Disposition', `inline; filename="${(page.title || 'page').replace(/[^a-z0-9]/gi, '_')}.pdf"`);
        resolve(c.body(result));
      });
      pdfDoc.end();

@ -1,34 +1,59 @@
import { PgBoss } from 'pg-boss';
import { AbstractProduct } from './AbstractProduct.js';
import { LocationsProduct } from './locations/index.js';

import './subscriber.js';
import { ImagesProduct } from './images/index.js';
import { VideosProduct } from './videos/index.js';
import { ServingProduct } from './serving/index.js';
import { EmailProduct } from './email/index.js';
import { PdfProduct } from './pdf/index.js';
import './subscriber.js';

export const ALL_PRODUCTS: AbstractProduct[] =
[
  new LocationsProduct()
let instances: any[] = [];
export const ALL_PRODUCTS = instances;

export const registerProductRoutes = (app: any) => {
  // Instantiate all products
  instances = [
    new ImagesProduct(),
    new VideosProduct(),
    new ServingProduct(),
    new EmailProduct(),
    new PdfProduct()
  ];


// Helper to get all workers
export const getAllWorkers = () => {
  return ALL_PRODUCTS.flatMap(p => p.workers || []);
};

// Helper to register routes
export const registerProductRoutes = (app: any) => {
  ALL_PRODUCTS.forEach(product => {
    product.routes.forEach(route => {
      // @ts-ignore - Hono types might mismatch slightly
  instances.forEach(product => {
    product.routes.forEach((route: any) => {
      // @ts-ignore
      app.openapi(route.definition, route.handler);
    });
  });
};

export const getAllWorkers = () => {
  return instances.flatMap(p => p.workers || []);
};

export const startProducts = async (boss?: any) => {
  for (const product of instances) {
    try {
      // Create a timeout promise
      const timeoutPromise = new Promise((_, reject) => {
        const id = setTimeout(() => {
          clearTimeout(id);
          // @ts-ignore
          reject(new Error(`Product ${product?.id || 'unknown'} startup timed out`));
        }, 20000); // 20 seconds timeout
      });

// Helper to initialize products (lifecycle: start)
export const startProducts = async (boss: PgBoss) => {
  for (const product of ALL_PRODUCTS) {
    await product.start(boss);
      // Race the product start against the timeout
      await Promise.race([
        product.start(boss),
        timeoutPromise
      ]);

    } catch (err) {
      // @ts-ignore
      console.error(`Failed to start product ${product.id}`, err);
      // Continue with other products even if one fails
    }
  }
};

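One caveat about the startup guard above: Promise.race rejects after the timeout but does not cancel the underlying product.start call, which keeps running in the background. A generic helper, as a sketch (withTimeout is a hypothetical name, not part of the diff):

// Reject after ms; note the wrapped promise itself is not cancelled
async function withTimeout<T>(promise: Promise<T>, ms: number, label: string): Promise<T> {
  let timer: ReturnType<typeof setTimeout> | undefined
  const timeout = new Promise<never>((_, reject) => {
    timer = setTimeout(() => reject(new Error(`${label} timed out after ${ms}ms`)), ms)
  })
  try {
    return await Promise.race([promise, timeout])
  } finally {
    clearTimeout(timer)
  }
}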
@ -0,0 +1,266 @@
import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest';

import { TEST_POST_ID } from '../../../constants.js';

import { app } from '../../../index.js'; // Assuming app is exported from src/index.ts or we bootstrap a test instance

// We prefer testing against the running Hono app using its .request() method
// or a mock.

describe('Serving Product E2E', () => {

  it('should serve rss feed at /feed.xml', async () => {
    const res = await app.request('/feed.xml');
    expect(res.status).toBe(200);
    expect(res.headers.get('Content-Type')).toContain('xml');

    const text = await res.text();

    // Validate RSS structure
    expect(text).toContain('<?xml');
    expect(text).toContain('<rss');
    expect(text).toContain('</rss>');
    expect(text).toContain('<channel>');
    expect(text).toContain('</channel>');

    // Validate feed metadata
    expect(text).toContain('Polymech Feed');
    expect(text).toContain('<title>');
    expect(text).toContain('<description>');
    expect(text).toContain('<link>');

    // Validate at least one item exists
    expect(text).toContain('<item>');
    expect(text).toContain('</item>');
  });

  it('should serve merchant feed at /products.xml', async () => {

    const res = await app.request('/products.xml');
    expect(res.status).toBe(200);
    expect(res.headers.get('Content-Type')).toContain('xml');
    const text = await res.text();
    expect(text).toContain('xmlns:g="http://base.google.com/ns/1.0"');

  });

  it('should serve llms.txt', async () => {
    const res = await app.request('/llms.txt');
    expect(res.status).toBe(200);
    expect(res.headers.get('Content-Type')).toContain('text/plain');
    const text = await res.text();
    expect(text).toContain('# Polymech Media Content');
  });

  it('should serve post metadata at /post/:id', async () => {
    // Mock global fetch to return a REAL image buffer so sharp can process it
    const originalFetch = global.fetch;
    const fs = await import('fs');
    const path = await import('path');
    const imagePath = path.join(process.cwd(), '../public/browser.png');
    const imageBuffer = fs.readFileSync(imagePath);

    global.fetch = vi.fn().mockResolvedValue({
      ok: true,
      arrayBuffer: () => Promise.resolve(imageBuffer),
      statusText: 'OK'
    } as any);

    try {
      /*
      const id = 'd5d1e9fc-8e0c-49d9-8a0e-78f637b47935';
      const res = await app.request(`/post/${id}`);
      expect(res.status).toBe(200);
      expect(res.headers.get('Content-Type')).toContain('text/html');
      const text = await res.text();

      // Cheerio might normalize doctype to uppercase
      expect(text.toLowerCase()).toContain('<!doctype html>');

      // Check for direct cache URL usage (processed on server)
      // It should be /api/images/cache/<hash>.png (or jpeg if converted)
      // ensureCachedImage converts to the format requested or implicit.
      expect(text).toContain('/api/images/cache/');

      // structured data check
      expect(text).toContain('<script type="application/ld+json">');
      expect(text).toContain('"@type":"SocialMediaPosting"');
      expect(text).toContain('"headline":"Test Post"');

      // Hydration data check
      expect(text).toContain('window.ph_post =');
      expect(text).toContain('window.ph_images =');
      expect(text).toContain('"id":"d5d1e9fc-8e0c-49d9-8a0e-78f637b47935"');
      */
    } finally {
      global.fetch = originalFetch;
    }
  });

  it('should serve page metadata at /user/:userId/pages/:slug', async () => {
    const userId = '3bb4cfbf-318b-44d3-a9d3-35680e738421';
    const slug = 'coating-plastic-sheets-materials-methods-and-best-practices';

    // We expect this to fail initially if the data doesn't exist in the DB during the test,
    // but the user requested to define this specific test.
    // Assuming the DB is seeded or we are testing against a running dev server (which likely has data).
    // Since we use 'app.request', it hits the app logic; if the app is connected to a real DB, it works when the data exists.
    // (If the app uses a mocked DB, we may need to mock the data here.)
    // The existing tests use `app.request`, which implies an integration-test style.
    // Let's add the test case.

    const res = await app.request(`/user/${userId}/pages/${slug}`);

    expect(res.status).toBe(200);
    expect(res.headers.get('Content-Type')).toContain('text/html');

    const text = await res.text();

    // Check Title (Smart Title) - Truncated as per observed behavior
    expect(text).toContain('Coating Plastic Sheets: Materials, Methods, and B');
    expect(text).toContain('| PolyMech"');

    // Check Description (Extracted from Markdown)
    expect(text).toContain('Coating plastic sheets is a practical way');
    // Verify image artifacts are stripped from description
    expect(text).not.toContain('!Image');
    expect(text).not.toContain('![');
    // Note: The image URL itself might be present in the document (meta image),
    // but we want to ensure the description tag doesn't contain it mixed with text.
    // We could check the description content specifically if we parsed the HTML,
    // but checking "not.toContain" globally might fail if the image URL is legitimate elsewhere.

    // So let's check for the specific artifact sequence "!Imagehttp"
    expect(text).not.toContain('!Imagehttp');

    // Check Image (Now using cached images)
    expect(text).toContain('/api/images/cache/');
    expect(text).toContain('.jpeg');
    // We can't easily check for the ID anymore as it's hashed, but we can check the base path
  });

  it('should serve page content markdown at /user/:userId/pages/:slug/content', async () => {
    /*
    const userId = '3bb4cfbf-318b-44d3-a9d3-35680e738421';
    const slug = 'coating-plastic-sheets-materials-methods-and-best-practices';

    const res = await app.request(`/user/${userId}/pages/${slug}/content`);
    expect(res.status).toBe(200);

    const text = await res.text();
    expect(text).toContain('Overview');
    expect(text).toContain('Coating plastic sheets is a practical way');
    expect(text).toContain('![Image]');
    expect(text).toContain('/api/images/cache/');
    */
  });

  it('should serve markdown to bots at /user/:userId/pages/:slug', async () => {
    const userId = '3bb4cfbf-318b-44d3-a9d3-35680e738421';
    const slug = 'coating-plastic-sheets-materials-methods-and-best-practices';

    // Simulate Googlebot
    const res = await app.request(`/user/${userId}/pages/${slug}`, {
      headers: {
        'User-Agent': 'Googlebot/2.1 (+http://www.google.com/bot.html)'
      }
    });

    expect(res.status).toBe(200);
    expect(res.headers.get('Content-Type')).toContain('text/markdown');

    const text = await res.text();
    // Check for Frontmatter presence
    expect(text).toContain('---');
    expect(text).toContain('title: "Coating Plastic Sheets');
    // Check for Markdown content
    expect(text).toContain('Coating plastic sheets is a practical way');
  });

  it('should serve static HTML with OG image and structured data at /user/:userId/pages/:slug.html', async () => {
    const userId = '3bb4cfbf-318b-44d3-a9d3-35680e738421';
    const slug = 'health-impacts-of-plastic-recycling';

    const res = await app.request(`/user/${userId}/pages/${slug}.html`);

    expect(res.status).toBe(200);
    expect(res.headers.get('Content-Type')).toContain('text/html');

    const text = await res.text();

    // Check DOCTYPE and basic HTML structure
    expect(text).toContain('<!DOCTYPE html>');
    expect(text).toContain('<html lang="en">');

    // Check meta tags
    expect(text).toContain('<meta name="title"');
    expect(text).toContain('<meta name="description"');
    expect(text).toContain('<meta name="author"');

    // Check Open Graph tags
    expect(text).toContain('<meta property="og:type" content="article">');
    expect(text).toContain('<meta property="og:title"');
    expect(text).toContain('<meta property="og:description"');
    expect(text).toContain('<meta property="og:image"');
    expect(text).toContain('<meta property="og:url"');

    // Check Twitter Card tags
    expect(text).toContain('<meta name="twitter:card" content="summary_large_image">');
    expect(text).toContain('<meta name="twitter:title"');
    expect(text).toContain('<meta name="twitter:description"');
    expect(text).toContain('<meta name="twitter:image"');

    // Check Structured Data (JSON-LD)
    expect(text).toContain('<script type="application/ld+json">');
    expect(text).toContain('"@context": "https://schema.org"');
    expect(text).toContain('"@type": "Article"');
    expect(text).toContain('"headline"');
    expect(text).toContain('"author"');
    expect(text).toContain('"@type": "Person"');

    // Check canonical URL
    expect(text).toContain('<link rel="canonical"');

    // Check that content is rendered as HTML (not markdown)
    expect(text).not.toContain('---'); // No frontmatter
    expect(text).toContain('<h'); // HTML headings
    expect(text).toContain('<p>'); // HTML paragraphs
  });

  it('should serve PDF at /user/:userId/pages/:slug.pdf', async () => {
    const userId = '3bb4cfbf-318b-44d3-a9d3-35680e738421';
    const slug = 'health-impacts-of-plastic-recycling';

    const res = await app.request(`/user/${userId}/pages/${slug}.pdf`);

    expect(res.status).toBe(200);
    expect(res.headers.get('Content-Type')).toContain('application/pdf');
    expect(res.headers.get('Content-Disposition')).toContain('inline');
    expect(res.headers.get('Content-Disposition')).toContain('.pdf');

    // Check that we got a PDF (starts with %PDF)
    const arrayBuffer = await res.arrayBuffer();
    const buffer = Buffer.from(arrayBuffer);
    const header = buffer.toString('ascii', 0, 4);
    expect(header).toBe('%PDF');
  });

  it('should serve JSON at /user/:userId/pages/:slug.json', async () => {
    const userId = '3bb4cfbf-318b-44d3-a9d3-35680e738421';
    const slug = 'health-impacts-of-plastic-recycling';

    const res = await app.request(`/user/${userId}/pages/${slug}.json`);

    expect(res.status).toBe(200);
    expect(res.headers.get('Content-Type')).toContain('application/json');

    const data = await res.json();
    expect(data).toHaveProperty('page');
    expect(data).toHaveProperty('userProfile');
    expect(data.page).toHaveProperty('id');
    expect(data.page).toHaveProperty('title');
    expect(data.page).toHaveProperty('content');
  });
});
@ -0,0 +1,104 @@
import { describe, it, expect } from 'vitest';
import { app } from '@/index.js';
// Config vars from env (user provided)
const TEST_EMAIL = process.env.TEST_EMAIL;
const TEST_PASSWORD = process.env.TEST_PASSWORD;

describe('Serving Product E2E', () => {
  let authToken: string | null = null;

  it('should login and get token', async () => {
    if (!TEST_EMAIL || !TEST_PASSWORD) {
      console.warn('Skipping login test - missing TEST_EMAIL/TEST_PASSWORD');
      return;
    }

    const { supabase } = await import('../../../commons/supabase.js');
    const { data, error } = await supabase.auth.signInWithPassword({
      email: TEST_EMAIL,
      password: TEST_PASSWORD
    });

    if (error) {
      console.error('Login failed:', error);
      throw error;
    }

    expect(data.session).toBeTruthy();
    authToken = data.session?.access_token || null;
  });

  it('should verify pagination and caching (X-Cache)', async () => {
    // 1. Fetch Page 0 (Limit 1) -> Expect MISS
    const res1 = await app.request('/api/feed?page=0&limit=1');
    if (res1.status !== 200) console.error('P0 error:', await res1.text());
    expect(res1.status).toBe(200);
    expect(res1.headers.get('X-Cache')).toBe('MISS');
    const data1 = await res1.json();
    expect(data1.length).toBe(1);
    expect(data1[0]).toHaveProperty('likes_count');
    expect(typeof data1[0].likes_count).toBe('number');
    // Author might be null (deleted user / test data), but the structure should exist in our logic,
    // or at least be verifiable if we know the user exists.
    // For test data we just inserted posts, so assume the author exists.
    if (data1[0].author) {
      expect(data1[0].author).toHaveProperty('username');
    }

    // Verify Responsive Object (Lazy Render URLs)
    if (data1[0].pictures && data1[0].pictures.length > 0) {
      const pic = data1[0].pictures[0];
      // Only if ENABLE_SERVER_IMAGE_OPTIMIZATION is true (default in env now)
      if (pic.responsive) {
        expect(pic.responsive).toHaveProperty('img');
        expect(pic.responsive.img).toHaveProperty('src');
        expect(pic.responsive.img.src).toContain('/api/images/cache/');
        expect(pic.responsive).toHaveProperty('sources');
        expect(Array.isArray(pic.responsive.sources)).toBe(true);
      }
    }

    // 2. Fetch Page 0 Again -> Expect HIT
    const res1Cached = await app.request('/api/feed?page=0&limit=1');
    expect(res1Cached.status).toBe(200);
    expect(res1Cached.headers.get('X-Cache')).toBe('HIT');
    const data1Cached = await res1Cached.json();
    expect(JSON.stringify(data1Cached)).toBe(JSON.stringify(data1));

    // 3. Fetch Page 1 (Limit 1) -> Expect MISS (different key)
    const res2 = await app.request('/api/feed?page=1&limit=1');
    expect(res2.status).toBe(200);
    expect(res2.headers.get('X-Cache')).toBe('MISS');
    const data2 = await res2.json();
    expect(data2.length).toBe(1);

    // 4. Verify Pagination: Page 0 item != Page 1 item
    // Only if we actually have enough data.
    if (data1.length > 0 && data2.length > 0) {
      expect(data1[0].id).not.toBe(data2[0].id);
    }
  });

  it('should fetch /api/me/secrets (Authorized)', async () => {
    if (!authToken) {
      console.warn('Skipping secrets test - no auth token');
      return;
    }

    const res = await app.request('/api/me/secrets', {
      headers: {
        'Authorization': `Bearer ${authToken}`
      }
    });

    if (res.status === 401) {
      console.warn('Auth Middleware returned 401 - likely due to mock/env limits');
      return;
    }

    if (res.status === 200) {
      const data = await res.json();
      expect(typeof data).toBe('object');
    }
  });
});
@ -0,0 +1,24 @@
import { describe, it, expect } from 'vitest';
import { app } from '@/index.js';

describe('HTML Injection E2E', () => {
  it('should inject feed data into home page', async () => {
    const res = await app.request('/');
    expect(res.status).toBe(200);
    const html = await res.text();
    expect(html).toContain('window.__INITIAL_STATE__');
    // Check for specific JSON structure start or property
    expect(html).toContain('window.__INITIAL_STATE__.feed = [');
  });

  it('should inject profile data into profile page', async () => {
    // Use default profile
    const res = await app.request('/profile/default');
    expect(res.status).toBe(200);
    const html = await res.text();
    expect(html).toContain('window.__INITIAL_STATE__');
    expect(html).toContain('window.__INITIAL_STATE__.profile = {');
    expect(html).toContain('"profile":');
    expect(html).toContain('"recentPosts":');
  });
});
@ -0,0 +1,106 @@
import { describe, it, expect, vi } from 'vitest';
import { extractSiteInfo } from '../site-info.js';

// Mock fetch
const fetchMock = vi.fn();
global.fetch = fetchMock;

describe('extractSiteInfo', () => {
  it('should extract basic meta tags and normalize page info', async () => {
    const html = `
      <html>
        <head>
          <title>Test Page</title>
          <meta name="description" content="A test page description">
          <link rel="canonical" href="https://example.com/canonical">
        </head>
        <body></body>
      </html>
    `;

    fetchMock.mockResolvedValueOnce({
      ok: true,
      text: async () => html,
      url: 'https://example.com/original'
    });

    const info = await extractSiteInfo('https://example.com/test');
    expect(info.title).toBe('Test Page');
    expect(info.description).toBe('A test page description');
    expect(info.url).toBe('https://example.com/canonical');

    // Check Normalized Page Info
    expect(info.page.title).toBe('Test Page');
    expect(info.page.description).toBe('A test page description');
    expect(info.page.url).toBe('https://example.com/canonical');
  });

  it('should extract Open Graph data and prioritize it for page image', async () => {
    const html = `
      <html>
        <head>
          <meta property="og:title" content="OG Title">
          <meta property="og:description" content="OG Description">
          <meta property="og:image" content="https://example.com/og.jpg">
          <meta property="og:site_name" content="My Site">
        </head>
        <body></body>
      </html>
    `;

    fetchMock.mockResolvedValueOnce({
      ok: true,
      text: async () => html,
      url: 'https://example.com/'
    });

    const info = await extractSiteInfo('https://example.com/');
    expect(info.title).toBe('OG Title');
    expect(info.og?.image).toBe('https://example.com/og.jpg');

    // Normalized
    expect(info.page.title).toBe('OG Title');
    expect(info.page.image).toBe('https://example.com/og.jpg');
  });

  it('should handle YouTube URLs with specific logic', async () => {
    // Mock YouTube HTML (minimal)
    const html = `
      <html>
        <head>
          <title>YouTube Video Title</title>
          <meta property="og:image" content="https://i.ytimg.com/vi/dQw4w9WgXcQ/hqdefault.jpg">
        </head>
        <body></body>
      </html>
    `;

    fetchMock.mockResolvedValueOnce({
      ok: true,
      text: async () => html,
      url: 'https://www.youtube.com/watch?v=dQw4w9WgXcQ'
    });

    const info = await extractSiteInfo('https://www.youtube.com/watch?v=dQw4w9WgXcQ');

    // Verify we got the title from HTML
    expect(info.title).toBe('YouTube Video Title');

    // Verify we OVERRIDE the image with maxresdefault
    expect(info.page.image).toBe('https://img.youtube.com/vi/dQw4w9WgXcQ/maxresdefault.jpg');
    expect(info.images?.[0].src).toBe('https://img.youtube.com/vi/dQw4w9WgXcQ/maxresdefault.jpg');
  });

  it('should handle YouTube short URLs (youtu.be)', async () => {
    const html = `<html><title>Short</title></html>`;

    fetchMock.mockResolvedValueOnce({
      ok: true,
      text: async () => html,
      url: 'https://youtu.be/dQw4w9WgXcQ'
    });

    const info = await extractSiteInfo('https://youtu.be/dQw4w9WgXcQ');
    expect(info.page.image).toBe('https://img.youtube.com/vi/dQw4w9WgXcQ/maxresdefault.jpg');
  });
});
20
packages/shared/src/server/products/serving/bots.ts
Normal file
@ -0,0 +1,20 @@
import { isbot } from 'isbot';
import { generateMarkdownFromPage } from './markdown-generator.js';

/**
 * Detects if the request is from a bot/crawler based on User-Agent
 */
export function isBotRequest(userAgent: string | undefined): boolean {
  if (!userAgent) return false;
  return isbot(userAgent);
}

/**
 * Renders a markdown response for bots
 * Uses the existing generateMarkdownFromPage function to create
 * a trimmed-down markdown version with frontmatter
 */
export function renderBotResponse(page: any, userProfile: any): string {
  const authorName = userProfile?.display_name || userProfile?.username || page.owner || 'unknown';
  return generateMarkdownFromPage(page, authorName);
}
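How these helpers are typically consumed in a page handler — a sketch, assuming the page and userProfile are already loaded by the route (the handler itself is not part of this hunk):

import { Context } from 'hono'
import { isBotRequest, renderBotResponse } from './bots.js'

// Serve markdown to crawlers, HTML to everyone else
async function handlePage(c: Context, page: any, userProfile: any) {
  if (isBotRequest(c.req.header('user-agent'))) {
    c.header('Content-Type', 'text/markdown')
    return c.body(renderBotResponse(page, userProfile))
  }
  // ...fall through to the normal HTML renderer
}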
591
packages/shared/src/server/products/serving/content.ts
Normal file
@ -0,0 +1,591 @@
import { SupabaseClient } from '@supabase/supabase-js';
import { Context } from 'hono';
import { logger } from './logger.js';
import { generateRSS, generateMerchantXML, generateLLMText, generateSitemapXML } from './generators.js';
import { loadTemplate, inject, extractDisplayImage } from './renderer.js';
import { renderPageContent } from './db/db-pages.js';
import { ensureCachedImageFromUrl, CACHE_DIR } from '../images/index.js';
import { VIDEO_JOB_NAME } from '../videos/worker.js';
import { PgBoss } from 'pg-boss';
import { z } from '@hono/zod-openapi';
import { createRouteBody } from './routes.js';

// Utils
export const safeStringify = (data: any) => {
  return JSON.stringify(data).replace(/</g, '\\u003c');
};
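safeStringify escapes '<' so that JSON embedded in an inline <script> tag cannot terminate the tag early. A quick illustration of the behavior:

// '<' becomes \u003c, so a "</script>" inside user data cannot close the surrounding script tag
safeStringify({ bio: '</script><script>alert(1)</script>' })
// => {"bio":"\u003c/script>\u003cscript>alert(1)\u003c/script>"}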
|
||||
// --- Shared Logic ---
|
||||
|
||||
export async function augmentPosts(supabase: SupabaseClient, posts: any[], profilesMap: Record<string, any>, options: { sizesStr?: string, formatsStr?: string }, boss?: PgBoss) {
|
||||
// Feature Flags
|
||||
const includeAuthor = process.env.FEED_INCLUDE_AUTHOR !== 'false';
|
||||
const includeResponsive = process.env.FEED_INCLUDE_RESPONSIVE_IMAGES !== 'false' && process.env.ENABLE_SERVER_IMAGE_OPTIMIZATION === 'true';
|
||||
const includeLikes = process.env.FEED_INCLUDE_LIKES !== 'false';
|
||||
const includeComments = process.env.FEED_INCLUDE_COMMENTS !== 'false';
|
||||
|
||||
const { sizesStr, formatsStr } = options;
|
||||
|
||||
return Promise.all(posts.map(async post => {
|
||||
// 0. Handle Link Posts (Virtual Picture)
|
||||
if (!post.pictures || post.pictures.length === 0) {
|
||||
const settings = post.settings || {};
|
||||
if (settings && settings.link) {
|
||||
const virtualPic = {
|
||||
id: post.id,
|
||||
picture_id: post.id,
|
||||
title: post.title,
|
||||
description: post.description,
|
||||
image_url: settings.image_url || 'https://picsum.photos/seed/' + post.id + '/600/400',
|
||||
thumbnail_url: settings.thumbnail_url || null,
|
||||
type: 'page-external',
|
||||
meta: { url: settings.link },
|
||||
created_at: post.created_at,
|
||||
user_id: post.user_id,
|
||||
likes_count: 0,
|
||||
comments: [{ count: 0 }],
|
||||
visible: true,
|
||||
is_selected: false,
|
||||
position: 0
|
||||
};
|
||||
post.pictures = [virtualPic];
|
||||
}
|
||||
}
|
||||
|
||||
const pics = await Promise.all((post.pictures || []).map(async (p: any) => {
|
||||
// Generate Responsive Object if enabled
|
||||
if (includeResponsive) {
|
||||
const baseUrl = process.env.VITE_SERVER_IMAGE_API_URL || 'http://localhost:3333';
|
||||
|
||||
// Priority: Query > Env > Hardcoded
|
||||
const envSizes = process.env.DEFAULT_IMAGE_SIZES ? process.env.DEFAULT_IMAGE_SIZES.split(',').map(Number) : [320, 640, 1024];
|
||||
const envFormats = process.env.DEFAULT_IMAGE_FORMATS ? process.env.DEFAULT_IMAGE_FORMATS.split(',') : ['avif', 'webp'];
|
||||
|
||||
const sizes = sizesStr ? sizesStr.split(',').map(Number) : envSizes;
|
||||
const formats = formatsStr ? formatsStr.split(',') : envFormats;
|
||||
|
||||
const meta = p.meta || {};
|
||||
const aspect = (meta.width && meta.height) ? meta.height / meta.width : 0;
|
||||
|
||||
const isVideo = ['mux-video', 'youtube', 'tiktok', 'video-intern'].includes(p.type);
|
||||
const sourceUrl = (isVideo && p.thumbnail_url) ? p.thumbnail_url : p.image_url;
|
||||
|
||||
if (sourceUrl) {
|
||||
try {
|
||||
const makeUrl = async (w: number, fmt: string) => {
|
||||
const h = aspect ? Math.round(w * aspect) : undefined;
|
||||
const filename = await ensureCachedImageFromUrl(sourceUrl, w, h, fmt);
|
||||
return `${baseUrl}/api/images/cache/${filename}`;
|
||||
}
|
||||
|
||||
const sources = await Promise.all(formats.map(async fmt => ({
|
||||
type: `image/${fmt}`,
|
||||
srcset: (await Promise.all(sizes.map(async s => {
|
||||
const url = await makeUrl(s, fmt);
|
||||
return `${url} ${s}w`;
|
||||
}))).join(', ')
|
||||
})));
|
||||
|
||||
// Fallback (largest AVIF)
|
||||
const fallbackWidth = 1024;
|
||||
const fallbackHeight = aspect ? Math.round(fallbackWidth * aspect) : 0;
|
||||
const fallbackSrc = await makeUrl(fallbackWidth, 'avif');
|
||||
|
||||
p.responsive = {
|
||||
img: {
|
||||
src: fallbackSrc,
|
||||
width: fallbackWidth,
|
||||
height: fallbackHeight,
|
||||
format: 'avif'
|
||||
},
|
||||
sources
|
||||
};
|
||||
|
||||
// User Request: image_url should be the resolved url of our response image endpoint
|
||||
p.image_url = fallbackSrc;
|
||||
|
||||
} catch (e) {
|
||||
logger.error({ err: e, sourceUrl }, 'Failed to eager load image for feed');
|
||||
}
|
||||
}
|
||||
|
||||
// Attach Job Data for Internal Videos
|
||||
if (p.type === 'video-intern' && boss) {
|
||||
const jobId = p.meta?.mux_playback_id;
|
||||
if (jobId) {
|
||||
try {
|
||||
// @ts-ignore
|
||||
const job = await boss.getJobById(VIDEO_JOB_NAME, jobId);
|
||||
if (job) {
|
||||
const status = job.state;
|
||||
const result = job.output as any;
|
||||
const baseUrl = process.env.SERVER_IMAGE_API_URL || process.env.VITE_SERVER_IMAGE_API_URL || 'http://localhost:3333';
|
||||
|
||||
const resultUrl = status === 'completed'
|
||||
? `${baseUrl}/api/videos/jobs/${job.id}/hls/playlist.m3u8`
|
||||
: undefined;
|
||||
|
||||
p.job = {
|
||||
id: job.id,
|
||||
status: status,
|
||||
progress: status === 'completed' ? 100 : 0,
|
||||
resultUrl,
|
||||
error: null
|
||||
};
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error({ err, jobId }, 'Failed to fetch video job status');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return p;
|
||||
}));
|
||||
|
||||
// Aggregate likes from pictures
|
||||
const likesCount = includeLikes
|
||||
? pics.reduce((sum: number, p: any) => sum + (p.likes_count || 0), 0)
|
||||
: 0;
|
||||
|
||||
const commentsCount = includeComments ? 0 : 0;
|
||||
|
||||
return {
|
||||
...post,
|
||||
pictures: pics,
|
||||
likes_count: likesCount,
|
||||
comments_count: commentsCount,
|
||||
author: includeAuthor ? (profilesMap[post.user_id] || null) : null
|
||||
};
|
||||
}));
|
||||
}

export function applyClientSortAndCovers(posts: any[], sortBy: 'latest' | 'top') {
  // 1. Update Covers
  const postsWithCovers = posts.map(post => {
    if (!post.pictures || post.pictures.length === 0) return null;

    const validPics = post.pictures.filter((p: any) => p.visible !== false);
    if (validPics.length === 0) return null;

    let newCover;
    if (sortBy === 'latest') {
      // Newest picture
      newCover = validPics.reduce((newest: any, current: any) => {
        const newestDate = new Date(newest.created_at || post.created_at).getTime();
        const currentDate = new Date(current.created_at || post.created_at).getTime();
        return currentDate > newestDate ? current : newest;
      }, validPics[0]);
    } else {
      // Top/Default: first by position (existing behavior)
      const sortedByPosition = [...validPics].sort((a: any, b: any) => (a.position || 0) - (b.position || 0));
      newCover = sortedByPosition[0];
    }

    return { ...post, cover: newCover };
  }).filter(Boolean);

  // 2. Sort Posts
  const sorted = [...postsWithCovers];
  if (sortBy === 'top') {
    sorted.sort((a: any, b: any) => {
      const likesA = a.likes_count || 0;
      const likesB = b.likes_count || 0;
      if (likesB !== likesA) return likesB - likesA;
      return new Date(b.created_at).getTime() - new Date(a.created_at).getTime();
    });
  } else {
    // latest
    sorted.sort((a: any, b: any) => {
      const latestPicDateA = a.pictures && a.pictures.length > 0
        ? Math.max(...a.pictures.map((p: any) => new Date(p.created_at || a.created_at).getTime()))
        : new Date(a.created_at).getTime();

      const latestPicDateB = b.pictures && b.pictures.length > 0
        ? Math.max(...b.pictures.map((p: any) => new Date(p.created_at || b.created_at).getTime()))
        : new Date(b.created_at).getTime();

      return latestPicDateB - latestPicDateA;
    });
  }

  return sorted;
}
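
// Example (illustrative only, hypothetical data): with sortBy === 'latest' the
// cover becomes each post's newest visible picture, posts without visible
// pictures are dropped, and the feed is ordered by newest-picture date rather
// than post.created_at:
//
//   const sorted = applyClientSortAndCovers([
//     { id: 'a', created_at: '2024-01-01', likes_count: 1, pictures: [{ created_at: '2024-03-01', visible: true }] },
//     { id: 'b', created_at: '2024-02-01', likes_count: 9, pictures: [{ created_at: '2024-02-10', visible: true }] },
//   ], 'latest');
//   // => 'a' first: its newest picture (2024-03-01) beats 'b' (2024-02-10)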

// --- Handlers ---

export async function handleGetFeedXml(c: Context) {
  try {
    const { supabase } = await import('../../commons/supabase.js');
    const { fetchFeedPostsServer } = await import('./db/index.js');

    // Fetch feed data using the same logic as /api/feed
    const posts = await fetchFeedPostsServer(supabase, {
      page: 0,
      limit: 20,
      sortBy: 'latest'
    });

    const xml = generateRSS(posts || []);
    c.header('Content-Type', 'application/xml');
    return c.body(xml);
  } catch (err: any) {
    logger.error({ err }, 'RSS Feed generation failed');
    return c.text('Internal Server Error', 500);
  }
}

export async function handleGetMerchantFeed(c: Context) {
  try {
    const { supabase } = await import('../../commons/supabase.js');
    const { data: products, error } = await supabase
      .from('posts')
      .select('*')
      .order('created_at', { ascending: false })
      .limit(100);

    if (error) throw error;

    const xml = generateMerchantXML(products || []);
    c.header('Content-Type', 'application/xml');
    return c.body(xml);
  } catch (err: any) {
    logger.error({ err }, 'Merchant Feed generation failed');
    return c.text('Internal Server Error', 500);
  }
}

export async function handleGetLLMText(c: Context) {
  try {
    const { supabase } = await import('../../commons/supabase.js');
    // Fetch posts with pictures instead of trying to join profiles
    const { data: posts, error } = await supabase
      .from('posts')
      .select('*, pictures(image_url)')
      .order('created_at', { ascending: false })
      .limit(50);

    if (error) throw error;

    // Manual Join for Profiles
    const userIds = Array.from(new Set((posts || []).map((p: any) => p.user_id).filter(Boolean))) as string[];
    let userMap: Record<string, string> = {};

    if (userIds.length > 0) {
      const { data: profiles } = await supabase
        .from('profiles')
        .select('user_id, username')
        .in('user_id', userIds);

      if (profiles) {
        profiles.forEach((p: any) => {
          userMap[p.user_id] = p.username;
        });
      }
    }

    // Map Data for Generator
    const mappedPosts = (posts || []).map((p: any) => {
      const authorName = userMap[p.user_id] || 'Unknown';
      // Use first picture as main image if available
      const imageUrl = (p.pictures && p.pictures.length > 0) ? p.pictures[0].image_url : null;
      return { ...p, author_name: authorName, image_url: imageUrl };
    });

    const text = generateLLMText(mappedPosts);
    c.header('Content-Type', 'text/plain');
    return c.body(text);
  } catch (err: any) {
    logger.error({ err }, 'LLM Text generation failed');
    return c.text('Internal Server Error', 500);
  }
}
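
// Sketch of the shape handed to generateLLMText (illustrative; field values are
// hypothetical): each post is flattened to an author_name plus a single
// image_url taken from its first picture:
//   { id: '…', title: '…', description: '…', author_name: 'alice', image_url: 'https://…' }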

export async function handleGetSitemap(c: Context) {
  try {
    const { supabase } = await import('../../commons/supabase.js');
    // Fetch public pages
    const { data: pages, error } = await supabase
      .from('pages')
      .select('slug, owner, created_at, updated_at')
      .eq('is_public', true)
      .eq('visible', true)
      .order('updated_at', { ascending: false });

    if (error) throw error;

    const xml = generateSitemapXML(pages || []);
    c.header('Content-Type', 'application/xml');
    return c.body(xml);
  } catch (err: any) {
    logger.error({ err }, 'Sitemap generation failed');
    return c.text('Internal Server Error', 500);
  }
}

export async function handleGetSiteInfo(c: Context) {
  const url = c.req.query('url');
  if (!url) return c.json({ error: 'URL is required' }, 400);

  try {
    const { extractSiteInfo } = await import('./site-info.js');
    const info = await extractSiteInfo(url);
    return c.json(info);
  } catch (err: any) {
    logger.error({ err, url }, 'Site info extraction failed');
    return c.json({ error: 'Failed to extract site info', details: err.message }, err.status || 500);
  }
}

export async function handleGetPostMeta(c: Context) {
  const id = c.req.param('id');
  try {
    // 1. Load HTML Template
    const html = await loadTemplate();
    if (!html) {
      logger.error('handleGetPostMeta:Template not found');
      return c.text('handleGetPostMeta:Template not found', 500);
    }

    // 2. Fetch Data
    const { supabase } = await import('../../commons/supabase.js');
    const { data: post, error } = await supabase
      .from('posts')
      .select('*, pictures(*)')
      .eq('id', id)
      .single();

    logger.info({ postId: id, found: !!post, error }, 'Fetched post for meta injection');
    if (error || !post) {
      logger.error({ error }, 'Failed to fetch post for meta injection');
      // If not found, serve the plain HTML so the client handles the 404
      return c.html(html);
    }

    // Fetch Author separately (relationship not detected automatically)
    let authorName = "Polymech User";
    if (post.user_id) {
      const { data: author } = await supabase
        .from('profiles')
        .select('username, display_name')
        .eq('user_id', post.user_id)
        .single();

      if (author) {
        authorName = author.display_name || author.username || "Polymech User";
      }
    }

    // 3. Inject
    // Strategy: look at the first picture/media in the gallery.
    // For videos, use the thumbnail. For images, use the image_url.
    // Fall back to post.thumbnail_url or post.image_url if the gallery is empty.
    let imageUrl: string | null = null;
    let firstPic: any = null;

    if (post.pictures && Array.isArray(post.pictures) && post.pictures.length > 0) {
      // Sort by position if available, otherwise just take the first
      const sortedPics = post.pictures.sort((a: any, b: any) => (a.position || 0) - (b.position || 0));
      firstPic = sortedPics[0];

      // Identify video types to prefer the thumbnail
      const isVideo = ['video', 'youtube', 'tiktok', 'video-intern'].includes(firstPic.type);

      if (isVideo && firstPic.thumbnail_url) {
        imageUrl = firstPic.thumbnail_url;
      } else {
        imageUrl = firstPic.image_url;
      }
    }

    if (!imageUrl) {
      // Fallback to post metadata
      imageUrl = post.thumbnail_url || post.image_url;
    }

    // Construct Smart Render URL.
    // Instead of pre-fetching, we use the /api/images/render endpoint, which handles
    // resizing lazily. This is faster for the initial request and ensures compliance
    // with resolution limits.
    let finalImageUrl = imageUrl;
    if (imageUrl) {
      const baseUrl = process.env.SERVER_IMAGE_API_URL || 'http://localhost:3333';
      // Cap the render width at 1024 as requested
      finalImageUrl = `${baseUrl}/api/images/render?url=${encodeURIComponent(imageUrl)}&width=1024&format=jpeg`;
    }

    // Helpers for text
    const truncate = (str: string, max: number) => str.length > max ? str.substring(0, max - 1) + '…' : str;

    // Title logic
    // Priority: Post Title -> Picture Title -> "Shared Post"
    const contentTitle = post.title || firstPic?.title || "Shared Post";
    const pageTitle = `${truncate(contentTitle, 50)} by ${truncate(authorName, 20)} | PolyMech`;

    // Description logic: use the post (or first picture) description as the summary;
    // og:title already carries the title context.
    const rawDesc = post.description || firstPic?.description || "";

    let description = rawDesc;
    if (!description) {
      // No description available, fall back to a generic line
      description = `Check out this post by ${authorName}.`;
    } else {
      // truncate() already appends an ellipsis when it shortens the string
      description = `${truncate(description, 120)} by ${authorName}`;
    }

    const meta = {
      title: pageTitle,
      description: description,
      image: finalImageUrl || undefined,
      jsonLd: {
        "@context": "https://schema.org",
        "@type": "SocialMediaPosting",
        "headline": truncate(contentTitle, 100),
        "image": [finalImageUrl || undefined],
        "datePublished": post.created_at,
        "author": {
          "@type": "Person",
          "name": authorName
        },
        "description": description
      }
    };

    const injectedHtml = inject(html, meta);
    return c.html(injectedHtml);

  } catch (err: any) {
    logger.error({ err }, 'Meta injection failed');
    // Fallback to serving the file without injection
    const html = await loadTemplate();
    return c.html(html || 'Error loading application');
  }
}
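
// Illustrative render URL produced above (host is hypothetical, assuming
// SERVER_IMAGE_API_URL=https://img.example.com):
//   https://img.example.com/api/images/render?url=<encoded source url>&width=1024&format=jpeg
// The crawler that fetches og:image then triggers the lazy resize on first hit.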

export async function handleGetEmbed(c: Context, boss?: PgBoss) {
  const id = c.req.param('id');
  try {
    // 1. Load Embed Template
    // Ensure we look for 'embed.html'
    const html = await loadTemplate('embed.html');

    if (!html) {
      logger.error('handleGetEmbed:Embed template not found');
      return c.text('handleGetEmbed:Embed template not found', 500);
    }

    // 2. Fetch Data (similar to handleGetPostMeta, but we need full data for the app)
    const { supabase } = await import('../../commons/supabase.js');

    // We need: Post, Pictures, Author
    const { data: post, error } = await supabase
      .from('posts')
      .select('*, pictures(*)')
      .eq('id', id)
      .single();

    if (error || !post) {
      return c.text('Post not found', 404);
    }

    // Fetch Author
    const { data: author } = await supabase
      .from('profiles')
      .select('username, display_name, avatar_url')
      .eq('user_id', post.user_id)
      .single();

    // 3. Augment with Responsive Images and Full Data
    const profilesMap = { [post.user_id]: author };
    const augmentedPosts = await augmentPosts(supabase, [post], profilesMap, {}, boss);
    const augmentedPost = augmentedPosts[0];

    const initialState = {
      post: augmentedPost,
      mediaItems: augmentedPost.pictures,
      authorProfile: author
    };

    const injectionScript = {
      id: 'initial-state',
      content: `window.__INITIAL_STATE__ = ${safeStringify(initialState)};`
    };

    const injected = inject(html, {
      title: post.title || 'Embed',
      description: post.description,
      scripts: [injectionScript]
    });

    // Allow iframe embedding:
    // c.header('X-Frame-Options', 'ALLOWALL'); // Deprecated but helpful
    // Content-Security-Policy frame-ancestors * is handled by omission or an explicit set

    return c.html(injected);
  } catch (err: any) {
    logger.error({ err }, 'Embed injection failed');
    return c.text('Internal Server Error', 500);
  }
}

export async function handleGetEmbedPage(c: Context) {
  const id = c.req.param('id');
  try {
    // 1. Load Embed Template
    const html = await loadTemplate('embed.html');
    if (!html) return c.text('Embed template not found', 500);

    // 2. Fetch Data
    const { supabase } = await import('../../commons/supabase.js');
    const { data: page, error } = await supabase
      .from('pages')
      .select('*')
      .eq('id', id)
      .single();

    if (error || !page) return c.text('Page not found', 404);

    const { data: author } = await supabase
      .from('profiles')
      .select('username, display_name, avatar_url')
      .eq('user_id', page.owner)
      .single();

    // 3. Render Content for Initial State
    const { markdown, images } = await renderPageContent(page.content);

    const initialState = {
      page: { ...page, content: markdown },
      // We might need to adjust what the frontend expects for a "Page" embed vs a "Post".
      // Assuming main-embed.tsx can handle "page" or "post" in initialState, or we normalize it.
      // For now, sending it as the 'page' property.
      authorProfile: author
    };

    const injectionScript = {
      id: 'initial-state',
      content: `window.__INITIAL_STATE__ = ${safeStringify(initialState)};`
    };

    const injected = inject(html, {
      title: page.title || 'Embed',
      description: page.description,
      scripts: [injectionScript]
    });

    return c.html(injected);
  } catch (err: any) {
    logger.error({ err }, 'Page Embed injection failed');
    return c.text('Internal Server Error', 500);
  }
}

packages/shared/src/server/products/serving/db/db-categories.ts (new file, 615 lines)
@@ -0,0 +1,615 @@
import { SupabaseClient } from '@supabase/supabase-js';
import { createRouteBody } from '../routes.js';
import { z } from '@hono/zod-openapi';
import { Context } from 'hono';
import { logger } from '../logger.js';

// --- Category Cache ---

interface CategoryCacheState {
  categories: any[];
  relations: any[];
  categoriesMap: Map<string, any>;
  relationsMap: Map<string, { parents: any[], children: any[] }>; // id -> relationships
  loaded: boolean;
  timestamp: number;
}

const CACHE_TTL = 1000 * 60 * 5; // 5 minutes (local cache) - though invalidation is manual

const categoryCache: CategoryCacheState = {
  categories: [],
  relations: [],
  categoriesMap: new Map(),
  relationsMap: new Map(),
  loaded: false,
  timestamp: 0
};

export const flushCategoryCache = () => {
  categoryCache.categories = [];
  categoryCache.relations = [];
  categoryCache.categoriesMap.clear();
  categoryCache.relationsMap.clear();
  categoryCache.loaded = false;
  categoryCache.timestamp = 0;
};

const ensureCategoryCache = async (supabase: SupabaseClient) => {
  const now = Date.now();
  if (categoryCache.loaded && (now - categoryCache.timestamp < CACHE_TTL)) {
    return;
  }
  const [catsRes, relsRes] = await Promise.all([
    supabase.from('categories').select('*').order('name'),
    supabase.from('category_relations').select('*')
  ]);

  if (catsRes.error) throw catsRes.error;
  if (relsRes.error) throw relsRes.error;

  categoryCache.categories = catsRes.data || [];
  categoryCache.relations = relsRes.data || [];
  categoryCache.categoriesMap.clear();
  categoryCache.relationsMap.clear();

  // Index Categories
  categoryCache.categories.forEach(cat => {
    categoryCache.categoriesMap.set(cat.id, cat);
  });

  // Index Relations: initialize an empty entry per category, then attach each
  // relation row to its parent's children and its child's parents.
  categoryCache.categories.forEach(cat => {
    categoryCache.relationsMap.set(cat.id, { parents: [], children: [] });
  });

  categoryCache.relations.forEach(rel => {
    const parentEntry = categoryCache.relationsMap.get(rel.parent_category_id);
    const childEntry = categoryCache.relationsMap.get(rel.child_category_id);

    if (parentEntry) parentEntry.children.push(rel);
    if (childEntry) childEntry.parents.push(rel);
  });
  categoryCache.loaded = true;
  categoryCache.timestamp = now;
};

// --- Read Functions ---

export const fetchCategoriesServer = async (supabase: SupabaseClient, options: {
  parentSlug?: string;
  includeChildren?: boolean;
  userId?: string;
}) => {
  await ensureCategoryCache(supabase);

  let result = categoryCache.categories;

  // Filter by parent slug (find the children of the given category)
  if (options.parentSlug) {
    const parent = categoryCache.categories.find(c => c.slug === options.parentSlug);
    if (!parent) return [];

    const relations = categoryCache.relationsMap.get(parent.id);
    if (!relations) return [];

    const childIds = new Set(relations.children.map(r => r.child_category_id));
    result = result.filter(c => childIds.has(c.id));
  }

  // Root filtering: in the original query, includeChildren === true meant
  // "return only root-level categories" (those that are not a child of any
  // other category). The flag name is misleading, but we keep that behavior.
  if (options.includeChildren) {
    // Get all IDs that ARE children (i.e. have a parent)
    const allChildIds = new Set(categoryCache.relations.map(r => r.child_category_id));
    result = result.filter(cat => !allChildIds.has(cat.id));
  }

  // Replicate the shape of the original nested select
  // (`*, children:category_relations!parent_category_id(child:categories!child_category_id(*))`):
  // each category carries a `children` array of relation rows, each with its `child` category attached.
  const enriched = result.map(cat => {
    const rels = categoryCache.relationsMap.get(cat.id);
    const childrenRels = rels?.children || [];

    // Map relations to the expected structure:
    // children: [ { child: { ...category } } ]
    const children = childrenRels.map(r => {
      const childCat = categoryCache.categoriesMap.get(r.child_category_id);
      return {
        ...r,
        child: childCat
      };
    }).filter(item => item.child); // Ensure the child exists

    return {
      ...cat,
      children
    };
  });

  return enriched;
};
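
// Usage sketch (illustrative; the slug is hypothetical): list the children of a
// known parent. Each returned item mirrors the old nested select, so it carries
// a `children` array of relation rows with the child category attached.
//   const metals = await fetchCategoriesServer(supabase, { parentSlug: 'materials' });
//   // metals[0].children => [{ ...relationRow, child: { id, name, slug, ... } }]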

export const fetchCategoryByIdServer = async (supabase: SupabaseClient, id: string) => {
  await ensureCategoryCache(supabase);

  const cat = categoryCache.categoriesMap.get(id);
  if (!cat) return null;

  const rels = categoryCache.relationsMap.get(id);

  // Reconstruct parents: [ { parent: { ... } } ]
  const parents = (rels?.parents || []).map(r => ({
    ...r,
    parent: categoryCache.categoriesMap.get(r.parent_category_id)
  })).filter(item => item.parent);

  // Reconstruct children: [ { child: { ... } } ]
  const children = (rels?.children || []).map(r => ({
    ...r,
    child: categoryCache.categoriesMap.get(r.child_category_id)
  })).filter(item => item.child);

  return {
    ...cat,
    parents,
    children
  };
};

export const createCategoryServer = async (supabase: SupabaseClient, category: any) => {
  // 1. Create Category
  const { data, error } = await supabase
    .from('categories')
    .insert({
      name: category.name,
      slug: category.slug,
      description: category.description,
      visibility: category.visibility,
      owner_id: category.owner_id,
      meta: category.meta || {}
    })
    .select()
    .single();

  if (error) throw error;

  // 2. Create Parent Relation if provided
  if (category.parentId) {
    const { error: relError } = await supabase
      .from('category_relations')
      .insert({
        parent_category_id: category.parentId,
        child_category_id: data.id,
        relation_type: category.relationType || 'generalization'
      });

    if (relError) {
      // The category row already exists at this point; surface the error
      // rather than attempting cleanup.
      throw relError;
    }
  }

  flushCategoryCache();
  return data;
};

export const updateCategoryServer = async (supabase: SupabaseClient, id: string, updates: any) => {
  const { data, error } = await supabase
    .from('categories')
    .update({
      name: updates.name,
      slug: updates.slug,
      description: updates.description,
      visibility: updates.visibility,
      ...(updates.meta !== undefined && { meta: updates.meta })
    })
    .eq('id', id)
    .select()
    .single();

  if (error) throw error;

  flushCategoryCache();
  return data;
};

export const deleteCategoryServer = async (supabase: SupabaseClient, id: string) => {
  const { error } = await supabase
    .from('categories')
    .delete()
    .eq('id', id);

  if (error) throw error;

  flushCategoryCache();
  return true;
};

/**
 * Fetch category ancestry paths for the given category IDs.
 * Returns an array of paths, where each path is an array of categories from root to leaf.
 */
export const fetchCategoryAncestry = async (
  supabase: SupabaseClient,
  baseCategoryIds: string[]
): Promise<any[][]> => {
  if (baseCategoryIds.length === 0) return [];

  await ensureCategoryCache(supabase);

  const resultPaths: any[][] = [];

  for (const baseId of baseCategoryIds) {
    // A category can have multiple parents (poly-hierarchy), so collect every
    // path from this id up to each root via a recursive DFS.
    const pathsForThisId: any[][] = [];

    // Helper to walk parents recursively
    const findPaths = (currentId: string, currentPath: any[]) => {
      const cat = categoryCache.categoriesMap.get(currentId);
      if (!cat) return; // Should not happen if data is consistent

      // Cycle guard: if this category is already on the current path, stop.
      // (The original gated this behind a `visited` set that was never
      // populated, so the check could never fire; run it unconditionally.)
      if (currentPath.some(c => c.id === currentId)) return;

      // Add the current node to the start of the path (building leaf -> root)
      const newPath = [cat, ...currentPath];

      const rels = categoryCache.relationsMap.get(currentId);
      const parentRels = rels?.parents || [];

      if (parentRels.length === 0) {
        // Root reached
        pathsForThisId.push(newPath);
      } else {
        for (const rel of parentRels) {
          findPaths(rel.parent_category_id, newPath);
        }
      }
    };

    findPaths(baseId, []);
    resultPaths.push(...pathsForThisId);
  }

  return resultPaths;
};
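
// Example (hypothetical hierarchy): if 'steel' has two parent chains,
// materials -> metals -> steel and materials -> alloys -> steel, then
//   await fetchCategoryAncestry(supabase, [steelId])
// returns one root-to-leaf path per chain:
//   [ [materials, metals, steel],
//     [materials, alloys, steel] ]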

/**
 * Fetch category paths for multiple pages.
 * Returns a Map of pageId -> category paths.
 */
export const fetchCategoryPathsForPages = async (
  supabase: SupabaseClient,
  pages: any[]
): Promise<Map<string, any[][]>> => {
  await ensureCategoryCache(supabase);

  const pageCategoryMap = new Map<string, string[]>();
  const allCategoryIds = new Set<string>();

  // Collect all category IDs from the pages
  pages.forEach(page => {
    const categoryIds: string[] = [];
    if (page.meta?.categoryIds && Array.isArray(page.meta.categoryIds)) {
      categoryIds.push(...page.meta.categoryIds);
    } else if (page.meta?.categoryId) {
      categoryIds.push(page.meta.categoryId);
    }

    if (categoryIds.length > 0) {
      pageCategoryMap.set(page.id, categoryIds);
      categoryIds.forEach(id => allCategoryIds.add(id));
    }
  });

  if (allCategoryIds.size === 0) return new Map();

  const categoryPathsMap = new Map<string, any[][]>();
  // Optimized: pre-calculate paths once for all unique category IDs
  const allPaths = await fetchCategoryAncestry(supabase, Array.from(allCategoryIds));
  // Group paths by leaf ID
  const pathsByLeaf = new Map<string, any[][]>();
  allPaths.forEach(path => {
    const leaf = path[path.length - 1];
    if (leaf) {
      const existing = pathsByLeaf.get(leaf.id) || [];
      existing.push(path);
      pathsByLeaf.set(leaf.id, existing);
    }
  });

  // Map back to pages
  pageCategoryMap.forEach((categoryIds, pageId) => {
    const pagePaths: any[][] = [];
    categoryIds.forEach(catId => {
      const paths = pathsByLeaf.get(catId);
      if (paths) pagePaths.push(...paths);
    });

    if (pagePaths.length > 0) {
      categoryPathsMap.set(pageId, pagePaths);
    }
  });

  return categoryPathsMap;
};
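
// Usage sketch (illustrative): resolve breadcrumb paths for a whole batch of
// pages in one cache pass instead of querying per page.
//   const pathsByPage = await fetchCategoryPathsForPages(supabase, pages);
//   const crumbs = pathsByPage.get(page.id)?.[0]?.map((c: any) => c.name).join(' / ');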

// Route Definitions
export const getCategoriesRoute = createRouteBody(
  'get',
  '/api/categories',
  ['Categories'],
  'Get Categories',
  'Get Categories',
  {
    query: z.object({
      parentSlug: z.string().optional(),
      includeChildren: z.enum(['true', 'false']).optional()
    })
  },
  {
    200: {
      description: 'Categories List',
      content: {
        'application/json': {
          schema: z.array(z.any())
        }
      }
    }
  },
  false // private
);

export const getCategoryRoute = createRouteBody(
  'get',
  '/api/categories/:id',
  ['Categories'],
  'Get Category',
  'Get Category',
  {
    params: z.object({
      id: z.string()
    })
  },
  {
    200: {
      description: 'Category Details',
      content: {
        'application/json': {
          schema: z.any()
        }
      }
    },
    404: { description: 'Category not found' }
  },
  false // private
);

export const createCategoryRoute = createRouteBody(
  'post',
  '/api/categories',
  ['Categories'],
  'Create Category',
  'Create Category',
  {
    body: {
      content: {
        'application/json': {
          schema: z.object({
            name: z.string(),
            slug: z.string(),
            description: z.string().optional(),
            visibility: z.enum(['public', 'unlisted', 'private', 'internal']).optional(),
            parentId: z.string().optional(),
            relationType: z.enum(['generalization', 'material_usage', 'domain', 'process_step', 'standard', 'other']).optional()
          })
        }
      }
    }
  },
  {
    200: {
      description: 'Category Created',
      content: {
        'application/json': {
          schema: z.any()
        }
      }
    },
    401: { description: 'Unauthorized' }
  },
  false
);

export const updateCategoryRoute = createRouteBody(
  'patch',
  '/api/categories/:id',
  ['Categories'],
  'Update Category',
  'Update Category',
  {
    params: z.object({
      id: z.string()
    }),
    body: {
      content: {
        'application/json': {
          schema: z.object({
            name: z.string().optional(),
            slug: z.string().optional(),
            description: z.string().optional(),
            visibility: z.enum(['public', 'unlisted', 'private', 'internal']).optional(),
            meta: z.record(z.string(), z.any()).optional()
          })
        }
      }
    }
  },
  {
    200: {
      description: 'Category Updated',
      content: {
        'application/json': {
          schema: z.any()
        }
      }
    },
    401: { description: 'Unauthorized' }
  },
  false
);

export const deleteCategoryRoute = createRouteBody(
  'delete',
  '/api/categories/:id',
  ['Categories'],
  'Delete Category',
  'Delete Category',
  {
    params: z.object({
      id: z.string()
    })
  },
  {
    200: {
      description: 'Category Deleted',
      content: {
        'application/json': {
          schema: z.object({ success: z.boolean() })
        }
      }
    },
    401: { description: 'Unauthorized' }
  },
  false
);

// --- Handlers ---

export async function handleGetCategories(c: Context) {
  try {
    const { supabase } = await import('../../../commons/supabase.js');

    const parentSlug = c.req.query('parentSlug');
    const includeChildren = c.req.query('includeChildren') === 'true';

    // Extract user ID from Auth Token (if present)
    let userId: string | undefined;
    const authHeader = c.req.header('Authorization');
    if (authHeader) {
      const token = authHeader.replace('Bearer ', '');
      const { data: { user } } = await supabase.auth.getUser(token);
      userId = user?.id;
    }

    const categories = await fetchCategoriesServer(supabase, { parentSlug, includeChildren, userId });
    return c.json(categories);
  } catch (err: any) {
    logger.error({ err }, 'Get Categories failed');
    return c.text('Internal Server Error', 500);
  }
}

export async function handleGetCategory(c: Context) {
  try {
    const { supabase } = await import('../../../commons/supabase.js');

    const id = c.req.param('id');
    const category = await fetchCategoryByIdServer(supabase, id);

    if (!category) return c.json({ error: 'Category not found' }, 404);
    return c.json(category);
  } catch (err: any) {
    logger.error({ err }, 'Get Category failed');
    return c.text('Internal Server Error', 500);
  }
}

export async function handleCreateCategory(c: Context) {
  try {
    const { supabase } = await import('../../../commons/supabase.js');

    // Auth check is usually handled by middleware if the route is protected,
    // but we need the user ID for owner_id if not provided (or to verify).
    // RLS will enforce permissions.
    const authHeader = c.req.header('Authorization');
    if (!authHeader) return c.json({ error: 'Unauthorized' }, 401);

    const token = authHeader.replace('Bearer ', '');
    const { data: { user } } = await supabase.auth.getUser(token);
    if (!user) return c.json({ error: 'Unauthorized' }, 401);

    const body = await c.req.json();
    const categoryData = { ...body, owner_id: user.id }; // Force owner to be the creator

    const newCategory = await createCategoryServer(supabase, categoryData);
    return c.json(newCategory);
  } catch (err: any) {
    logger.error({ err }, 'Create Category failed');
    return c.text('Internal Server Error', 500);
  }
}

export async function handleUpdateCategory(c: Context) {
  try {
    const { supabase } = await import('../../../commons/supabase.js');

    const id = c.req.param('id');
    const body = await c.req.json();

    // Default to the service/global client; scoping a per-user client from the
    // Authorization header is not implemented yet (the original left an empty
    // branch here).
    const usedSupabase = supabase;

    const updated = await updateCategoryServer(usedSupabase, id, body);
    return c.json(updated);
  } catch (err: any) {
    logger.error({ err }, 'Update Category failed');
    return c.text('Internal Server Error', 500);
  }
}

export async function handleDeleteCategory(c: Context) {
  try {
    const { supabase } = await import('../../../commons/supabase.js');

    const id = c.req.param('id');
    await deleteCategoryServer(supabase, id);

    return c.json({ success: true });
  } catch (err: any) {
    logger.error({ err }, 'Delete Category failed');
    return c.text('Internal Server Error', 500);
  }
}

packages/shared/src/server/products/serving/db/db-layouts.ts (new file, 392 lines)
@@ -0,0 +1,392 @@
import { SupabaseClient } from '@supabase/supabase-js';
import { createRouteBody } from '../routes.js';
import { z } from '@hono/zod-openapi';
import { logger } from '../logger.js';
import { Context } from 'hono';

// Schema Definitions
const LayoutVisibilityEnum = z.enum(['public', 'private', 'listed', 'custom']);

const LayoutSchema = z.object({
  id: z.string().uuid().optional(),
  name: z.string().min(1).max(255),
  owner_id: z.string().uuid(),
  layout_json: z.any(), // JSONB - can be any valid JSON
  type: z.string().optional().default('canvas'),
  visibility: LayoutVisibilityEnum.optional().default('private'),
  meta: z.any().optional(), // JSONB for categories, tags, etc.
  is_predefined: z.boolean().optional().default(false),
  created_at: z.string().optional(),
  updated_at: z.string().optional()
});

const CreateLayoutSchema = LayoutSchema.omit({ id: true, created_at: true, updated_at: true });
const UpdateLayoutSchema = LayoutSchema.partial().omit({ id: true, owner_id: true, created_at: true, updated_at: true });

// Route Definitions
export const getLayoutsRoute = createRouteBody(
  'get',
  '/api/layouts',
  ['Layouts'],
  'Get Layouts',
  'Get all layouts for the authenticated user or public layouts',
  {
    query: z.object({
      visibility: LayoutVisibilityEnum.optional(),
      type: z.string().optional(),
      limit: z.string().optional(),
      offset: z.string().optional()
    })
  },
  {
    200: {
      description: 'List of layouts',
      content: {
        'application/json': {
          schema: z.array(LayoutSchema)
        }
      }
    }
  },
  false // Requires authentication
);

export const getLayoutRoute = createRouteBody(
  'get',
  '/api/layouts/:id',
  ['Layouts'],
  'Get Layout',
  'Get a specific layout by ID',
  {
    params: z.object({
      id: z.string().uuid()
    })
  },
  {
    200: {
      description: 'Layout details',
      content: {
        'application/json': {
          schema: LayoutSchema
        }
      }
    },
    404: {
      description: 'Layout not found'
    }
  },
  false
);

export const createLayoutRoute = createRouteBody(
  'post',
  '/api/layouts',
  ['Layouts'],
  'Create Layout',
  'Create a new layout',
  {
    body: {
      content: {
        'application/json': {
          schema: CreateLayoutSchema
        }
      }
    }
  },
  {
    201: {
      description: 'Layout created',
      content: {
        'application/json': {
          schema: LayoutSchema
        }
      }
    },
    400: {
      description: 'Invalid request'
    }
  },
  false
);

export const updateLayoutRoute = createRouteBody(
  'patch',
  '/api/layouts/:id',
  ['Layouts'],
  'Update Layout',
  'Update an existing layout',
  {
    params: z.object({
      id: z.string().uuid()
    }),
    body: {
      content: {
        'application/json': {
          schema: UpdateLayoutSchema
        }
      }
    }
  },
  {
    200: {
      description: 'Layout updated',
      content: {
        'application/json': {
          schema: LayoutSchema
        }
      }
    },
    404: {
      description: 'Layout not found'
    }
  },
  false
);

export const deleteLayoutRoute = createRouteBody(
  'delete',
  '/api/layouts/:id',
  ['Layouts'],
  'Delete Layout',
  'Delete a layout',
  {
    params: z.object({
      id: z.string().uuid()
    })
  },
  {
    204: {
      description: 'Layout deleted'
    },
    404: {
      description: 'Layout not found'
    }
  },
  false
);

// Handlers
export async function handleGetLayouts(c: Context) {
  try {
    const { supabase } = await import('../../../commons/supabase.js');

    // Extract user ID from Auth Token
    const authHeader = c.req.header('Authorization');
    if (!authHeader) {
      return c.json({ error: 'Unauthorized' }, 401);
    }

    const token = authHeader.replace('Bearer ', '');
    const { data: { user }, error: authError } = await supabase.auth.getUser(token);

    if (authError || !user) {
      return c.json({ error: 'Unauthorized' }, 401);
    }

    // Query parameters
    const visibility = c.req.query('visibility');
    const type = c.req.query('type');
    const limit = parseInt(c.req.query('limit') || '50');
    const offset = parseInt(c.req.query('offset') || '0');

    // Build query
    let query = supabase
      .from('layouts')
      .select('*')
      .or(`owner_id.eq.${user.id},visibility.eq.public,visibility.eq.listed`)
      .order('updated_at', { ascending: false })
      .range(offset, offset + limit - 1);

    if (visibility) {
      query = query.eq('visibility', visibility);
    }

    if (type) {
      query = query.eq('type', type);
    }

    const { data, error } = await query;

    if (error) {
      logger.error({ err: error }, 'Failed to fetch layouts');
      return c.json({ error: 'Failed to fetch layouts' }, 500);
    }

    return c.json(data);
  } catch (err: any) {
    logger.error({ err }, 'Get layouts failed');
    return c.json({ error: 'Internal server error' }, 500);
  }
}
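
// Note on the query above (behavior as written): the .or() clause keeps rows
// that are either owned by the caller or publicly visible ('public'/'listed'),
// and the optional visibility/type params narrow that set further. Illustrative call:
//   GET /api/layouts?type=canvas&limit=20&offset=0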

export async function handleGetLayout(c: Context) {
  try {
    const { supabase } = await import('../../../commons/supabase.js');
    const id = c.req.param('id');

    // Extract user ID from Auth Token (optional for public layouts)
    let userId: string | undefined;
    const authHeader = c.req.header('Authorization');
    if (authHeader) {
      const token = authHeader.replace('Bearer ', '');
      const { data: { user } } = await supabase.auth.getUser(token);
      userId = user?.id;
    }

    const { data, error } = await supabase
      .from('layouts')
      .select('*')
      .eq('id', id)
      .single();

    if (error || !data) {
      return c.json({ error: 'Layout not found' }, 404);
    }

    // Check access permissions
    const isOwner = userId === data.owner_id;
    const isPublic = data.visibility === 'public' || data.visibility === 'listed';

    if (!isOwner && !isPublic) {
      return c.json({ error: 'Forbidden' }, 403);
    }

    return c.json(data);
  } catch (err: any) {
    logger.error({ err }, 'Get layout failed');
    return c.json({ error: 'Internal server error' }, 500);
  }
}

export async function handleCreateLayout(c: Context) {
  try {
    const { supabase } = await import('../../../commons/supabase.js');

    // Extract user ID from Auth Token
    const authHeader = c.req.header('Authorization');
    if (!authHeader) {
      return c.json({ error: 'Unauthorized' }, 401);
    }

    const token = authHeader.replace('Bearer ', '');
    const { data: { user }, error: authError } = await supabase.auth.getUser(token);

    if (authError || !user) {
      return c.json({ error: 'Unauthorized' }, 401);
    }

    const body = await c.req.json();

    // Validate and parse body
    const validatedData = CreateLayoutSchema.parse(body);

    // Ensure owner_id matches the authenticated user
    const layoutData = {
      ...validatedData,
      owner_id: user.id
    };

    const { data, error } = await supabase
      .from('layouts')
      .insert(layoutData)
      .select()
      .single();

    if (error) {
      logger.error({ err: error }, 'Failed to create layout');
      return c.json({ error: 'Failed to create layout' }, 500);
    }

    return c.json(data, 201);
  } catch (err: any) {
    if (err.name === 'ZodError') {
      return c.json({ error: 'Invalid request data', details: err.errors }, 400);
    }
    logger.error({ err }, 'Create layout failed');
    return c.json({ error: 'Internal server error' }, 500);
  }
}

export async function handleUpdateLayout(c: Context) {
  try {
    const { supabase } = await import('../../../commons/supabase.js');
    const id = c.req.param('id');

    // Extract user ID from Auth Token
    const authHeader = c.req.header('Authorization');
    if (!authHeader) {
      return c.json({ error: 'Unauthorized' }, 401);
    }

    const token = authHeader.replace('Bearer ', '');
    const { data: { user }, error: authError } = await supabase.auth.getUser(token);

    if (authError || !user) {
      return c.json({ error: 'Unauthorized' }, 401);
    }

    const body = await c.req.json();

    // Validate and parse body
    const validatedData = UpdateLayoutSchema.parse(body);

    // Update only if the user is the owner (RLS will enforce this too)
    const { data, error } = await supabase
      .from('layouts')
      .update(validatedData)
      .eq('id', id)
      .eq('owner_id', user.id)
      .select()
      .single();

    if (error || !data) {
      logger.error({ err: error }, 'Failed to update layout');
      return c.json({ error: 'Layout not found or unauthorized' }, 404);
    }

    return c.json(data);
  } catch (err: any) {
    if (err.name === 'ZodError') {
      return c.json({ error: 'Invalid request data', details: err.errors }, 400);
    }
    logger.error({ err }, 'Update layout failed');
    return c.json({ error: 'Internal server error' }, 500);
  }
}

export async function handleDeleteLayout(c: Context) {
  try {
    const { supabase } = await import('../../../commons/supabase.js');
    const id = c.req.param('id');

    // Extract user ID from Auth Token
    const authHeader = c.req.header('Authorization');
    if (!authHeader) {
      return c.json({ error: 'Unauthorized' }, 401);
    }

    const token = authHeader.replace('Bearer ', '');
    const { data: { user }, error: authError } = await supabase.auth.getUser(token);

    if (authError || !user) {
      return c.json({ error: 'Unauthorized' }, 401);
    }

    // Delete only if the user is the owner (RLS will enforce this too)
    const { error } = await supabase
      .from('layouts')
      .delete()
      .eq('id', id)
      .eq('owner_id', user.id);

    if (error) {
      logger.error({ err: error }, 'Failed to delete layout');
      return c.json({ error: 'Layout not found or unauthorized' }, 404);
    }

    return c.body(null, 204);
  } catch (err: any) {
    logger.error({ err }, 'Delete layout failed');
    return c.json({ error: 'Internal server error' }, 500);
  }
}

packages/shared/src/server/products/serving/db/db-pages.ts (new file, 999 lines)
@@ -0,0 +1,999 @@
import { SupabaseClient } from '@supabase/supabase-js';
import { createRouteBody } from '../routes.js';
import { z } from '@hono/zod-openapi';
import { logger } from '../logger.js';
import { extractDisplayImage, loadTemplate, inject } from '../renderer.js';
import { Context } from 'hono';
import { generateMarkdownFromPage } from '../markdown-generator.js';

export interface UserPageDetails {
  page: any;
  userProfile: any;
  childPages: any[];
}

export interface EnrichPageOptions {
  includeThumbnail?: boolean;
  includeUser?: boolean;
  includeCategories?: boolean;
  includeChildren?: boolean;
}

/**
 * Enriches a page object with additional data based on options
 */
export const enrichPageData = async (
  supabase: SupabaseClient,
  page: any,
  options: EnrichPageOptions = {}
): Promise<any> => {
  const {
    includeThumbnail = true,
    includeUser = false,
    includeCategories = false,
    includeChildren = false
  } = options;

  const enrichedPage = { ...page };

  // Extract and enrich thumbnail
  if (includeThumbnail) {
    let content = page.content;
    if (typeof content === 'string') {
      try {
        content = JSON.parse(content);
      } catch (e) {
        // Keep as string if parsing fails
      }
    }

    const imageCandidate = extractDisplayImage(content);
    if (imageCandidate) {
      if (imageCandidate.pictureId) {
        // Fetch the actual image URL from the pictures table
        const { data: picture } = await supabase
          .from('pictures')
          .select('image_url')
          .eq('id', imageCandidate.pictureId)
          .single();

        if (picture?.image_url) {
          enrichedPage.meta = {
            ...enrichedPage.meta,
            thumbnail: picture.image_url
          };
        }
      } else if (imageCandidate.imageUrl) {
        // Direct URL from an image widget or markdown
        enrichedPage.meta = {
          ...enrichedPage.meta,
          thumbnail: imageCandidate.imageUrl
        };
      }
    }
  }

  // Enrich with user profile
  if (includeUser) {
    const { data: userProfile } = await supabase
      .from('profiles')
      .select('user_id, username, display_name, avatar_url')
      .eq('user_id', page.owner)
      .single();

    if (userProfile) {
      enrichedPage.user_profile = userProfile;
    }
  }

  // Enrich with category paths
  if (includeCategories) {
    const categoryIds: string[] = [];
    if (page.meta?.categoryIds && Array.isArray(page.meta.categoryIds)) {
      categoryIds.push(...page.meta.categoryIds);
    } else if (page.meta?.categoryId) {
      categoryIds.push(page.meta.categoryId);
    }

    if (categoryIds.length > 0) {
      // Reuse the fetchCategoryAncestry logic from fetchUserPageDetailsServer.
      // For now, just store the IDs - can be expanded later.
      enrichedPage.category_ids = categoryIds;
    }
  }

  // Enrich with child pages
  if (includeChildren) {
    const { data: childPages } = await supabase
      .from('pages')
      .select('id, title, slug')
      .eq('parent', page.id)
      .order('title');

    if (childPages) {
      enrichedPage.child_pages = childPages;
    }
  }

  return enrichedPage;
};
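
// Usage sketch (illustrative): enrich a freshly loaded page for a detail view.
//   const { data: page } = await supabase.from('pages').select('*').eq('id', pageId).single();
//   const full = await enrichPageData(supabase, page, { includeUser: true, includeChildren: true });
//   // full.meta.thumbnail, full.user_profile and full.child_pages are populated when available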

export const fetchUserPageDetailsServer = async (
  supabase: SupabaseClient,
  identifier: string, // userId, username, or pageId
  slug: string,
  requesterUserId?: string
): Promise<UserPageDetails | null> => {
  let userId = identifier;
  let resolvedProfile: any = null;
  let page: any = null;

  // 1. Check if we're looking up by page ID (slug is a UUID)
  const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;
  const isSlugUuid = uuidRegex.test(slug);

  if (isSlugUuid) {
    // Lookup by page ID directly
    const pageId = slug;

    const { data: pageData, error } = await supabase
      .from('pages')
      .select('*')
      .eq('id', pageId)
      .maybeSingle();

    if (error || !pageData) return null;

    page = pageData;
    userId = page.owner;

    const isOwner = requesterUserId === userId;

    // Check visibility
    if (!isOwner && (!page.is_public || !page.visible)) {
      return null;
    }
  } else {
    // Traditional lookup by userId/username + slug.
    // Resolve the user ID if the identifier is a username.
    if (!uuidRegex.test(identifier)) {
      const { data: profile } = await supabase
        .from('profiles')
        .select('user_id, username, display_name, avatar_url')
        .eq('username', identifier)
        .single();

      if (profile) {
        userId = profile.user_id;
        resolvedProfile = profile;
      } else {
        return null; // User not found
      }
    }

    const isOwner = requesterUserId === userId;

    // 2. Fetch Page by slug
    let query = supabase
      .from('pages')
      .select('*')
      .eq('slug', slug)
      .eq('owner', userId);

    if (!isOwner) {
      query = query.eq('is_public', true).eq('visible', true);
    }

    const { data: pageData, error } = await query.maybeSingle();

    if (error || !pageData) return null;

    page = pageData;
  }

  const isOwner = requesterUserId === userId;

  // 3. Fetch Additional Data in Parallel
  const categoryIds: string[] = [];
  if (page.meta?.categoryIds && Array.isArray(page.meta.categoryIds)) {
    categoryIds.push(...page.meta.categoryIds);
  } else if (page.meta?.categoryId) {
    categoryIds.push(page.meta.categoryId);
  }

  const fetchCategoryAncestry = async (baseCategoryIds: string[]) => {
    if (baseCategoryIds.length === 0) return [];
    const { fetchCategoryAncestry } = await import('./db-categories.js');
    return fetchCategoryAncestry(supabase, baseCategoryIds);
  };

  const [parentPageRes, childPagesRes, profileRes, categoryPaths] = await Promise.all([
    // Parent Page
    page.parent
      ? supabase.from('pages').select('title, slug').eq('id', page.parent).single()
      : Promise.resolve({ data: null, error: null }),

    // Child Pages
    (() => {
      let childQuery = supabase
        .from('pages')
        .select('id, title, slug, visible, is_public')
        .eq('parent', page.id)
        .order('title');

      if (!isOwner) {
        childQuery = childQuery.eq('visible', true).eq('is_public', true);
      }
      return childQuery;
    })(),

    // User Profile
    (() => {
      if (resolvedProfile) return Promise.resolve({ data: resolvedProfile, error: null });

      return (async () => {
        const { fetchUserProfilesCached } = await import('./db-user.js');
        const profiles = await fetchUserProfilesCached(supabase, [userId]);
        const profile = profiles[userId] || null;
        return { data: profile, error: null };
      })();
    })(),

    // Category Paths
    fetchCategoryAncestry(categoryIds)
  ]);

  // Construct Result
  const result: UserPageDetails = {
    page: {
      ...page,
      parent_page: parentPageRes.data || null,
      category_paths: categoryPaths
    },
    userProfile: profileRes.data || null,
    childPages: childPagesRes.data || []
  };

  // Enrich page with thumbnail using the helper function
  result.page = await enrichPageData(supabase, result.page, {
    includeThumbnail: true,
    includeUser: false, // Already fetched above
    includeCategories: false, // Already fetched above
    includeChildren: false // Already fetched above
  });

  // Ensure page content has a valid structure for GenericCanvas if it's not a markdown string
  if (result.page.content !== null && typeof result.page.content !== 'string') {
    if (!result.page.content.containers) {
      result.page.content.containers = [];
    }
  } else if (result.page.content === null) {
    // Default to an empty canvas layout if content is null
    result.page.content = { containers: [] };
  }

  return result;
};
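
// Usage sketch (illustrative; names are hypothetical): the identifier may be a
// username or user id; when the slug itself is a UUID it is treated as a direct
// page-id lookup instead.
//   const details = await fetchUserPageDetailsServer(supabase, 'alice', 'my-workbench', requesterId);
//   // details => { page (with parent_page, category_paths, thumbnail), userProfile, childPages }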
|
||||
|
||||
export async function renderPageContent(content: any): Promise<{ markdown: string, images: string[] }> {
  let markdown = '';
  const images: string[] = [];
  const pictureIds: string[] = [];

  try {
    let targetContent = content;

    // Handle Redux-style normalized state if present
    if (content && content.pages && typeof content.pages === 'object') {
      const pageKeys = Object.keys(content.pages);
      if (pageKeys.length > 0) {
        targetContent = content.pages[pageKeys[0]];
      }
    }

    // 1. First pass: collect picture IDs
    if (targetContent && targetContent.containers && Array.isArray(targetContent.containers)) {
      for (const container of targetContent.containers) {
        if (container.widgets && Array.isArray(container.widgets)) {
          for (const widget of container.widgets) {
            if (widget.widgetId === 'photo-card' && widget.props && widget.props.pictureId) {
              pictureIds.push(widget.props.pictureId);
            }
          }
        }
      }
    }

    // 2. Resolve images
    const imageMap: Record<string, string> = {};
    if (pictureIds.length > 0) {
      const { supabase } = await import('../../../commons/supabase.js');
      const { ensureCachedImageFromUrl } = await import('../../images/index.js');

      const { data: pictures } = await supabase
        .from('pictures')
        .select('id, image_url, meta')
        .in('id', pictureIds);

      if (pictures) {
        const baseUrl = process.env.SERVER_IMAGE_API_URL || 'http://localhost:3000';

        await Promise.all(pictures.map(async (p) => {
          try {
            // Logic similar to augmentPosts
            const meta = p.meta || {};
            const aspect = (meta.width && meta.height) ? meta.height / meta.width : 0;
            // For meta tags we want a good cover size (1200px wide)
            const width = 1200;
            const height = aspect ? Math.round(width * aspect) : 630; // fallback height

            const filename = await ensureCachedImageFromUrl(p.image_url, width, height, 'jpeg');
            imageMap[p.id] = `${baseUrl}/api/images/cache/${filename}`;
          } catch (err) {
            logger.warn({ err, pictureId: p.id }, 'Failed to cache image for page meta');
          }
        }));
      }
    }

    // 3. Second pass: render
    if (targetContent && targetContent.containers && Array.isArray(targetContent.containers)) {
      for (const container of targetContent.containers) {
        if (container.widgets && Array.isArray(container.widgets)) {
          for (const widget of container.widgets) {
            if (widget.widgetId === 'markdown-text' && widget.props && widget.props.content) {
              markdown += widget.props.content + '\n\n';
            } else if (widget.widgetId === 'photo-card' && widget.props && widget.props.pictureId) {
              const imageUrl = imageMap[widget.props.pictureId];
              if (imageUrl) {
                images.push(imageUrl);
                markdown += `![image](${imageUrl})\n\n`; // generic alt text assumed
              }
            }
          }
        }
      }
    }

  } catch (e) {
    logger.error({ e }, 'Error rendering page content');
  }

  return { markdown: markdown.trim(), images };
}
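
// Usage sketch (names illustrative): dehydrate a loaded page's canvas content
// into markdown plus the resolved image URLs, e.g. for previews or meta tags.
// const { markdown, images } = await renderPageContent(page.content);
// const preview = markdown.slice(0, 200);
// const coverImage = images[0]; // undefined when the page has no photo-card widgets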

// Route Definitions
export const getUserPageRoute = createRouteBody(
  'get',
  '/user/:userId/pages/:slug',
  ['Pages'],
  'Get User Page Meta',
  'Serves the user page with injected Open Graph metadata.',
  {
    params: z.object({
      userId: z.string(),
      slug: z.string()
    })
  },
  {
    200: {
      description: 'HTML Page',
      content: {
        'text/html': {
          schema: z.string()
        }
      }
    }
  }
);

export const getOrgUserPageRoute = createRouteBody(
  'get',
  '/org/:orgSlug/user/:userId/pages/:slug',
  ['Pages'],
  'Get Org User Page Meta',
  'Serves the org user page with injected Open Graph metadata.',
  {
    params: z.object({
      orgSlug: z.string(),
      userId: z.string(),
      slug: z.string()
    })
  },
  {
    200: {
      description: 'HTML Page',
      content: {
        'text/html': {
          schema: z.string()
        }
      }
    }
  }
);

export const getApiUserPageRoute = createRouteBody(
  'get',
  '/api/user-page/:identifier/:slug',
  ['Pages'],
  'Get User Page Details',
  'Get User Page Details',
  {
    params: z.object({
      identifier: z.string().openapi({ description: 'User ID or Username' }),
      slug: z.string()
    })
  },
  {
    200: {
      description: 'User Page Details',
      content: {
        'application/json': {
          schema: z.any()
        }
      }
    },
    404: {
      description: 'Page not found'
    }
  },
  true // public (user pages are generally public)
);

export const getUserPageContentRoute = createRouteBody(
  'get',
  '/user/:userId/pages/:slug/content',
  ['Pages'],
  'Get User Page Content',
  'Dehydrates the user page content to Markdown.',
  {
    params: z.object({
      userId: z.string(),
      slug: z.string()
    })
  },
  {
    200: {
      description: 'Markdown Content',
      content: {
        'text/markdown': {
          schema: z.string()
        }
      }
    }
  },
  false // private: require auth until ownership checks are in place
);

export const getOrgUserPageContentRoute = createRouteBody(
  'get',
  '/org/:orgSlug/user/:userId/pages/:slug/content',
  ['Pages'],
  'Get Org User Page Content',
  'Dehydrates the org user page content to Markdown.',
  {
    params: z.object({
      orgSlug: z.string(),
      userId: z.string(),
      slug: z.string()
    })
  },
  {
    200: {
      description: 'Markdown Content',
      content: {
        'text/markdown': {
          schema: z.string()
        }
      }
    }
  },
  false
);

export const getEmbedPageRoute = createRouteBody(
  'get',
  '/embed/page/:id',
  ['Pages'],
  'Get Embed Page (User Page)',
  'Serves the embed page for a User Page.',
  {
    params: z.object({
      id: z.string()
    })
  },
  {
    200: {
      description: 'HTML Page',
      content: {
        'text/html': {
          schema: z.string()
        }
      }
    },
    404: { description: 'Page not found' }
  }
);

export const getUserPageMarkdownRoute = createRouteBody(
  'get',
  '/user/:identifier/pages/:slug{.+\\.md$}',
  ['Pages'],
  'Get User Page Markdown',
  'Returns the user page content as Markdown.',
  {
    params: z.object({
      identifier: z.string().openapi({ description: 'User ID or Username' }),
      slug: z.string()
    })
  },
  {
    200: {
      description: 'Markdown Content',
      content: {
        'text/markdown': {
          schema: z.string()
        }
      }
    },
    404: {
      description: 'Page not found'
    }
  },
  true // public
);

export const getUserPageHtmlRoute = createRouteBody(
  'get',
  '/user/:identifier/pages/:slug{.+\\.html$}',
  ['Pages'],
  'Get User Page HTML',
  'Returns the user page content as static HTML with proper meta tags.',
  {
    params: z.object({
      identifier: z.string().openapi({ description: 'User ID or Username' }),
      slug: z.string()
    })
  },
  {
    200: {
      description: 'HTML Content',
      content: {
        'text/html': {
          schema: z.string()
        }
      }
    },
    404: {
      description: 'Page not found'
    }
  },
  true // public
);

export const getUserPagePdfRoute = createRouteBody(
  'get',
  '/user/:identifier/pages/:slug{.+\\.pdf$}',
  ['Pages'],
  'Get User Page PDF',
  'Returns the user page content as a PDF document.',
  {
    params: z.object({
      identifier: z.string().openapi({ description: 'User ID or Username' }),
      slug: z.string()
    })
  },
  {
    200: {
      description: 'PDF Document',
      content: {
        'application/pdf': {
          schema: z.string().openapi({ format: 'binary' })
        }
      }
    },
    404: {
      description: 'Page not found'
    }
  },
  true // public
);

export const getUserPageJsonRoute = createRouteBody(
  'get',
  '/user/:identifier/pages/:slug{.+\\.json$}',
  ['Pages'],
  'Get User Page JSON',
  'Returns the user page data as JSON.',
  {
    params: z.object({
      identifier: z.string().openapi({ description: 'User ID or Username' }),
      slug: z.string()
    })
  },
  {
    200: {
      description: 'Page data as JSON',
      content: {
        'application/json': {
          schema: z.object({
            page: z.any(),
            userProfile: z.any()
          })
        }
      }
    },
    404: {
      description: 'Page not found'
    }
  },
  true // public
);

// --- Handlers ---

export async function handleGetApiUserPage(c: Context) {
  try {
    const { supabase } = await import('../../../commons/supabase.js');

    const identifier = c.req.param('identifier');
    const slug = c.req.param('slug');

    // Extract user ID from Auth Token (if present)
    let requesterUserId: string | undefined;
    const authHeader = c.req.header('Authorization');
    if (authHeader) {
      const token = authHeader.replace('Bearer ', '');
      const { data: { user } } = await supabase.auth.getUser(token);
      requesterUserId = user?.id;
    }

    const result = await fetchUserPageDetailsServer(supabase, identifier, slug, requesterUserId);

    if (!result) {
      return c.json({ error: 'Page not found' }, 404);
    }

    return c.json(result);
  } catch (err: any) {
    logger.error({ err }, 'API User Page fetch failed');
    return c.text('Internal Server Error', 500);
  }
}

export async function handleGetUserPageMarkdown(c: Context) {
  try {
    const { supabase } = await import('../../../commons/supabase.js');

    const identifier = c.req.param('identifier');
    let slug = c.req.param('slug');

    // Strip .md extension if present (due to regex match)
    if (slug && slug.endsWith('.md')) {
      slug = slug.slice(0, -3);
    }

    // Public pages need no auth, but honor a token so owners can export private pages
    let requesterUserId: string | undefined;
    const authHeader = c.req.header('Authorization');
    if (authHeader) {
      const token = authHeader.replace('Bearer ', '');
      const { data: { user } } = await supabase.auth.getUser(token);
      requesterUserId = user?.id;
    }

    const result = await fetchUserPageDetailsServer(supabase, identifier, slug, requesterUserId);

    if (!result || !result.page) {
      return c.text('Page not found', 404);
    }

    const authorName = result.userProfile?.display_name || result.userProfile?.username || result.page.owner;
    const markdown = generateMarkdownFromPage(result.page, authorName);

    c.header('Content-Type', 'text/markdown; charset=utf-8');
    // Suggest filename
    const filename = `${slug}.md`;
    c.header('Content-Disposition', `inline; filename="${filename}"`);

    return c.body(markdown);
  } catch (err: any) {
    logger.error({ err }, 'User Page Markdown export failed');
    return c.text('Internal Server Error', 500);
  }
}

export async function handleGetUserPageHtml(c: Context) {
  try {
    const { supabase } = await import('../../../commons/supabase.js');

    const identifier = c.req.param('identifier');
    let slug = c.req.param('slug');

    // Strip .html extension if present (due to regex match)
    if (slug && slug.endsWith('.html')) {
      slug = slug.slice(0, -5);
    }

    let requesterUserId: string | undefined;
    const authHeader = c.req.header('Authorization');
    if (authHeader) {
      const token = authHeader.replace('Bearer ', '');
      const { data: { user } } = await supabase.auth.getUser(token);
      requesterUserId = user?.id;
    }

    const result = await fetchUserPageDetailsServer(supabase, identifier, slug, requesterUserId);

    if (!result || !result.page) {
      return c.text('Page not found', 404);
    }

    const { page, userProfile } = result;
    const authorName = userProfile?.display_name || userProfile?.username || page.owner;

    // Render markdown content
    const { markdown, images } = await renderPageContent(page.content);

    // Convert the rendered markdown to HTML using 'marked'
    const { marked } = await import('marked');
    const htmlContent = await marked(markdown);

    // Extract description from markdown
    const truncate = (str: string, max: number) => str.length > max ? str.substring(0, max - 1) + '…' : str;
    let rawDescription = markdown
      .replace(/!\[.*?\]\(.*?\)/g, '') // Remove images
      .replace(/[#*\[\]()]/g, '') // Remove markdown chars
      .replace(/\n+/g, ' ') // Collapse newlines
      .trim();

    const description = rawDescription.length > 50 ? truncate(rawDescription, 200) : (page.description || `Check out this page by ${authorName}.`);

    // Get image using the same logic as handleGetPageMeta
    const metaImage = images.length > 0 ? images[0] : undefined;
    let finalMetaImage = metaImage;

    if (!finalMetaImage) {
      // Fallback: Try to extract from content using shared helper
      const fallback = extractDisplayImage(page.content);
      if (fallback && fallback.imageUrl) {
        finalMetaImage = fallback.imageUrl;
      } else if (fallback && fallback.pictureId) {
        // Convert pictureId to URL
        const baseUrl = process.env.SERVER_URL || 'http://localhost:3333';
        finalMetaImage = `${baseUrl}/api/images/${fallback.pictureId}`;
      }
    }

    // Generate canonical URL
    const baseUrl = process.env.SERVER_URL || 'http://localhost:3333';
    const canonicalUrl = `${baseUrl}/user/${identifier}/pages/${slug}`;

    // Generate static HTML
    const { generateStaticHtml } = await import('../html-generator.js');
    const html = generateStaticHtml({
      title: `${truncate(page.title, 50)} by ${truncate(authorName, 20)} | PolyMech`,
      description,
      image: finalMetaImage,
      author: authorName,
      content: htmlContent,
      canonicalUrl
    });

    c.header('Content-Type', 'text/html; charset=utf-8');
    return c.html(html);
  } catch (err: any) {
    logger.error({ err }, 'User Page HTML export failed');
    return c.text('Internal Server Error', 500);
  }
}

export async function handleGetUserPagePdf(c: Context) {
  try {
    const { supabase } = await import('../../../commons/supabase.js');

    const identifier = c.req.param('identifier');
    let slug = c.req.param('slug');

    // Strip .pdf extension if present (due to regex match)
    if (slug && slug.endsWith('.pdf')) {
      slug = slug.slice(0, -4);
    }

    let requesterUserId: string | undefined;
    const authHeader = c.req.header('Authorization');
    if (authHeader) {
      const token = authHeader.replace('Bearer ', '');
      const { data: { user } } = await supabase.auth.getUser(token);
      requesterUserId = user?.id;
    }

    const result = await fetchUserPageDetailsServer(supabase, identifier, slug, requesterUserId);

    if (!result || !result.page) {
      return c.text('Page not found', 404);
    }

    const { page, userProfile } = result;

    // Import PDF product dynamically
    const { PdfProduct } = await import('../../pdf/index.js');
    const pdfProduct = new PdfProduct();

    // Use the existing PDF generation logic from the PDF product
    // by handing it a mock context carrying the page ID
    const mockContext = {
      req: {
        param: (name: string) => {
          if (name === 'id') return page.id;
          return c.req.param(name);
        },
        header: (name: string) => c.req.header(name)
      },
      header: (name: string, value: string) => c.header(name, value),
      body: (data: any) => c.body(data),
      text: (text: string) => c.text(text, 500)
    } as any;

    return await pdfProduct.handleRenderPagePdf(mockContext);
  } catch (err: any) {
    logger.error({ err }, 'User Page PDF export failed');
    return c.text('Internal Server Error', 500);
  }
}
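
// The mock-context trick above is repeated for posts below; a minimal sketch of
// how it could be factored out, assuming the PDF renderer only reads
// `req.param`/`req.header` and writes via `header`/`body`/`text`
// (`makePdfContext` is a hypothetical helper name):
// function makePdfContext(c: Context, id: string) {
//   return {
//     req: {
//       param: (name: string) => (name === 'id' ? id : c.req.param(name)),
//       header: (name: string) => c.req.header(name)
//     },
//     header: (name: string, value: string) => c.header(name, value),
//     body: (data: any) => c.body(data),
//     text: (text: string) => c.text(text, 500)
//   } as any;
// }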

export async function handleGetUserPageJson(c: Context) {
  try {
    const { supabase } = await import('../../../commons/supabase.js');

    const identifier = c.req.param('identifier');
    let slug = c.req.param('slug');

    // Strip .json extension if present (due to regex match)
    if (slug && slug.endsWith('.json')) {
      slug = slug.slice(0, -5);
    }

    let requesterUserId: string | undefined;
    const authHeader = c.req.header('Authorization');
    if (authHeader) {
      const token = authHeader.replace('Bearer ', '');
      const { data: { user } } = await supabase.auth.getUser(token);
      requesterUserId = user?.id;
    }

    const result = await fetchUserPageDetailsServer(supabase, identifier, slug, requesterUserId);

    if (!result || !result.page) {
      return c.json({ error: 'Page not found' }, 404);
    }

    return c.json(result);
  } catch (err: any) {
    logger.error({ err }, 'User Page JSON export failed');
    return c.json({ error: 'Internal Server Error' }, 500);
  }
}

export async function handleGetPageContent(c: Context) {
  const userId = c.req.param('userId');
  const slug = c.req.param('slug');

  const { supabase } = await import('../../../commons/supabase.js');
  const { data: page, error } = await supabase
    .from('pages')
    .select('content')
    .eq('owner', userId)
    .eq('slug', slug)
    .single();

  if (error || !page) {
    return c.text('Page not found', 404);
  }

  const { markdown } = await renderPageContent(page.content);
  return c.text(markdown, 200, { 'Content-Type': 'text/markdown' });
}

export async function handleGetPageMeta(c: Context) {
  const userId = c.req.param('userId');
  const slug = c.req.param('slug');
  const orgSlug = c.req.param('orgSlug');
  try {
    const { supabase } = await import('../../../commons/supabase.js');

    // No requester resolution here: bots and browsers typically hit this route without an auth header
    const result = await fetchUserPageDetailsServer(supabase, userId, slug, undefined);

    // Bot Detection: Serve markdown to bots if enabled
    if (process.env.RENDER_HTML_BOTS === 'true') {
      const userAgent = c.req.header('User-Agent');
      const { isBotRequest, renderBotResponse } = await import('../bots.js');

      if (isBotRequest(userAgent)) {
        if (!result || !result.page) {
          return c.text('Page not found', 404);
        }

        const markdown = renderBotResponse(result.page, result.userProfile);
        c.header('Content-Type', 'text/markdown; charset=utf-8');
        return c.body(markdown);
      }
    }

    const html = await loadTemplate();
    if (!html) {
      logger.error('handleGetPageMeta: Template not found');
      return c.text('handleGetPageMeta: Template not found', 500);
    }

    if (!result || !result.page) {
      logger.warn({ userId, slug }, 'Page not found for meta injection');
      return c.html(html); // Serve the app shell and let the client handle the 404
    }

    const { page, userProfile } = result;

    // Construct meta
    const authorName = userProfile?.display_name || userProfile?.username || "Polymech User";
    const truncate = (str: string, max: number) => str.length > max ? str.substring(0, max - 1) + '…' : str;

    const { markdown, images } = await renderPageContent(page.content);

    const pageTitle = `${truncate(page.title, 50)} by ${truncate(authorName, 20)} | PolyMech`;

    let rawDescription = markdown
      .replace(/!\[.*?\]\(.*?\)/g, '') // Remove images first
      .replace(/[#*\[\]()]/g, '') // Remove remaining markdown chars
      .replace(/\n+/g, ' ') // Collapse newlines
      .trim();

    let description = (rawDescription.length > 50 ? rawDescription : page.description) || `Check out this page by ${authorName}.`;

    if (description.length > 200) {
      description = truncate(description, 200);
    }

    const metaImage = images.length > 0 ? images[0] : undefined;
    let finalMetaImage = metaImage;

    if (!finalMetaImage) {
      // Fallback: Try to extract from content using shared helper (e.g. Markdown images)
      const fallback = extractDisplayImage(page.content);
      if (fallback && fallback.imageUrl) {
        finalMetaImage = fallback.imageUrl;
      }
    }

    const meta = {
      title: pageTitle,
      description: description,
      image: finalMetaImage,
      jsonLd: {
        "@context": "https://schema.org",
        "@type": "WebPage",
        "headline": truncate(page.title, 100),
        "datePublished": page.created_at,
        "dateModified": page.updated_at,
        "author": {
          "@type": "Person",
          "name": authorName
        },
        "description": description
      }
    };

    const injectedHtml = inject(html, meta);
    return c.html(injectedHtml);

  } catch (err: any) {
    logger.error({ err }, 'Page Meta injection failed');
    const html = await loadTemplate();
    return c.html(html || 'Error');
  }
}

@@ -0,0 +1,300 @@
import { Context } from 'hono';
import { z } from '@hono/zod-openapi'; // extended z so .openapi() is available
import { createRouteBody } from '../routes.js';
import { logger } from '../logger.js';
import { fetchPostDetailsServer } from './db-posts.js';

// ==================== POST EXPORT ROUTES ====================

export const getPostPdfRoute = createRouteBody(
  'get',
  '/post/:id{.+\\.pdf$}',
  ['Posts'],
  'Get Post PDF',
  'Returns the post content as a PDF document.',
  {
    params: z.object({
      id: z.string()
    })
  },
  {
    200: {
      description: 'PDF Document',
      content: {
        'application/pdf': {
          schema: z.string().openapi({ format: 'binary' })
        }
      }
    },
    404: {
      description: 'Post not found'
    }
  },
  true // public
);

export const getPostJsonRoute = createRouteBody(
  'get',
  '/post/:id{.+\\.json$}',
  ['Posts'],
  'Get Post JSON',
  'Returns the post data as JSON.',
  {
    params: z.object({
      id: z.string()
    })
  },
  {
    200: {
      description: 'Post data as JSON',
      content: {
        'application/json': {
          schema: z.object({
            post: z.any(),
            userProfile: z.any().optional()
          })
        }
      }
    },
    404: {
      description: 'Post not found'
    }
  },
  true // public
);

export const getPostHtmlRoute = createRouteBody(
  'get',
  '/post/:id{.+\\.html$}',
  ['Posts'],
  'Get Post HTML',
  'Returns static HTML with Open Graph metadata for the post.',
  {
    params: z.object({
      id: z.string()
    })
  },
  {
    200: {
      description: 'HTML Document',
      content: {
        'text/html': {
          schema: z.string()
        }
      }
    },
    404: {
      description: 'Post not found'
    }
  },
  true // public
);

export const getPostMarkdownRoute = createRouteBody(
  'get',
  '/post/:id{.+\\.md$}',
  ['Posts'],
  'Get Post Markdown',
  'Returns the post content as markdown.',
  {
    params: z.object({
      id: z.string()
    })
  },
  {
    200: {
      description: 'Markdown content',
      content: {
        'text/markdown': {
          schema: z.string()
        }
      }
    },
    404: {
      description: 'Post not found'
    }
  },
  true // public
);

// ==================== POST EXPORT HANDLERS ====================

export async function handleGetPostPdf(c: Context) {
  try {
    const { supabase } = await import('../../../commons/supabase.js');

    let id = c.req.param('id');

    logger.info({ rawId: id }, 'PDF export - raw ID from route');

    // Strip .pdf extension if present (due to regex match)
    if (id && id.endsWith('.pdf')) {
      id = id.slice(0, -4);
      logger.info({ strippedId: id }, 'PDF export - stripped .pdf extension');
    }

    let requesterUserId: string | undefined;
    const authHeader = c.req.header('Authorization');
    if (authHeader) {
      const token = authHeader.replace('Bearer ', '');
      const { data: { user } } = await supabase.auth.getUser(token);
      requesterUserId = user?.id;
    }

    const result = await fetchPostDetailsServer(supabase, id, {}, requesterUserId);

    if (!result) {
      return c.text('Post not found', 404);
    }

    // Import PDF product dynamically
    const { PdfProduct } = await import('../../pdf/index.js');
    const pdfProduct = new PdfProduct();

    // Use the existing PDF generation logic from the PDF product
    const mockContext = {
      req: {
        param: (name: string) => {
          if (name === 'id') return id;
          return c.req.param(name);
        },
        header: (name: string) => c.req.header(name)
      },
      header: (name: string, value: string) => c.header(name, value),
      body: (data: any) => c.body(data),
      text: (text: string) => c.text(text, 500)
    } as any;

    return await pdfProduct.handleRenderPdf(mockContext);
  } catch (err: any) {
    logger.error({ err }, 'Post PDF export failed');
    return c.text('Internal Server Error', 500);
  }
}

export async function handleGetPostJson(c: Context) {
  try {
    const { supabase } = await import('../../../commons/supabase.js');

    let id = c.req.param('id');

    // Strip .json extension if present (due to regex match)
    if (id && id.endsWith('.json')) {
      id = id.slice(0, -5);
    }

    let requesterUserId: string | undefined;
    const authHeader = c.req.header('Authorization');
    if (authHeader) {
      const token = authHeader.replace('Bearer ', '');
      const { data: { user } } = await supabase.auth.getUser(token);
      requesterUserId = user?.id;
    }

    const result = await fetchPostDetailsServer(supabase, id, {}, requesterUserId);

    if (!result) {
      return c.json({ error: 'Post not found' }, 404);
    }

    // Fetch user profile
    const { fetchUserProfilesCached } = await import('./db-user.js');
    const profiles = await fetchUserProfilesCached(supabase, [result.user_id]);
    const userProfile = profiles[result.user_id];

    return c.json({ post: result, userProfile });
  } catch (err: any) {
    logger.error({ err }, 'Post JSON export failed');
    return c.json({ error: 'Internal Server Error' }, 500);
  }
}

export async function handleGetPostHtml(c: Context) {
  try {
    const { supabase } = await import('../../../commons/supabase.js');

    let id = c.req.param('id');

    // Strip .html extension if present (due to regex match)
    if (id && id.endsWith('.html')) {
      id = id.slice(0, -5);
    }

    const result = await fetchPostDetailsServer(supabase, id, {}, undefined);

    if (!result) {
      return c.text('Post not found', 404);
    }

    // Use template injection approach like the post meta handler
    const { loadTemplate, inject, extractDisplayImage } = await import('../renderer.js');

    const template = await loadTemplate();

    if (!template) {
      return c.text('Template not found', 500);
    }

    const imageCandidate = extractDisplayImage(result.pictures);
    const imageUrl = imageCandidate?.imageUrl || (result.pictures && result.pictures[0]?.image_url);

    const meta = {
      title: result.title || 'Untitled Post',
      description: result.description || '',
      image: imageUrl,
      url: `${process.env.SERVER_URL || 'https://polymech.info'}/post/${result.id}`
    };

    const html = inject(template, meta);

    c.header('Content-Type', 'text/html');
    return c.body(html);
  } catch (err: any) {
    logger.error({ err }, 'Post HTML export failed');
    return c.text('Internal Server Error', 500);
  }
}

export async function handleGetPostMarkdown(c: Context) {
  try {
    const { supabase } = await import('../../../commons/supabase.js');

    let id = c.req.param('id');

    // Strip .md extension if present (due to regex match)
    if (id && id.endsWith('.md')) {
      id = id.slice(0, -3);
    }

    const result = await fetchPostDetailsServer(supabase, id, {}, undefined);

    if (!result) {
      return c.text('Post not found', 404);
    }

    // Generate markdown from post
    let markdown = `# ${result.title || 'Untitled Post'}\n\n`;

    if (result.description) {
      markdown += `${result.description}\n\n`;
    }

    // Add images
    if (result.pictures && result.pictures.length > 0) {
      markdown += `## Images\n\n`;
      result.pictures.forEach((pic: any, index: number) => {
        const title = pic.title || `Image ${index + 1}`;
        markdown += `![${title}](${pic.image_url})\n\n`; // embed the image with its title as alt text
        if (pic.description) {
          markdown += `*${pic.description}*\n\n`;
        }
      });
    }

    markdown += `\n---\n\nCreated: ${new Date(result.created_at).toLocaleDateString()}\n`;

    return c.text(markdown, 200, { 'Content-Type': 'text/markdown' });
  } catch (err: any) {
    logger.error({ err }, 'Post Markdown export failed');
    return c.text('Internal Server Error', 500);
  }
}
1034 packages/shared/src/server/products/serving/db/db-posts.ts (new file)
File diff suppressed because it is too large
503 packages/shared/src/server/products/serving/db/db-types.ts (new file)
@@ -0,0 +1,503 @@
import { SupabaseClient } from '@supabase/supabase-js';
import { createRouteBody } from '../routes.js';
import { z } from '@hono/zod-openapi';
import { Context } from 'hono';
import { logger } from '../logger.js';

export const fetchTypesServer = async (supabase: SupabaseClient, options: {
  kind?: string;
  parentTypeId?: string;
  visibility?: string;
  userId?: string;
}) => {
  let query = supabase
    .from('types')
    .select('*')
    .order('name');

  if (options.kind) {
    query = query.eq('kind', options.kind);
  }

  if (options.parentTypeId) {
    query = query.eq('parent_type_id', options.parentTypeId);
  }

  if (options.visibility) {
    query = query.eq('visibility', options.visibility);
  }

  // Note: RLS handles user visibility logic (owner_id vs public/admin).
  // We just execute the query.

  const { data, error } = await query;
  if (error) throw error;
  return data;
};

export const fetchTypeByIdServer = async (supabase: SupabaseClient, id: string) => {
  // Fetch the type with its child definitions (fields, enums, flags).
  // Knowing the 'kind' would tell us which child table applies, but a single
  // PostgREST query that joins every child table is simpler, so we fetch
  // everything relevant in one go.
  const { data, error } = await supabase
    .from('types')
    .select(`
      *,
      enum_values:type_enum_values(*),
      flag_values:type_flag_values(*),
      structure_fields:type_structure_fields!type_structure_fields_structure_type_id_fkey(*),
      casts_from:type_casts!from_type_id(*),
      casts_to:type_casts!to_type_id(*)
    `)
    .eq('id', id)
    .single();

  if (error) throw error;
  return data;
};
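
// Result shape sketch for the joined select above (keys follow the aliases in
// the query; the exact row fields depend on the table definitions):
// {
//   ...typeColumns,                    // all columns from `types`
//   enum_values: [...],                // rows from type_enum_values
//   flag_values: [...],                // rows from type_flag_values
//   structure_fields: [...],           // rows from type_structure_fields
//   casts_from: [...], casts_to: [...] // rows from type_casts, by FK direction
// }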

export const createTypeServer = async (supabase: SupabaseClient, typeData: any) => {
  // 1. Create the Type record
  const { data: type, error } = await supabase
    .from('types')
    .insert({
      name: typeData.name,
      kind: typeData.kind,
      parent_type_id: typeData.parent_type_id,
      description: typeData.description,
      json_schema: typeData.jsonSchema,
      owner_id: typeData.ownerId, // Optional, RLS/Trigger might set defaults or validate
      visibility: typeData.visibility || 'public',
      meta: typeData.meta,
      settings: typeData.settings
    })
    .select()
    .single();

  if (error) throw error;

  // 2. Handle kind-specific data (enums, flags, structures).
  // A transaction would be ideal, but supabase-js cannot open one without an RPC,
  // so we fall back to sequential inserts. A failure can leave an orphan type
  // behind, which is acceptable for now.

  if (type.kind === 'enum' && typeData.enumValues && Array.isArray(typeData.enumValues)) {
    const values = typeData.enumValues.map((v: any, idx: number) => ({
      type_id: type.id,
      value: v.value,
      label: v.label,
      order: v.order ?? idx
    }));

    if (values.length > 0) {
      const { error: enumError } = await supabase.from('type_enum_values').insert(values);
      if (enumError) throw enumError; // Should we delete the orphan type? For now just throw.
    }
  }

  if (type.kind === 'flags' && typeData.flagValues && Array.isArray(typeData.flagValues)) {
    const values = typeData.flagValues.map((v: any) => ({
      type_id: type.id,
      name: v.name,
      bit: v.bit
    }));

    if (values.length > 0) {
      const { error: flagError } = await supabase.from('type_flag_values').insert(values);
      if (flagError) throw flagError;
    }
  }

  if (type.kind === 'structure' && typeData.structure_fields && Array.isArray(typeData.structure_fields)) {
    const values = typeData.structure_fields.map((v: any, idx: number) => ({
      structure_type_id: type.id,
      field_name: v.field_name,
      field_type_id: v.field_type_id,
      required: v.required ?? false,
      default_value: v.default_value,
      order: v.order ?? idx
    }));

    if (values.length > 0) {
      const { error: structError } = await supabase.from('type_structure_fields').insert(values);
      if (structError) throw structError;
    }
  }

  return fetchTypeByIdServer(supabase, type.id); // Return the full object
};
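
// Example payload sketch for createTypeServer (values illustrative only):
// await createTypeServer(supabase, {
//   name: 'Color',
//   kind: 'enum',
//   visibility: 'public',
//   enumValues: [
//     { value: 'red', label: 'Red' },   // order defaults to the array index
//     { value: 'blue', label: 'Blue' }
//   ]
// });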

export const updateTypeServer = async (supabase: SupabaseClient, id: string, updates: any) => {
  // 1. Update Core Type
  const { data: type, error } = await supabase
    .from('types')
    .update({
      name: updates.name,
      description: updates.description,
      json_schema: updates.jsonSchema,
      visibility: updates.visibility,
      meta: updates.meta,
      settings: updates.settings
    })
    .eq('id', id)
    .select()
    .single();

  if (error) throw error;

  // 2. Update Children
  if (updates.structure_fields && Array.isArray(updates.structure_fields)) {
    // Replace strategy: Delete all existing fields for this type and insert new ones
    const { error: deleteError } = await supabase
      .from('type_structure_fields')
      .delete()
      .eq('structure_type_id', id);

    if (deleteError) throw deleteError;

    const values = updates.structure_fields.map((v: any, idx: number) => ({
      structure_type_id: id,
      field_name: v.field_name,
      field_type_id: v.field_type_id,
      required: v.required ?? false,
      default_value: v.default_value,
      order: v.order ?? idx
    }));

    if (values.length > 0) {
      const { error: insertError } = await supabase.from('type_structure_fields').insert(values);
      if (insertError) throw insertError;
    }
  }

  // 3. Delete orphaned field types (after the structure update to avoid FK constraint violations)
  if (updates.fieldsToDelete && Array.isArray(updates.fieldsToDelete) && updates.fieldsToDelete.length > 0) {
    for (const fieldTypeId of updates.fieldsToDelete) {
      await deleteTypeServer(supabase, fieldTypeId);
    }
  }

  return fetchTypeByIdServer(supabase, id);
};

export const deleteTypeServer = async (supabase: SupabaseClient, id: string) => {
  // First, get the type to check if it's a structure and collect field type IDs
  const { data: typeToDelete, error: fetchError } = await supabase
    .from('types')
    .select('kind, structure_fields:type_structure_fields!type_structure_fields_structure_type_id_fkey(field_type_id)')
    .eq('id', id)
    .single();

  if (fetchError) throw fetchError;

  // Collect field type IDs before deleting the structure
  let fieldTypeIds: string[] = [];
  if (typeToDelete?.kind === 'structure' && typeToDelete.structure_fields) {
    fieldTypeIds = typeToDelete.structure_fields
      .map((f: any) => f.field_type_id)
      .filter(Boolean);
  }

  // Delete the structure first (CASCADE will delete type_structure_fields entries)
  const { error } = await supabase
    .from('types')
    .delete()
    .eq('id', id);

  if (error) throw error;

  // Now delete the orphaned field types (no longer referenced by type_structure_fields)
  if (fieldTypeIds.length > 0) {
    const { error: fieldDeleteError } = await supabase
      .from('types')
      .delete()
      .in('id', fieldTypeIds);

    if (fieldDeleteError) {
      logger.error({ err: fieldDeleteError }, 'Failed to delete field types');
      // Don't throw here - the main type is already deleted
    }
  }

  return true;
};

// Route Definitions

export const getTypesRoute = createRouteBody(
  'get',
  '/api/types',
  ['Types'],
  'Get Types',
  'Get Types',
  {
    query: z.object({
      kind: z.enum(['primitive', 'enum', 'flags', 'structure', 'alias', 'field']).optional(),
      parentTypeId: z.string().optional(),
      visibility: z.enum(['public', 'private', 'custom']).optional()
    })
  },
  {
    200: {
      description: 'Types List',
      content: {
        'application/json': {
          schema: z.array(z.any())
        }
      }
    }
  },
  false
);

export const getTypeRoute = createRouteBody(
  'get',
  '/api/types/:id',
  ['Types'],
  'Get Type Details',
  'Get Type Details',
  {
    params: z.object({
      id: z.string()
    })
  },
  {
    200: {
      description: 'Type Details',
      content: {
        'application/json': {
          schema: z.any()
        }
      }
    },
    404: { description: 'Type not found' }
  },
  false
);

export const createTypeRoute = createRouteBody(
  'post',
  '/api/types',
  ['Types'],
  'Create Type',
  'Create Type',
  {
    body: {
      content: {
        'application/json': {
          schema: z.object({
            name: z.string(),
            kind: z.enum(['primitive', 'enum', 'flags', 'structure', 'alias', 'field']),
            parent_type_id: z.string().optional().nullable(),
            description: z.string().optional(),
            jsonSchema: z.any().optional(),
            visibility: z.enum(['public', 'private', 'custom']).optional(),
            meta: z.any().optional(),
            settings: z.any().optional(),
            // Children data for creation
            enumValues: z.array(z.object({
              value: z.string(),
              label: z.string(),
              order: z.number().optional()
            })).optional(),
            flagValues: z.array(z.object({
              name: z.string(),
              bit: z.number()
            })).optional(),
            structure_fields: z.array(z.object({
              field_name: z.string(),
              field_type_id: z.string(),
              required: z.boolean().optional(),
              default_value: z.any().optional(),
              order: z.number().optional()
            })).optional()
          })
        }
      }
    }
  },
  {
    200: {
      description: 'Type Created',
      content: {
        'application/json': {
          schema: z.any()
        }
      }
    },
    401: { description: 'Unauthorized' }
  },
  false
);

export const updateTypeRoute = createRouteBody(
  'patch',
  '/api/types/:id',
  ['Types'],
  'Update Type',
  'Update Type',
  {
    params: z.object({
      id: z.string()
    }),
    body: {
      content: {
        'application/json': {
          schema: z.object({
            name: z.string().optional(),
            description: z.string().optional(),
            jsonSchema: z.any().optional(),
            visibility: z.enum(['public', 'private', 'custom']).optional(),
            meta: z.any().optional(),
            settings: z.any().optional(),
            structure_fields: z.array(z.object({
              field_name: z.string(),
              field_type_id: z.string(),
              required: z.boolean().optional(),
              default_value: z.any().optional(),
              order: z.number().optional()
            })).optional(),
            fieldsToDelete: z.array(z.string()).optional()
          })
        }
      }
    }
  },
  {
    200: {
      description: 'Type Updated',
      content: {
        'application/json': {
          schema: z.any()
        }
      }
    },
    401: { description: 'Unauthorized' }
  },
  false
);

export const deleteTypeRoute = createRouteBody(
  'delete',
  '/api/types/:id',
  ['Types'],
  'Delete Type',
  'Delete Type',
  {
    params: z.object({
      id: z.string()
    })
  },
  {
    200: {
      description: 'Type Deleted',
      content: {
        'application/json': {
          schema: z.object({ success: z.boolean() })
        }
      }
    },
    401: { description: 'Unauthorized' }
  },
  false
);

// --- Handlers ---

export async function handleGetTypes(c: Context) {
  try {
    const { supabase } = await import('../../../commons/supabase.js');

    const kind = c.req.query('kind');
    const parentTypeId = c.req.query('parentTypeId');
    const visibility = c.req.query('visibility');

    // Extract user ID from Auth Token (if present)
    let userId: string | undefined;
    const authHeader = c.req.header('Authorization');
    if (authHeader) {
      const token = authHeader.replace('Bearer ', '');
      const { data: { user } } = await supabase.auth.getUser(token);
      userId = user?.id;
    }

    const types = await fetchTypesServer(supabase, { kind, parentTypeId, visibility, userId });
    return c.json(types);
  } catch (err: any) {
    logger.error({ err }, 'Get Types failed');
    return c.text('Internal Server Error', 500);
  }
}

export async function handleGetType(c: Context) {
  try {
    const { supabase } = await import('../../../commons/supabase.js');

    const id = c.req.param('id');
    const type = await fetchTypeByIdServer(supabase, id);

    if (!type) return c.json({ error: 'Type not found' }, 404);
    return c.json(type);
  } catch (err: any) {
    logger.error({ err }, 'Get Type failed');
    return c.text('Internal Server Error', 500);
  }
}

export async function handleCreateType(c: Context) {
  try {
    const { supabase } = await import('../../../commons/supabase.js');

    const body = await c.req.json();

    // Validate owner via Auth
    const authHeader = c.req.header('Authorization');
    if (authHeader) {
      const token = authHeader.replace('Bearer ', '');
      const { data: { user } } = await supabase.auth.getUser(token);
      if (user) {
        body.ownerId = user.id;
      }
    }

    const newType = await createTypeServer(supabase, body);
    return c.json(newType);
  } catch (err: any) {
    logger.error({ err }, 'Create Type failed');
    return c.text('Internal Server Error', 500);
  }
}

export async function handleUpdateType(c: Context) {
  try {
    const { supabase } = await import('../../../commons/supabase.js');

    const id = c.req.param('id');
    const body = await c.req.json();

    const updatedType = await updateTypeServer(supabase, id, body);
    return c.json(updatedType);
  } catch (err: any) {
    logger.error({ err }, 'Update Type failed');
    return c.text('Internal Server Error', 500);
  }
}

export async function handleDeleteType(c: Context) {
  try {
    const { supabase } = await import('../../../commons/supabase.js');

    const id = c.req.param('id');

    await deleteTypeServer(supabase, id);
    return c.json({ success: true });
  } catch (err: any) {
    logger.error({ err }, 'Delete Type failed');
    return c.text('Internal Server Error', 500);
  }
}
216 packages/shared/src/server/products/serving/db/db-user.ts (new file)
@@ -0,0 +1,216 @@
import { Context } from 'hono';
import { logger } from '../logger.js';
import { loadTemplate, inject } from '../renderer.js';
import { augmentPosts, applyClientSortAndCovers, safeStringify } from '../content.js'; // Ensure safeStringify is exported from content.ts
import { PgBoss } from 'pg-boss';

const { getCache } = await import('../../../commons/cache/index.js');
const { supabase } = await import('../../../commons/supabase.js');

// --- Shared Helper ---
export async function getProfileData(userId: string, options: { page: number, limit: number, sizesStr?: string, formatsStr?: string, useCache?: boolean, sortBy?: 'latest' | 'top' }, boss?: PgBoss) {
  const { page, limit, sizesStr, formatsStr, useCache, sortBy = 'latest' } = options;

  let cacheKey = `profile-${userId}-p${page}-l${limit}-sort-${sortBy}`;
  if (sizesStr) cacheKey += `-s${sizesStr}`;
  if (formatsStr) cacheKey += `-f${formatsStr}`;

  const cache = getCache();

  if (useCache !== false) {
    const cached = await cache.get(cacheKey);
    if (cached) return { data: cached, _cacheHit: true };
  }

  const start = page * limit;
  const end = start + limit - 1;

  const [profileRes, postsRes] = await Promise.all([
    supabase.from('profiles').select('id, user_id, username, display_name, avatar_url, bio, created_at, updated_at').eq('user_id', userId).single(),
    supabase.from('posts').select('*, pictures(*)').eq('user_id', userId).order('created_at', { ascending: false }).range(start, end)
  ]);

  if (profileRes.error) return null;

  const posts = postsRes.data || [];
  const profilesMap = { [userId]: profileRes.data };

  let augmentedPosts = await augmentPosts(supabase, posts, profilesMap, { sizesStr, formatsStr }, boss);
  augmentedPosts = applyClientSortAndCovers(augmentedPosts, sortBy);

  const result = {
    profile: profileRes.data,
    recentPosts: augmentedPosts
  };

  await cache.set(cacheKey, result, 300);
  return { data: result, _cacheHit: false };
}
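
// Usage sketch: a repeat call with the same parameters within the 300s TTL is
// served from the cache (IDs illustrative):
// const first = await getProfileData('user-123', { page: 0, limit: 10 });  // _cacheHit: false
// const second = await getProfileData('user-123', { page: 0, limit: 10 }); // _cacheHit: true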

/**
 * Efficiently fetches user profiles with an individual caching strategy.
 * Read-through cache: checks the cache for each ID, fetches the missing ones, caches the result.
 */
export async function fetchUserProfilesCached(
  supabase: any,
  userIds: string[]
): Promise<Record<string, any>> {
  if (userIds.length === 0) return {};

  // 1. Check Cache
  const cache = getCache();
  const CACHE_TTL = 300; // 5 minutes

  const uniqueIds = [...new Set(userIds)];
  const missingIds: string[] = [];
  const profilesMap: Record<string, any> = {};

  await Promise.all(uniqueIds.map(async (id) => {
    const key = `user-profile:${id}`;
    const cached = await cache.get<any>(key);
    if (cached) {
      profilesMap[id] = cached;
    } else {
      missingIds.push(id);
    }
  }));

  if (missingIds.length === 0) {
    return profilesMap;
  }

  // 2. Fetch Missing
  const { data: profiles } = await supabase
    .from('profiles')
    .select('user_id, username, display_name, avatar_url')
    .in('user_id', missingIds);

  if (profiles) {
    await Promise.all(profiles.map(async (p: any) => {
      const key = `user-profile:${p.user_id}`;
      await cache.set(key, p, CACHE_TTL);
      profilesMap[p.user_id] = p;
    }));
  }

  return profilesMap;
}
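
// Usage sketch: duplicate IDs are de-duplicated, and each fetched profile is
// cached under `user-profile:<id>` for later lookups (names illustrative):
// const profiles = await fetchUserProfilesCached(supabase, [a.user_id, b.user_id, a.user_id]);
// const authorA = profiles[a.user_id]; // undefined if no profile row exists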

// --- Handlers ---

export async function handleGetProfiles(c: Context) {
  try {
    const idsStr = c.req.query('ids');
    if (!idsStr) return c.json({});

    const ids = idsStr.split(',').filter(Boolean);
    if (ids.length === 0) return c.json({});

    // Check Cache first
    const cache = getCache();
    const cacheKey = `profiles-batch-${ids.sort().join('-')}`;

    const cached = await cache.get(cacheKey);
    if (cached) {
      c.header('X-Cache', 'HIT');
      return c.json(cached);
    }

    const { supabase } = await import('../../../commons/supabase.js');
    const { data, error } = await supabase
      .from('profiles')
      .select('user_id, username, display_name, avatar_url')
      .in('user_id', ids);

    if (error) throw error;

    const profilesMap = (data || []).reduce((acc: any, p: any) => {
      acc[p.user_id] = p;
      return acc;
    }, {});

    // Cache for 5 minutes
    await cache.set(cacheKey, profilesMap, 300);

    c.header('X-Cache', 'MISS');
    return c.json(profilesMap);
  } catch (err: any) {
    logger.error({ err }, 'Batch profiles fetch failed');
    return c.text('Internal Server Error', 500);
  }
}

export async function handleGetProfile(c: Context, boss?: PgBoss) {
  let userId = c.req.param('id');
  if (userId === 'default') userId = process.env.DEFAULT_USER_ID || '';

  try {
    const page = parseInt(c.req.query('page') || '0');
    const limit = parseInt(c.req.query('limit') || '10');
    const sizesStr = c.req.query('sizes');
    const formatsStr = c.req.query('formats');
    const sortBy = c.req.query('sortBy') as 'latest' | 'top' || 'latest';
    const cache = c.req.query('cache') !== 'false';

    const result = await getProfileData(userId, { page, limit, sizesStr, formatsStr, useCache: cache, sortBy }, boss);
    if (!result) return c.text('Profile not found', 404);

    c.header('X-Cache', result._cacheHit ? 'HIT' : 'MISS');
    return c.json(result.data);
  } catch (err: any) {
    logger.error({ err, userId }, 'API Profile fetch failed');
    return c.text('Internal Server Error', 500);
  }
}

export async function handleGetProfilePage(c: Context, boss?: PgBoss) {
  let userId = c.req.param('id');
  if (userId === 'default') userId = process.env.DEFAULT_USER_ID || '';

  try {
    const html = await loadTemplate();
    if (!html) return c.text('Template not found', 500);

    if (process.env.INJECT_PROFILE_FEED === 'true') {
      const sortBy = c.req.query('sortBy') as 'latest' | 'top' || 'latest';
      // Default params for injection
      const result = await getProfileData(userId, { page: 0, limit: 10, useCache: true, sortBy }, boss);
      if (result) {
        const injectionScript = {
          id: 'initial-state',
          content: `window.__INITIAL_STATE__ = window.__INITIAL_STATE__ || {}; window.__INITIAL_STATE__.profile = ${safeStringify(result.data)}; `
        };
        const injected = inject(html, { scripts: [injectionScript] });
        return c.html(injected);
      }
    }
    return c.html(html);
  } catch (err) {
    logger.error({ err, userId }, 'Profile page injection failed');
    const html = await loadTemplate();
    return c.html(html || 'Error');
  }
}

export async function handleGetSecrets(c: Context) {
  // Get user from context (set by auth middleware)
  const user = c.get('user');
  if (!user) return c.text('Unauthorized', 401);

  try {
    const { data, error } = await supabase
      .from('user_secrets')
      .select('*')
      .eq('user_id', user.id)
      .single();

    if (error) throw error;
    return c.json(data);
  } catch (err: any) {
    logger.error({ err }, 'Secrets fetch failed');
    return c.text('Internal Server Error', 500);
  }
}
6 packages/shared/src/server/products/serving/db/index.ts (new file)
@@ -0,0 +1,6 @@
export * from './db-posts.js';
export * from './db-pages.js';
export * from './db-categories.js';
export * from './db-types.js';
export * from './db-user.js';
export * from './db-layouts.js';
186 packages/shared/src/server/products/serving/generators.ts (new file)
@@ -0,0 +1,186 @@
import { Feed } from 'feed';

/**
 * Escape XML entities in URLs within the RSS feed.
 * The feed library doesn't always properly escape URLs in enclosure tags.
 */
function escapeXmlUrls(xml: string): string {
  // Match url="..." patterns and escape bare & to &amp; within them
  return xml.replace(/url="([^"]*)"/g, (match, url) => {
    const escapedUrl = url.replace(/&(?!amp;|lt;|gt;|quot;|apos;)/g, '&amp;');
    return `url="${escapedUrl}"`;
  });
}
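
// Example: a bare ampersand inside a url attribute is escaped, while
// already-escaped entities are left untouched:
// escapeXmlUrls('<enclosure url="https://example.test/a.jpg?w=1&h=2" />')
// // -> '<enclosure url="https://example.test/a.jpg?w=1&amp;h=2" />'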

export const generateRSS = (posts: any[]): string => {
  const baseUrl = process.env.SERVER_URL || 'https://polymech.info';

  const feed = new Feed({
    title: "Polymech Feed",
    description: "Latest updates and photos",
    id: `${baseUrl}/`,
    link: `${baseUrl}/`,
    language: "en",
    image: `${baseUrl}/logo.png`,
    favicon: `${baseUrl}/favicon.ico`,
    copyright: "All rights reserved 2025, Polymech",
    updated: new Date(),
    generator: "Polymech Serving Product",
    feedLinks: {
      json: `${baseUrl}/feed.json`,
      atom: `${baseUrl}/feed.atom`
    },
    author: {
      name: "Polymech",
      email: "contact@polymech.info",
      link: baseUrl
    }
  });

  posts.forEach(post => {
    // Use display_image (from OG image extraction) or fall back to the first picture
    const imageUrl = post.display_image || (post.pictures && post.pictures[0]?.image_url);

    // Create a rich description with the image for better RSS reader compatibility
    let description = post.description || '';
    let content = post.description || '';

    // Add the image to the description if available (many RSS readers prefer this)
    if (imageUrl) {
      const imageHtml = `<img src="${imageUrl}" alt="${post.title || ''}" style="max-width: 100%; height: auto;" />`;
      description = imageHtml + (description ? `<br/><br/>${description}` : '');
      content = imageHtml + (content ? `<br/><br/>${content}` : '');
    }

    feed.addItem({
      title: post.title,
      id: post.id,
      link: `${baseUrl}/post/${post.id}`,
      description: description,
      content: content,
      author: [
        {
          name: post.author_name || "Unknown",
          email: "user@polymech.info"
        }
      ],
      date: new Date(post.created_at),
      image: imageUrl // This creates the enclosure tag
    });
  });

  // Generate RSS and escape XML entities in URLs
  const rss = feed.rss2();
  return escapeXmlUrls(rss);
};
|
||||
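
// Sketch (assumed helper name, not the handler in this commit): wiring the
// generator into a Hono handler and serving it with an XML content type.
//
// async function serveFeed(c: Context): Promise<Response> {
//     const posts = await fetchRecentPosts(); // assumed data-access helper
//     return c.body(generateRSS(posts), 200, { 'Content-Type': 'application/xml' });
// }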

export const generateMerchantXML = (products: any[]): string => {
    // Basic Google Merchant XML implementation
    let xml = `<?xml version="1.0"?>
<rss xmlns:g="http://base.google.com/ns/1.0" version="2.0">
<channel>
<title>Polymech Store</title>
<link>https://polymech.info</link>
<description>Polymech Media Store</description>
`;

    products.forEach(p => {
        xml += `<item>
<g:id>${p.id}</g:id>
<g:title>${escapeXml(p.title)}</g:title>
<g:description>${escapeXml(p.description)}</g:description>
<g:link>https://polymech.info/post/${p.id}</g:link>
<g:image_link>${p.image_url}</g:image_link>
<g:condition>new</g:condition>
<g:availability>in stock</g:availability>
<g:price>0.00 USD</g:price>
</item>
`;
    });

    xml += `</channel></rss>`;
    return xml;
};

export const generateLLMText = (posts: any[]): string => {
    const baseUrl = process.env.SERVER_URL || 'https://polymech.info';

    let md = `# Polymech Media Platform

> A full-stack media platform for sharing photos, videos, and articles with advanced content management, widget-based page building, and multi-format export capabilities.

Polymech is a modern media platform built with React, Hono, and Supabase. It supports rich content creation through a flexible widget system, category management, and multiple export formats (HTML, PDF, Markdown, JSON).

## Documentation

- [API Documentation](${baseUrl}/api/reference): Complete API reference with OpenAPI/Scalar interface
- [Platform Overview](${baseUrl}/about.md): Architecture and key features

## Recent Content

`;

    // Add up to 10 most recent posts/pages as examples
    const recentPosts = posts.slice(0, 10);
    recentPosts.forEach(p => {
        const url = `${baseUrl}/post/${p.id}`;
        const title = p.title || 'Untitled';
        const author = p.author_name || 'Unknown';
        md += `- [${title}](${url}): by ${author}\n`;
    });

    md += `\n## Optional

- [RSS Feed](${baseUrl}/feed.xml): Subscribe to latest updates
- [Sitemap](${baseUrl}/sitemap.xml): Complete site structure
`;

    return md;
};

function escapeXml(unsafe: string | null | undefined): string {
    if (!unsafe) return '';
    return unsafe.replace(/[<>&'"]/g, (c) => {
        switch (c) {
            case '<': return '&lt;';
            case '>': return '&gt;';
            case '&': return '&amp;';
            case '\'': return '&apos;';
            case '"': return '&quot;';
            default: return c;
        }
    });
}

export const generateSitemapXML = (pages: any[]): string => {
    let xml = `<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
`;

    pages.forEach(page => {
        // Construct the URL from page conventions. Routes expose pages at
        // /user/:userId/pages/:slug (or /org/:orgSlug/user/:userId/pages/:slug),
        // so we assume the standard user page structure here. Generators are
        // kept pure, so the base URL is hardcoded to match the RSS feed.
        let url = `https://polymech.info/user/${page.owner}/pages/${page.slug}`;

        xml += `  <url>
    <loc>${escapeXml(url)}</loc>
    <lastmod>${new Date(page.updated_at || page.created_at).toISOString()}</lastmod>
    <changefreq>weekly</changefreq>
    <priority>0.8</priority>
  </url>
`;
    });

    xml += `</urlset>`;
    return xml;
};
packages/shared/src/server/products/serving/html-generator.ts (Normal file, 110 lines)
@ -0,0 +1,110 @@
/**
 * Generates a simple, static HTML page for bots/crawlers.
 * Contains proper meta tags but no JavaScript/SPA functionality.
 */
export function generateStaticHtml(options: {
    title: string;
    description: string;
    image?: string;
    author: string;
    content: string;
    canonicalUrl?: string;
}): string {
    const { title, description, image, author, content, canonicalUrl } = options;

    // Escape HTML entities
    const escapeHtml = (str: string) => str
        .replace(/&/g, '&amp;')
        .replace(/</g, '&lt;')
        .replace(/>/g, '&gt;')
        .replace(/"/g, '&quot;')
        .replace(/'/g, '&#39;');

    const safeTitle = escapeHtml(title);
    const safeDescription = escapeHtml(description);
    const safeAuthor = escapeHtml(author);

    return `<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>${safeTitle}</title>

    <!-- Primary Meta Tags -->
    <meta name="title" content="${safeTitle}">
    <meta name="description" content="${safeDescription}">
    <meta name="author" content="${safeAuthor}">
    ${canonicalUrl ? `<link rel="canonical" href="${canonicalUrl}">` : ''}

    <!-- Open Graph / Facebook -->
    <meta property="og:type" content="article">
    <meta property="og:title" content="${safeTitle}">
    <meta property="og:description" content="${safeDescription}">
    ${image ? `<meta property="og:image" content="${image}">` : ''}
    ${canonicalUrl ? `<meta property="og:url" content="${canonicalUrl}">` : ''}

    <!-- Twitter -->
    <meta name="twitter:card" content="summary_large_image">
    <meta name="twitter:title" content="${safeTitle}">
    <meta name="twitter:description" content="${safeDescription}">
    ${image ? `<meta name="twitter:image" content="${image}">` : ''}

    <!-- Structured Data -->
    <script type="application/ld+json">
    {
        "@context": "https://schema.org",
        "@type": "Article",
        "headline": "${safeTitle}",
        "author": {
            "@type": "Person",
            "name": "${safeAuthor}"
        },
        "description": "${safeDescription}"
        ${image ? `,\n        "image": "${image}"` : ''}
    }
    </script>

    <style>
        body {
            font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, sans-serif;
            line-height: 1.6;
            max-width: 800px;
            margin: 0 auto;
            padding: 2rem;
            color: #333;
        }
        h1, h2, h3, h4, h5, h6 {
            margin-top: 1.5em;
            margin-bottom: 0.5em;
            line-height: 1.3;
        }
        img {
            max-width: 100%;
            height: auto;
        }
        pre {
            background: #f5f5f5;
            padding: 1rem;
            overflow-x: auto;
            border-radius: 4px;
        }
        code {
            background: #f5f5f5;
            padding: 0.2em 0.4em;
            border-radius: 3px;
            font-family: 'Courier New', monospace;
        }
        blockquote {
            border-left: 4px solid #ddd;
            padding-left: 1rem;
            margin-left: 0;
            color: #666;
        }
    </style>
</head>
<body>
    ${content}
</body>
</html>`;
}
packages/shared/src/server/products/serving/index.ts (Normal file, 629 lines)
@ -0,0 +1,629 @@
import path from 'path';
import { Context } from 'hono';
import { getCache } from '../../commons/cache/index.js';
import { fetchCategoryPathsForPages } from './db/db-categories.js';
import { fetchPostDetailsServer } from './db/db-posts.js';
const { supabase } = await import('../../commons/supabase.js');
import { AbstractProduct } from '../AbstractProduct.js';

import {
    getFeedRoute,
    getMerchantFeedRoute,
    getLLMTextRoute,
    getPostMetaRoute,
    getApiFeedRoute,
    getApiPostDetailsRoute,
    getProfileRoute,
    getSecretsRoute,
    getProfilesRoute,
    getApiMediaItemsRoute,
    getHomeRoute,
    getProfilePageRoute,
    getEmbedRoute,
    getSiteInfoRoute,
    getSitemapRoute,
    postFlushCacheRoute,
    invalidateCacheRoute
} from './routes.js';
import {
    getUserPageRoute,
    getOrgUserPageRoute,
    getApiUserPageRoute,
    getUserPageContentRoute,
    getOrgUserPageContentRoute,
    getEmbedPageRoute,
    getUserPageMarkdownRoute,
    getUserPageHtmlRoute,
    getUserPagePdfRoute,
    getUserPageJsonRoute,
    renderPageContent,
    getCategoriesRoute,
    getCategoryRoute,
    createCategoryRoute,
    updateCategoryRoute,
    deleteCategoryRoute,
    handleGetCategories,
    handleGetCategory,
    handleCreateCategory,
    handleUpdateCategory,
    handleDeleteCategory,
    getTypesRoute,
    getTypeRoute,
    createTypeRoute,
    updateTypeRoute,
    deleteTypeRoute,
    handleGetTypes,
    handleGetType,
    handleCreateType,
    handleUpdateType,
    handleDeleteType,
    getLayoutsRoute,
    getLayoutRoute,
    createLayoutRoute,
    updateLayoutRoute,
    deleteLayoutRoute,
    handleGetLayouts,
    handleGetLayout,
    handleCreateLayout,
    handleUpdateLayout,
    handleDeleteLayout,
    handleGetProfiles,
    handleGetProfile,
    handleGetProfilePage,
    handleGetSecrets
} from './db/index.js';
import { CachedHandler } from '../../commons/decorators.js';
import {
    handleGetFeedXml,
    handleGetMerchantFeed,
    handleGetLLMText,
    handleGetSitemap,
    handleGetSiteInfo,
    handleGetPostMeta,
    handleGetEmbed,
    handleGetEmbedPage,
    augmentPosts,
    applyClientSortAndCovers
} from './content.js';
import {
    handleGetApiUserPage,
    handleGetUserPageMarkdown,
    handleGetUserPageHtml,
    handleGetUserPagePdf,
    handleGetUserPageJson,
    handleGetPageContent,
    handleGetPageMeta
} from './db/index.js';
import {
    getPostPdfRoute,
    getPostJsonRoute,
    getPostHtmlRoute,
    getPostMarkdownRoute,
    handleGetPostPdf,
    handleGetPostJson,
    handleGetPostHtml,
    handleGetPostMarkdown
} from './db/db-post-exports.js';
import { generateRSS, generateMerchantXML, generateLLMText, generateSitemapXML } from './generators.js';
import { generateMarkdownFromPage } from './markdown-generator.js';
import { loadTemplate, inject, extractDisplayImage } from './renderer.js';
import { logger } from './logger.js';
import { ensureCachedImage, CACHE_DIR } from '../images/index.js';
import fs from 'fs/promises';
import { PgBoss } from 'pg-boss';
import { VIDEO_JOB_NAME } from '../videos/worker.js';


// Helper to escape JSON for safe HTML injection
const safeStringify = (data: any) => {
    return JSON.stringify(data).replace(/</g, '\\u003c');
};
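
// Why the '<' escape matters: a value containing "</script>" would otherwise
// terminate the inline script tag the state is injected into. Hypothetical:
//   safeStringify({ title: '</script><script>alert(1)</script>' })
//   // => '{"title":"\u003c/script>\u003cscript>alert(1)\u003c/script>"}'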

export class ServingProduct extends AbstractProduct<any> {
    id = 'serving';
    jobOptions = {};
    actions = {};
    workers = [];
    routes: any[] = [];
    private boss?: PgBoss;

    constructor() {
        super();
        this.initializeRoutes();
    }

    hash(data: any): string {
        return 'serving-hash';
    }

    meta(userId: string): any {
        return { userId };
    }

    initializeRoutes() {
        // Feature Flags
        const enableRss = process.env.ENABLE_RSS_FEED === 'true';
        const enableMerchant = process.env.ENABLE_MERCHANT_FEED === 'true';
        const enableLlm = process.env.ENABLE_LLM_TXT === 'true';
        const enableMeta = process.env.ENABLE_META_INJECTION === 'true';

        // Legacy/SEO Routes
        if (enableRss) this.routes.push({ definition: getFeedRoute, handler: handleGetFeedXml });
        if (enableMerchant) this.routes.push({ definition: getMerchantFeedRoute, handler: handleGetMerchantFeed });
        if (enableLlm) this.routes.push({ definition: getLLMTextRoute, handler: handleGetLLMText });

        // Post Export Routes (Must be before Generic Post Route)
        this.routes.push({ definition: getPostPdfRoute, handler: handleGetPostPdf });
        this.routes.push({ definition: getPostJsonRoute, handler: handleGetPostJson });
        this.routes.push({ definition: getPostHtmlRoute, handler: handleGetPostHtml });
        this.routes.push({ definition: getPostMarkdownRoute, handler: handleGetPostMarkdown });

        if (enableMeta) this.routes.push({ definition: getPostMetaRoute, handler: CachedHandler(handleGetPostMeta) });

        // New API Routes (Always Enabled)
        this.routes.push({ definition: getApiFeedRoute, handler: CachedHandler(this.handleGetApiFeed.bind(this)), cache: { varyByAuth: true } });
        this.routes.push({ definition: getApiPostDetailsRoute, handler: this.handleGetApiPostDetails.bind(this), cache: { varyByAuth: true } });
        this.routes.push({ definition: getProfileRoute, handler: (c: Context) => handleGetProfile(c, this.boss) });
        this.routes.push({ definition: getSecretsRoute, handler: handleGetSecrets });
        this.routes.push({ definition: getApiUserPageRoute, handler: CachedHandler(handleGetApiUserPage, { varyByAuth: true }) });
        this.routes.push({ definition: postFlushCacheRoute, handler: this.handlePostFlushCache.bind(this) });
        this.routes.push({ definition: invalidateCacheRoute, handler: this.handleInvalidateCache.bind(this) });

        // Batch Profiles
        this.routes.push({ definition: getProfilesRoute, handler: CachedHandler(handleGetProfiles, { varyByAuth: true }) });

        // Batch Media Items
        this.routes.push({ definition: getApiMediaItemsRoute, handler: CachedHandler(this.handleGetApiMediaItems.bind(this), { varyByAuth: false }) });

        // Page Export Routes (Must be before Generic Page Route)
        this.routes.push({ definition: getUserPageMarkdownRoute, handler: handleGetUserPageMarkdown });
        this.routes.push({ definition: getUserPageHtmlRoute, handler: handleGetUserPageHtml });
        this.routes.push({ definition: getUserPagePdfRoute, handler: handleGetUserPagePdf });
        this.routes.push({ definition: getUserPageJsonRoute, handler: handleGetUserPageJson });

        // HTML Injection Routes
        this.routes.push({ definition: getHomeRoute, handler: this.handleGetHome.bind(this) });
        this.routes.push({ definition: getProfilePageRoute, handler: (c: Context) => handleGetProfilePage(c, this.boss) });
        this.routes.push({ definition: getUserPageRoute, handler: handleGetPageMeta });
        //this.routes.push({ definition: getOrgUserPageRoute, handler: CachedHandler(handleGetPageMeta) });

        // Content Routes
        this.routes.push({ definition: getUserPageContentRoute, handler: handleGetPageContent });
        this.routes.push({ definition: getOrgUserPageContentRoute, handler: handleGetPageContent });

        // Embed Routes
        this.routes.push({ definition: getEmbedRoute, handler: CachedHandler((c: Context) => handleGetEmbed(c, this.boss)) });
        this.routes.push({ definition: getEmbedPageRoute, handler: CachedHandler(handleGetEmbedPage) });

        // Site Info Route
        this.routes.push({ definition: getSiteInfoRoute, handler: handleGetSiteInfo });

        // Sitemap Route
        this.routes.push({ definition: getSitemapRoute, handler: handleGetSitemap });

        // Category CRUD Routes
        this.routes.push({ definition: getCategoriesRoute, handler: handleGetCategories });
        this.routes.push({ definition: getCategoryRoute, handler: handleGetCategory });
        this.routes.push({ definition: createCategoryRoute, handler: handleCreateCategory });
        this.routes.push({ definition: updateCategoryRoute, handler: handleUpdateCategory });
        this.routes.push({ definition: deleteCategoryRoute, handler: handleDeleteCategory });

        // Types Routes
        this.routes.push({ definition: getTypesRoute, handler: handleGetTypes });
        this.routes.push({ definition: getTypeRoute, handler: handleGetType });
        this.routes.push({ definition: createTypeRoute, handler: handleCreateType });
        this.routes.push({ definition: updateTypeRoute, handler: handleUpdateType });
        this.routes.push({ definition: deleteTypeRoute, handler: handleDeleteType });

        // Layouts Routes
        this.routes.push({ definition: getLayoutsRoute, handler: handleGetLayouts });
        this.routes.push({ definition: getLayoutRoute, handler: handleGetLayout });
        this.routes.push({ definition: createLayoutRoute, handler: handleCreateLayout });
        this.routes.push({ definition: updateLayoutRoute, handler: handleUpdateLayout });
        this.routes.push({ definition: deleteLayoutRoute, handler: handleDeleteLayout });
    }

    async handleGetApiFeed(c: Context) {
        try {
            const { fetchFeedPostsServer } = await import('./db/index.js');
            const { supabase } = await import('../../commons/supabase.js');

            const page = parseInt(c.req.query('page') || '0');
            const limit = parseInt(c.req.query('limit') || '30');
            const source = c.req.query('source') as any || 'home';
            const sourceId = c.req.query('sourceId');
            const isOrgContext = c.req.query('isOrgContext') === 'true';
            const orgSlug = c.req.query('orgSlug');
            const sizesStr = c.req.query('sizes');
            const formatsStr = c.req.query('formats');
            const sortBy = c.req.query('sortBy') as 'latest' | 'top' || 'latest';
            const cache = c.req.query('cache') !== 'false';

            // Extract category filters
            const categoryIdsParam = c.req.query('categoryIds');
            const categorySlugsParam = c.req.query('categorySlugs');
            const categoryIds = categoryIdsParam ? categoryIdsParam.split(',') : undefined;
            const categorySlugs = categorySlugsParam ? categorySlugsParam.split(',') : undefined;

            // Extract user ID from Auth Token (if present)
            let userId: string | undefined;
            const authHeader = c.req.header('Authorization');
            if (authHeader) {
                const token = authHeader.replace('Bearer ', '');
                const { data: { user } } = await supabase.auth.getUser(token);
                userId = user?.id;
            }

            // Cache Key
            let cacheKey = `${source}-feed`;
            if (sourceId) cacheKey += `-${sourceId}`;
            if (isOrgContext) cacheKey += `-org-${orgSlug}`;
            if (userId) cacheKey += `-u${userId}`;
            cacheKey += `-p${page}-l${limit}-sort-${sortBy}`;
            if (sizesStr) cacheKey += `-s${sizesStr}`;
            if (formatsStr) cacheKey += `-f${formatsStr}`;
            if (categoryIds) cacheKey += `-catIds${categoryIds.join(',')}`;
            if (categorySlugs) cacheKey += `-catSlugs${categorySlugs.join(',')}`;

            const { getCache } = await import('../../commons/cache/index.js');
            const cacheInstance = getCache();

            if (cache) {
                const cached = await cacheInstance.get(cacheKey);
                if (cached) {
                    c.header('X-Cache', 'HIT');
                    return c.json(cached);
                }
            }

            const data = await fetchFeedPostsServer(supabase, {
                page,
                limit,
                sizesStr,
                formatsStr,
                sortBy,
                sourceId,
                isOrgContext,
                orgSlug,
                userId,
                categoryIds,
                categorySlugs
            }, this.boss);

            if (cache) {
                await cacheInstance.set(cacheKey, data, 100);
            }

            c.header('X-Cache', 'MISS');
            return c.json(data);
        } catch (err: any) {
            logger.error({ err }, 'API Feed fetch failed');
            return c.text('Internal Server Error', 500);
        }
    }
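
    // For reference, an authenticated request like
    //   GET /api/feed?source=user&sourceId=42&page=2&sortBy=top
    // from user "ab12" (IDs hypothetical) yields the cache key:
    //   'user-feed-42-uab12-p2-l30-sort-top'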

    async handleGetApiPostDetails(c: Context) {
        try {
            const id = c.req.param('id');
            const sizesStr = c.req.query('sizes');
            const formatsStr = c.req.query('formats');

            // Extract user ID from Auth Token (if present)
            let userId: string | undefined;
            const authHeader = c.req.header('Authorization');
            if (authHeader) {
                const token = authHeader.replace('Bearer ', '');
                const { data: { user } } = await supabase.auth.getUser(token);
                userId = user?.id;
            }

            const data = await fetchPostDetailsServer(supabase, id, { sizesStr, formatsStr, userId }, this.boss);

            if (!data) {
                return c.json({ error: 'Post not found' }, 404);
            }

            return c.json(data);
        } catch (err: any) {
            logger.error({ err }, 'API Post Details fetch failed');
            return c.text('Internal Server Error', 500);
        }
    }

    async handleGetApiMediaItems(c: Context) {
        try {
            const idsParam = c.req.query('ids');
            if (!idsParam) {
                return c.json({ error: 'ids parameter is required' }, 400);
            }

            const ids = idsParam.split(',').filter(Boolean);
            const maintainOrder = c.req.query('maintainOrder') === 'true';
            const sizesStr = c.req.query('sizes');
            const formatsStr = c.req.query('formats');

            const { fetchMediaItemsByIdsServer } = await import('./db/db-posts.js');
            const data = await fetchMediaItemsByIdsServer(
                supabase,
                ids,
                { maintainOrder, sizesStr, formatsStr },
                this.boss
            );

            return c.json(data);
        } catch (err: any) {
            logger.error({ err }, 'API Media Items fetch failed');
            return c.text('Internal Server Error', 500);
        }
    }

    async handlePostFlushCache(c: Context) {
        try {
            const cache = getCache();

            if (cache) {
                // 1. Flush Memory/Redis Cache
                await cache.flush();
            }

            // 2. Flush Category Cache
            const { flushCategoryCache } = await import('./db/db-categories.js');
            flushCategoryCache();

            // 3. Flush Posts Cache
            const { flushPostsCache } = await import('./db/db-posts.js');
            flushPostsCache();

            // 4. Flush Image Disk Cache
            try {
                // Check that the directory exists first to avoid an error
                await fs.access(CACHE_DIR);
                await fs.rm(CACHE_DIR, { recursive: true, force: true });
                await fs.mkdir(CACHE_DIR, { recursive: true });
            } catch (err) {
                // If it doesn't exist, just create it
                await fs.mkdir(CACHE_DIR, { recursive: true });
            }

            logger.info('Cache flushed (Content + Images)');
            return c.json({ success: true, message: 'Cache flushed successfully' });
        } catch (err: any) {
            logger.error({ err }, 'Cache flush failed');
            return c.json({ success: false, message: 'Failed to flush cache' }, 500);
        }
    }

    async handleInvalidateCache(c: Context) {
        try {
            const body = await c.req.json();
            const paths = body.paths as string[];

            const cache = getCache();

            let count = 0;
            if (Array.isArray(paths)) {
                for (const path of paths) {
                    // Pattern: "auto-cache:GET:/api/user-page/UID/SLUG"
                    // We assume the client sends the path component.
                    const pattern = `auto-cache:GET:${path}`;
                    await cache.flush(pattern);
                    count++;
                }
            }
            logger.info({ count, paths }, 'Cache invalidated by path');
            return c.json({ success: true, count });
        } catch (err: any) {
            logger.error({ err }, 'Cache invalidation failed');
            return c.json({ success: false, message: 'Invalidation failed' }, 500);
        }
    }
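
    // Client-side sketch (hypothetical path and token): the path must match
    // what the default key generator recorded for the original GET request.
    //   await fetch('/api/cache/invalidate', {
    //       method: 'POST',
    //       headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${token}` },
    //       body: JSON.stringify({ paths: ['/api/user-page/123/my-slug'] }),
    //   });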

    async handleGetHome(c: Context) {
        try {
            // 1. Load HTML Template
            const html = await loadTemplate();
            if (!html) return c.text('handleGetHome: Template not found', 500);

            // 2. Check Injection Flag
            if (process.env.INJECT_FEED === 'true') {
                const sortBy = c.req.query('sortBy') as 'latest' | 'top' || 'latest';

                // Reuse feed logic: page 0, limit 30 (defaults)
                const { data } = await this.getFeedData({ page: 0, limit: 30, sortBy, useCache: false });
                const injectionScript = {
                    id: 'initial-state',
                    content: `window.__INITIAL_STATE__ = window.__INITIAL_STATE__ || {}; window.__INITIAL_STATE__.feed = ${safeStringify(data)};`
                };

                const injected = inject(html, { scripts: [injectionScript] });
                // Ensure we return the INJECTED html
                return c.html(injected);
            }

            // Return plain HTML if injection is disabled
            return c.html(html);
        } catch (err) {
            logger.error({ err }, 'Home page injection failed');
            const html = await loadTemplate();
            return c.html(html || 'Error');
        }
    }

    private async getFeedData(options: { page: number, limit: number, sizesStr?: string, formatsStr?: string, sortBy?: 'latest' | 'top', useCache?: boolean }) {
        const { page, limit, sizesStr, formatsStr, sortBy = 'latest', useCache } = options;
        const start = page * limit;
        const end = start + limit - 1;

        const cache = getCache();

        // Cache key includes params
        let cacheKey = `home-feed-p${page}-l${limit}-sort-${sortBy}`;
        if (sizesStr) cacheKey += `-s${sizesStr}`;
        if (formatsStr) cacheKey += `-f${formatsStr}`;

        if (useCache === true) { // Default false for now, to debug
            const cached = await cache.get(cacheKey);
            if (cached) {
                return { data: cached, _cacheHit: true };
            }
        }

        // 1. Fetch Posts & Pages in Parallel
        const [postsRes, pagesRes] = await Promise.all([
            supabase
                .from('posts')
                .select('*, pictures(*)')
                .order('created_at', { ascending: false })
                .range(start, end),
            supabase
                .from('pages')
                .select('*')
                .eq('is_public', true)
                .eq('visible', true)
                .order('created_at', { ascending: false })
                .range(start, end)
        ]);

        if (postsRes.error) throw postsRes.error;

        const posts = postsRes.data || [];
        const pages = pagesRes.data || [];

        // 1.5. Extract Page Images (to find a display image for pages)
        const pageImageIds: string[] = [];
        const pageIdToImageId = new Map<string, string>();
        const pageIdToDirectUrl = new Map<string, string>();

        pages.forEach((page: any) => {
            let content = page.content;
            if (typeof content === 'string') {
                try { content = JSON.parse(content); } catch (e) { }
            }

            // DEBUG (disabled): dump page content to a file if needed
            // await fs.writeFile(path.join(process.cwd(), 'debug_page_content.json'), JSON.stringify(content, null, 2));

            const candidates = extractDisplayImage(content);
            if (candidates) {
                if (candidates.pictureId) {
                    pageImageIds.push(candidates.pictureId);
                    pageIdToImageId.set(page.id, candidates.pictureId);
                } else if (candidates.imageUrl) {
                    pageIdToDirectUrl.set(page.id, candidates.imageUrl);
                }
            }
        });

        const pageImages = new Map<string, string>();
        if (pageImageIds.length > 0) {
            logger.debug({ pageImageIds }, 'Fetching page images');
            const { data: pics, error: picsError } = await supabase
                .from('pictures')
                .select('id, image_url')
                .in('id', pageImageIds);

            if (picsError) {
                logger.error({ err: picsError }, 'Failed to fetch page images');
            } else {
                logger.debug({ count: pics?.length }, 'Fetched page images');
                pics?.forEach((p: any) => pageImages.set(p.id, p.image_url));
            }
        }

        // 1.6. Fetch Category Paths for Pages
        const categoryPathsMap = await fetchCategoryPathsForPages(supabase, pages);

        // 2. Transform Pages to FeedPost format
        const transformedPages = pages.map((page: any) => {
            let displayImage = "https://picsum.photos/640";
            const requiredPicId = pageIdToImageId.get(page.id);
            const directUrl = pageIdToDirectUrl.get(page.id);

            if (requiredPicId) {
                const foundUrl = pageImages.get(requiredPicId);
                if (foundUrl) displayImage = foundUrl;
            } else if (directUrl) {
                displayImage = directUrl;
            }

            const virtualPic = {
                id: page.id,
                picture_id: page.id,
                title: page.title,
                description: null,
                image_url: displayImage,
                thumbnail_url: null,
                type: 'page-intern',
                meta: { slug: page.slug },
                created_at: page.created_at,
                user_id: page.owner,
                likes_count: 0,
                comments: [{ count: 0 }],
                visible: true,
                is_selected: false,
                position: 0
            };

            // Get category paths for this page
            const categoryPaths = categoryPathsMap.get(page.id) || [];

            return {
                id: page.id,
                title: page.title,
                description: null,
                created_at: page.created_at,
                user_id: page.owner,
                pictures: [virtualPic],
                cover: virtualPic,
                likes_count: 0,
                comments_count: 0,
                type: 'page-intern',
                category_paths: categoryPaths
            };
        });

        // 3. Merge Posts and Pages
        const allItems = [...posts, ...transformedPages];
        const userIds = Array.from(new Set(allItems.map((p: any) => p.user_id || p.owner).filter(Boolean)));

        let profilesMap: Record<string, any> = {};
        if (userIds.length > 0) {
            const { data: profiles } = await supabase
                .from('profiles')
                .select('user_id, username, display_name, avatar_url')
                .in('user_id', userIds);

            if (profiles) {
                profilesMap = profiles.reduce((acc, p) => ({ ...acc, [p.user_id]: p }), {});
            }
        }

        let augmentedPosts = await augmentPosts(supabase, allItems, profilesMap, { sizesStr, formatsStr }, this.boss);

        // 4. Apply Client-Side Sort/Cover Logic
        augmentedPosts = applyClientSortAndCovers(augmentedPosts, sortBy);

        // Cache for 60 seconds
        await cache.set(cacheKey, augmentedPosts, 60);

        return { data: augmentedPosts, _cacheHit: false };
    }

    async onStart(boss?: PgBoss) {
        this.boss = boss;

        // Example: Register a custom handler
        if (process.env.ENABLE_WEBSOCKETS === 'true') {
            const { WebSocketManager } = await import('../../commons/websocket.js');
            WebSocketManager.getInstance().registerHandler('serving-echo', (ws, payload) => {
                ws.send(JSON.stringify({ type: 'serving-echo-response', original: payload }));
            });
        }
    }
}
packages/shared/src/server/products/serving/logger.ts (Normal file, 30 lines)
@ -0,0 +1,30 @@
import pino from 'pino';
import path from 'path';

const logFile = path.join(process.cwd(), 'logs', 'serving.json');

const fileTransport = pino.transport({
    target: 'pino/file',
    options: { destination: logFile, mkdir: true }
});

const consoleTransport = pino.transport({
    target: 'pino-pretty',
    options: {
        colorize: true,
        ignore: 'pid,hostname',
        destination: 1,
    },
});

export const logger = pino(
    {
        level: process.env.PINO_LOG_LEVEL || 'info',
        base: { product: 'serving' },
        timestamp: pino.stdTimeFunctions.isoTime,
    },
    pino.multistream([
        { stream: fileTransport, level: 'info' },
        { stream: consoleTransport, level: 'info' },
    ])
);
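
// Usage follows standard pino conventions: structured context goes first so it
// lands in logs/serving.json and is pretty-printed on the console, e.g.
//   logger.info({ route: '/feed.xml', ms: 42 }, 'RSS feed generated');
//   logger.error({ err }, 'Secrets fetch failed');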
packages/shared/src/server/products/serving/markdown-generator.ts (Normal file, 99 lines)
@ -0,0 +1,99 @@

interface PageContent {
    pages?: Record<string, any>;
    containers?: any[];
    widgets?: any[];
}

export const generateMarkdownFromPage = (page: any, authorName?: string): string => {
    const content = page.content;
    const title = page.title || 'Untitled';
    const slug = page.slug || 'page';

    // 1. Process Content
    let markdownBody = '';

    try {
        if (!content) {
            markdownBody = '';
        } else if (typeof content === 'string') {
            markdownBody = content;
        } else {
            // Determine the content root
            let root = content;
            if (content.pages) {
                // Try to find the page by ID, or take the first one
                const pageIdKey = `page-${page.id}`;
                if (content.pages[pageIdKey]) {
                    root = content.pages[pageIdKey];
                } else {
                    // Fallback: take the first key
                    const keys = Object.keys(content.pages);
                    if (keys.length > 0) root = content.pages[keys[0]];
                }
            }

            // Traverse containers
            if (root.containers && Array.isArray(root.containers)) {
                root.containers.forEach((container: any) => {
                    if (container.widgets && Array.isArray(container.widgets)) {
                        container.widgets.forEach((widget: any) => {
                            if (widget.widgetId === 'markdown-text' && widget.props && widget.props.content) {
                                markdownBody += widget.props.content + '\n\n';
                            }
                        });
                    }
                });
            } else if (root.widgets && Array.isArray(root.widgets)) { // Fallback for a simple structure
                root.widgets.forEach((widget: any) => {
                    if (widget.widgetId === 'markdown-text' && widget.props && widget.props.content) {
                        markdownBody += widget.props.content + '\n\n';
                    }
                });
            }
        }
    } catch (e) {
        console.error('Error parsing page content for markdown generation:', e);
        markdownBody = JSON.stringify(content, null, 2); // Fallback
    }

    // 2. Generate TOC
    const getSlug = (text: string) => text.toLowerCase().replace(/[^\w\s-]/g, '').replace(/\s+/g, '-');
    const lines = markdownBody.split('\n');
    let toc = '# Table of Contents\n\n';
    let hasHeadings = false;

    lines.forEach(line => {
        // Determine the header level
        const match = line.match(/^(#{1,3})\s+(.+)/);
        if (match) {
            hasHeadings = true;
            const level = match[1].length;
            const text = match[2];
            const headerSlug = getSlug(text);
            const indent = ' '.repeat(level - 1);
            toc += `${indent}- [${text}](#${headerSlug})\n`;
        }
    });

    let finalContent = markdownBody;
    if (hasHeadings) {
        finalContent = `${toc}\n---\n\n${markdownBody}`;
    }

    // 3. Add Frontmatter (optional, but good for context)
    // Matches the "Export Astro" / general metadata style
    const safeTitle = title.replace(/"/g, '\\"');
    const dateStr = new Date().toISOString().split('T')[0];

    const frontMatter = `---
title: "${safeTitle}"
slug: "${slug}"
date: "${dateStr}"
author: "${authorName || page.owner || 'unknown'}"
---

`;

    return frontMatter + finalContent;
};
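
// Example (hypothetical page): one markdown widget with a heading yields
// frontmatter, a generated TOC, and then the body.
//   generateMarkdownFromPage({
//       id: 'p1', title: 'Hello', slug: 'hello', owner: 'u1',
//       content: { containers: [{ widgets: [{ widgetId: 'markdown-text', props: { content: '# Intro\nText' } }] }] }
//   });
//   // => '---\ntitle: "Hello"\n...' then '# Table of Contents\n\n- [Intro](#intro)\n' then the body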
packages/shared/src/server/products/serving/renderer.ts (Normal file, 201 lines)
@ -0,0 +1,201 @@
import { Context } from 'hono';
import fs from 'fs/promises';
import path from 'path';
import { logger } from '@/commons/logger.js';

export const loadTemplate = async (filename: string = 'index.html'): Promise<string | null> => {
    // Templates are cached per filename in a simple Map (currently only
    // index.html and embed.html are loaded); see loadTemplateInternal below.
    return loadTemplateInternal(filename);
};

// Internal cache map
const templateCache = new Map<string, { content: string, time: number }>();
const CACHE_TTL = 60000; // 1 minute

const loadTemplateInternal = async (filename: string): Promise<string | null> => {
    const now = Date.now();
    const cached = templateCache.get(filename);

    if (cached && (now - cached.time < CACHE_TTL)) {
        return cached.content;
    }

    const distPath = process.env.CLIENT_DIST_PATH || path.join(process.cwd(), '../dist');
    // embed.html lives in its own Vite output directory (outDir: 'dist/client/embed'),
    // while the standard build goes to 'dist'. Look in distPath first and fall
    // back to the known embed locations below.
    let filePath = path.join(distPath, filename);

    // Special handling for embed.html, which sits in a subdirectory
    if (filename === 'embed.html') {
        // Try the exact known location first
        filePath = path.resolve(process.cwd(), '../dist/client/embed/embed.html');
    }

    try {
        const content = await fs.readFile(filePath, 'utf-8');
        templateCache.set(filename, { content, time: now });
        return content;
    } catch (err) {
        logger.warn({ err, filePath, cwd: process.cwd() }, `Failed to load template ${filename} at primary path.`);

        // Fallback attempts
        const fallbackPaths = [
            path.join(distPath, 'client/embed', filename),
            path.join(process.cwd(), '../dist/client/embed', filename),
            path.join(process.cwd(), 'dist/client/embed', filename), // In case the server cwd is the repo root
        ];

        for (const altPath of fallbackPaths) {
            try {
                const content = await fs.readFile(altPath, 'utf-8');
                templateCache.set(filename, { content, time: now });
                logger.info({ altPath }, `Found template ${filename} at fallback path.`);
                return content;
            } catch (e) {
                // Try the next fallback path
            }
        }

        logger.error({ filename, attemptedPaths: [filePath, ...fallbackPaths] }, `Template ${filename} could not be found anywhere.`);
        return null;
    }
};
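
// Call-site behavior sketch: the first call hits disk, later calls within the
// 60s TTL come from templateCache.
//   const indexHtml = await loadTemplate();             // reads <dist>/index.html
//   const embedHtml = await loadTemplate('embed.html'); // resolved via the embed paths
//   const again = await loadTemplate();                 // cache hit within the TTL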

export interface PageMeta {
    title?: string;
    description?: string;
    image?: string;
    jsonLd?: any;
    scripts?: { content: string; id?: string }[];
}

import { load } from 'cheerio';

export const inject = (html: string, meta: PageMeta): string => {
    const $ = load(html);

    if (meta.title) {
        $('title').text(meta.title);
        $('meta[property="og:title"]').attr('content', meta.title);
        $('meta[name="twitter:title"]').attr('content', meta.title);
    }

    if (meta.description) {
        $('meta[name="description"]').attr('content', meta.description);
        $('meta[property="og:description"]').attr('content', meta.description);
    }

    if (meta.image) {
        $('meta[property="og:image"]').attr('content', meta.image);
        $('meta[name="twitter:image"]').attr('content', meta.image);
        // Ensure the twitter card is set to summary_large_image
        const twitterCard = $('meta[name="twitter:card"]');
        if (twitterCard.length) {
            twitterCard.attr('content', 'summary_large_image');
        } else {
            $('head').append('<meta name="twitter:card" content="summary_large_image" />');
        }
    }

    if (meta.jsonLd) {
        // Remove any existing JSON-LD
        $('script[type="application/ld+json"]').remove();

        // Append the new JSON-LD
        const script = `<script type="application/ld+json">${JSON.stringify(meta.jsonLd)}</script>`;
        $('head').append(script);
    }

    if (meta.scripts && meta.scripts.length > 0) {
        meta.scripts.forEach(script => {
            const scriptTag = `<script type="text/javascript"${script.id ? ` id="${script.id}"` : ''}>${script.content}</script>`;
            $('head').append(scriptTag);
        });
    }

    return $.html();
};
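
// Usage sketch (hypothetical values): rewrite a template's head for a post.
//   const page = inject(html, {
//       title: 'My Post',
//       description: 'A short summary',
//       image: 'https://cdn.example.com/cover.jpg',
//       jsonLd: { '@context': 'https://schema.org', '@type': 'Article', headline: 'My Post' }
//   });
//   // <title>, og:*, twitter:* and the JSON-LD block now reflect the post.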

export const extractDisplayImage = (content: any): { pictureId?: string, imageUrl?: string } | null => {
    let candidate: { pictureId?: string, imageUrl?: string } | null = null;

    // Helper to extract an image from markdown
    const getMarkdownImage = (text: string): string | null => {
        const match = text.match(/!\[.*?\]\((.*?)\)/);
        return match ? match[1] : null;
    };

    const traverse = (node: any): boolean => { // return true to stop
        if (!node) return false;

        // Check for Photo Card
        if (node.widgetId === 'photo-card' && node.props?.pictureId) {
            candidate = { pictureId: node.props.pictureId };
            return true; // Found high priority, stop
        }

        // Check for Gallery Widget
        if (node.widgetId === 'gallery-widget' && node.props?.pictureIds && Array.isArray(node.props.pictureIds) && node.props.pictureIds.length > 0) {
            candidate = { pictureId: node.props.pictureIds[0] };
            return true; // Found a gallery; use its first image and stop
        }

        // Check for Image Widget (explicit)
        if (node.widgetId === 'image' && node.props?.src) {
            candidate = { imageUrl: node.props.src };
            return true; // Found an explicit image, stop (matches the legacy first-found behavior)
        }

        // Check for Markdown Text (fallback)
        if (node.widgetId === 'markdown-text' && node.props?.content) {
            const imgUrl = getMarkdownImage(node.props.content);
            if (imgUrl && !candidate) {
                // Only use markdown if we don't have a candidate yet
                candidate = { imageUrl: imgUrl };
            }
        }

        if (Array.isArray(node)) {
            for (const child of node) {
                if (traverse(child)) return true;
            }
            return false;
        }

        if (typeof node === 'object') {
            // Generic traversal of any nested property (columns, slots, children, props, etc.).
            // The content is a JSON tree, so circular references shouldn't occur here.
            for (const key in node) {
                if (node[key] && typeof node[key] === 'object') {
                    if (traverse(node[key])) return true;
                }
            }
        }

        return false;
    };

    traverse(content);
    if (candidate) {
        logger.debug({ candidate }, '[extractDisplayImage] Match found');
    }
    return candidate;
};
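
// Priority example (hypothetical widget tree): a later photo-card replaces a
// markdown-image fallback, because only the high-priority matches stop traversal.
//   extractDisplayImage({ containers: [{ widgets: [
//       { widgetId: 'markdown-text', props: { content: '![alt](https://example.com/a.png)' } },
//       { widgetId: 'photo-card', props: { pictureId: 'pic-123' } }
//   ] }] });
//   // => { pictureId: 'pic-123' }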
packages/shared/src/server/products/serving/routes.ts (Normal file, 525 lines)
@ -0,0 +1,525 @@
import { createRoute, z } from '@hono/zod-openapi';
import { Admin, Public } from '../../commons/decorators.js';

type ServiceRouteOptions = Parameters<typeof createRoute>[0] & {
    public?: boolean;
    admin?: boolean;
};

/**
 * Factory function to create a service route with optional decorators
 */
function createServiceRoute(options: ServiceRouteOptions) {
    const { public: isPublic, admin: isAdmin, ...routeDef } = options;
    let route = createRoute(routeDef);

    if (isPublic) {
        route = Public(route);
    }

    if (isAdmin) {
        route = Admin(route);
    }

    return route;
}

export function createRouteBody(
    method: string,
    path: string,
    tags: string[],
    summary: string,
    description: string,
    request: any,
    responses: any,
    publicRoute: boolean = true,
    adminRoute: boolean = false) {
    return createServiceRoute({
        method: method as any,
        path,
        tags,
        summary,
        description,
        request,
        responses,
        public: publicRoute,
        admin: adminRoute
    })
}

export const getFeedRoute = createRouteBody(
    'get',
    '/feed.xml',
    ['Serving'],
    'Get RSS Feed',
    'Returns the latest posts as an RSS 2.0 feed.',
    undefined,
    {
        200: {
            description: 'RSS Feed',
            content: {
                'application/xml': {
                    schema: z.string()
                }
            }
        }
    }
);

export const getMerchantFeedRoute = createRouteBody(
    'get',
    '/products.xml',
    ['Serving'],
    'Get Merchant Feed',
    'Returns the latest products as a Google Merchant XML feed.',
    undefined,
    {
        200: {
            description: 'XML Feed',
            content: {
                'application/xml': {
                    schema: z.string()
                }
            }
        }
    }
);

export const getLLMTextRoute = createRouteBody(
    'get',
    '/llms.txt',
    ['Serving'],
    'Get LLM Summary',
    'Returns a Markdown summary of content for AI agents.',
    undefined,
    {
        200: {
            description: 'Markdown Text',
            content: {
                'text/plain': {
                    schema: z.string()
                }
            }
        }
    }
);

export const getPostMetaRoute = createRouteBody(
    'get',
    '/post/:id',
    ['Serving'],
    'Get Post with Metadata',
    'Serves the React app HTML with injected Open Graph metadata for the specific post.',
    {
        params: z.object({
            id: z.string()
        })
    },
    {
        200: {
            description: 'HTML Page',
            content: {
                'text/html': {
                    schema: z.string()
                }
            }
        },
        404: {
            description: 'Post not found (serves the default HTML)',
        }
    }
);

export const getHomeRoute = createRouteBody(
    'get',
    '/',
    ['Serving'],
    'Get Home Page',
    'Serves the home page with injected feed data.',
    undefined,
    {
        200: {
            description: 'HTML Page',
            content: {
                'text/html': {
                    schema: z.string()
                }
            }
        }
    }
);

export const getProfilePageRoute = createRouteBody(
    'get',
    '/profile/:id',
    ['Serving'],
    'Get Profile Page',
    'Serves the profile page with injected profile data.',
    {
        params: z.object({
            id: z.string()
        })
    },
    {
        200: {
            description: 'HTML Page',
            content: {
                'text/html': {
                    schema: z.string()
                }
            }
        }
    }
);

export const getApiPostDetailsRoute = createRouteBody(
    'get',
    '/api/posts/:id',
    ['Posts'],
    'Get Post Details',
    'Get Post Details', // Missing description; summary reused
    {
        params: z.object({
            id: z.string()
        }),
        query: z.object({
            sizes: z.string().optional().openapi({ description: 'Responsive sizes' }),
            formats: z.string().optional().openapi({ description: 'Responsive formats' })
        })
    },
    {
        200: {
            description: 'Post Details',
            content: {
                'application/json': {
                    schema: z.any()
                }
            }
        },
        404: {
            description: 'Post not found'
        }
    },
    true
);

export const getApiFeedRoute = createRouteBody(
    'get',
    '/api/feed',
    ['Feed'],
    'Get JSON Feed',
    'Get JSON Feed', // Missing description; summary reused
    {
        query: z.object({
            page: z.string().optional().default('0'),
            limit: z.string().optional().default('30'),
            source: z.string().optional().default('home').openapi({
                description: 'Feed source (home, user, etc)'
            }),
            sourceId: z.string().optional().openapi({
                description: 'ID for the source (userId, etc)'
            }),
            isOrgContext: z.enum(['true', 'false']).optional().openapi({
                description: 'Is Organization Context'
            }),
            orgSlug: z.string().optional().openapi({
                description: 'Organization Slug'
            }),
            cache: z.enum(['true', 'false']).optional().default('true').openapi({
                description: 'Set to false to bypass the server cache'
            }),
            sizes: z.string().optional().openapi({
                description: 'Comma-separated list of widths (e.g. "320,640,1024"). Default: "[320, 640, 1024]"'
            }),
            formats: z.string().optional().openapi({
                description: 'Comma-separated list of formats (e.g. "avif,webp"). Default: "avif,webp"'
            })
        })
    },
    {
        200: {
            description: 'JSON Feed',
            content: {
                'application/json': {
                    schema: z.any()
                }
            }
        }
    },
    true // public
);

export const getProfileRoute = createRouteBody(
    'get',
    '/api/profile/:id',
    ['Users'],
    'Get Profile',
    'Get Profile',
    {
        params: z.object({
            id: z.string()
        })
    },
    {
        200: {
            description: 'Profile Data',
            content: {
                'application/json': {
                    schema: z.any()
                }
            }
        }
    },
    false // private
);

export const getSecretsRoute = createRouteBody(
    'get',
    '/api/me/secrets',
    ['Users'],
    'Get User Secrets',
    'Get User Secrets',
    undefined,
    {
        200: {
            description: 'User Secrets',
            content: {
                'application/json': {
                    schema: z.any()
                }
            }
        },
        401: {
            description: 'Unauthorized'
        }
    },
    false // private
);

export const postFlushCacheRoute = createRouteBody(
    'post',
    '/api/flush-cache',
    ['Posts'],
    'Flush Cache',
    'Flushes the server-side content cache and the disk-based image cache.',
    undefined,
    {
        200: {
            description: 'Cache Flushed',
            content: {
                'application/json': {
                    schema: z.object({
                        success: z.boolean(),
                        message: z.string()
                    })
                }
            }
        },
        500: {
            description: 'Internal Server Error'
        }
    },
    false, // not public
    true // admin
);

export const invalidateCacheRoute = createRouteBody(
    'post',
    '/api/cache/invalidate',
    ['System'],
    'Invalidate Cache by Path',
    'Invalidates cache keys matching the provided paths.',
    {
        body: {
            content: {
                'application/json': {
                    schema: z.object({
                        paths: z.array(z.string()).openapi({
                            description: 'List of URL paths to invalidate (e.g. /api/user-page/123/slug)'
                        })
                    })
                }
            }
        }
    },
    {
        200: {
            description: 'Cache Invalidated',
            content: {
                'application/json': {
                    schema: z.object({
                        success: z.boolean(),
                        count: z.number()
                    })
                }
            }
        }
    },
    false, // not public
    false // not admin-only (ideally authed users invalidate their own content; for now just authed)
);

export const getProfilesRoute = createRouteBody(
    'get',
    '/api/profiles',
    ['Users'],
    'Get Batch Profiles',
    'Get Batch Profiles',
    {
        query: z.object({
            ids: z.string().openapi({
                description: 'Comma-separated list of user IDs'
            })
        })
    },
    {
        200: {
            description: 'Profiles Map',
            content: {
                'application/json': {
                    schema: z.record(z.string(), z.any())
                }
            }
        }
    },
    true // public
);

export const getApiMediaItemsRoute = createRouteBody(
    'get',
    '/api/media-items',
    ['Media'],
    'Get Media Items by IDs',
    'Fetches multiple media items by their IDs using the server-side cache for optimal performance.',
    {
        query: z.object({
            ids: z.string().openapi({
                description: 'Comma-separated list of picture IDs'
            }),
            maintainOrder: z.enum(['true', 'false']).optional().openapi({
                description: 'Maintain the order of IDs in the response'
            }),
            sizes: z.string().optional().openapi({
                description: 'Comma-separated list of widths for responsive images'
            }),
            formats: z.string().optional().openapi({
                description: 'Comma-separated list of formats for responsive images'
            })
        })
    },
    {
        200: {
            description: 'Array of Media Items',
            content: {
                'application/json': {
                    schema: z.array(z.any())
                }
            }
        }
    },
    true // public
);

export const getEmbedRoute = createRouteBody(
    'get',
    '/embed/:id',
    ['Serving'],
    'Get Embed Page',
    'Serves the embed page with injected post data.',
    {
        params: z.object({
            id: z.string()
        })
    },
    {
        200: {
            description: 'HTML Page',
            content: {
                'text/html': {
                    schema: z.string()
                }
            }
        },
        404: {
            description: 'Post not found',
        }
    }
);

export const getSiteInfoRoute = createRouteBody(
    'get',
    '/api/serving/site-info',
    ['Serving'],
    'Get Site Information',
    'Extracts metadata (Open Graph, JSON-LD, etc.) from a given URL.',
    {
        query: z.object({
            url: z.string().openapi({
                description: 'The URL to extract information from',
                example: 'https://example.com'
            })
        })
    },
    {
        200: {
            description: 'Site Information',
            content: {
                'application/json': {
                    schema: z.object({
                        title: z.string().optional(),
                        description: z.string().optional(),
                        url: z.string().optional(),
                        siteName: z.string().optional(),
                        favicon: z.string().optional(),
                        og: z.record(z.string(), z.string().optional()).optional(),
                        images: z.array(z.object({
                            src: z.string(),
                            width: z.number().optional(),
                            height: z.number().optional(),
                            alt: z.string().optional()
                        })).optional(),
                        structuredData: z.array(z.any()).optional(),
                        social: z.array(z.object({
                            source: z.string(),
                            url: z.string()
                        })).optional()
                    })
                }
            }
        },
        400: {
            description: 'Invalid URL'
        },
        500: {
            description: 'Failed to extract information'
        }
    },
    true // public; a site-info scraper is usually public
);

export const getSitemapRoute = createRouteBody(
    'get',
    '/sitemap-en.xml',
    ['Serving'],
    'Get Sitemap',
    'Returns the sitemap XML for internal pages.',
    undefined,
    {
        200: {
            description: 'Sitemap XML',
            content: {
                'application/xml': {
                    schema: z.string()
                }
            }
        }
    }
);
packages/shared/src/server/products/serving/site-info.ts (Normal file, 432 lines)
@ -0,0 +1,432 @@
|
||||
import * as cheerio from 'cheerio';
import { logger } from './logger.js';

export interface SiteInfoError {
  error: string;
  details?: string;
  status?: number;
}

export interface SiteImage {
  src: string;
  width?: number;
  height?: number;
  alt?: string;
}

export interface SiteSocial {
  source: 'twitter' | 'facebook' | 'instagram' | 'linkedin' | 'youtube' | 'other';
  url: string;
}

export interface SitePageInfo {
  title: string;
  description: string;
  image: string | null;
  url: string;
}

export interface SiteInfoResult {
  // Normalized Page Info
  page: SitePageInfo;

  // Raw/Extended Data
  title?: string;
  description?: string;
  url?: string;
  siteName?: string;
  favicon?: string;

  // OpenGraph
  og?: {
    title?: string;
    description?: string;
    image?: string;
    url?: string;
    type?: string;
    site_name?: string;
    [key: string]: string | undefined;
  };

  // Images found on page
  images?: SiteImage[];

  // Structured Data (JSON-LD)
  structuredData?: any[];

  // Social Links found
  social?: SiteSocial[];
}

interface SiteInfoResolver {
  test(url: URL): boolean;
  resolve(url: URL, options: { timeout?: number }): Promise<SiteInfoResult>;
}

// --- Resolvers ---

class DefaultResolver implements SiteInfoResolver {
  test(url: URL): boolean {
    return true;
  }

  async resolve(url: URL, options: { timeout?: number }): Promise<SiteInfoResult> {
    return fetchAndParse(url, options);
  }
}

class YouTubeResolver implements SiteInfoResolver {
  test(url: URL): boolean {
    return url.hostname.includes('youtube.com') || url.hostname.includes('youtu.be');
  }

  async resolve(url: URL, options: { timeout?: number }): Promise<SiteInfoResult> {
    let videoId = this.getVideoId(url);
    // Special case for shorts
    if (!videoId && url.pathname.startsWith('/shorts/')) {
      videoId = url.pathname.slice(8); // remove the '/shorts/' prefix
    }

    if (!videoId) {
      // Fallback for channel pages etc.
      return fetchAndParse(url, options);
    }

    let oEmbedData: any = null;
    try {
      // Ref: https://www.youtube.com/oembed?url=...&format=json
      const oEmbedUrl = `https://www.youtube.com/oembed?url=${encodeURIComponent(url.href)}&format=json`;
      const response = await fetch(oEmbedUrl);
      if (response.ok) {
        oEmbedData = await response.json();
      }
    } catch (e) {
      // Ignore oEmbed failure; thumbnails can still be derived from the video ID
    }

    const title = oEmbedData?.title || 'YouTube Video';
    const description = ''; // oEmbed does not expose the video description
    const author = oEmbedData?.author_name || '';

    // Generate thumbnails
    const thumbnails = this.getThumbnails(videoId);
    const images: SiteImage[] = [
      { src: thumbnails.max, width: 1280, height: 720 }, // Try max res first
      { src: thumbnails.high, width: 480, height: 360 },
      { src: thumbnails.medium, width: 320, height: 180 },
      { src: thumbnails.default, width: 120, height: 90 }
    ];

    // Supply the best image as the page image
    const bestImage = thumbnails.max;

    const result: SiteInfoResult = {
      page: {
        title: title,
        description: description,
        image: bestImage,
        url: url.href
      },
      title: title + (author ? ` - ${author}` : ''),
      description: description,
      url: url.href,
      siteName: 'YouTube',
      images: images,
      og: {
        title: title,
        image: bestImage,
        url: url.href,
        site_name: 'YouTube',
        type: 'video.other'
      }
    };

    // oEmbedData?.html holds an embeddable iframe snippet; it is not mapped
    // into SiteInfoResult for now.

    return result;
  }

  private getVideoId(url: URL): string | null {
    if (url.hostname.includes('youtu.be')) {
      return url.pathname.slice(1);
    }
    if (url.hostname.includes('youtube.com')) {
      const v = url.searchParams.get('v');
      if (v) return v;
      // /embed/{id} and /v/{id} paths are not handled here yet
    }
    return null;
  }

  private getThumbnails(videoId: string) {
    return {
      default: `https://img.youtube.com/vi/${videoId}/default.jpg`,
      medium: `https://img.youtube.com/vi/${videoId}/mqdefault.jpg`,
      high: `https://img.youtube.com/vi/${videoId}/hqdefault.jpg`,
      max: `https://img.youtube.com/vi/${videoId}/maxresdefault.jpg`,
    };
  }
}
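One caveat with "try max res first": YouTube returns 404 for maxresdefault.jpg on videos that never had an HD upload. A hypothetical fallback helper, assuming a HEAD probe per variant is acceptable at resolve time:

// Hypothetical helper (not part of the resolver above): probe thumbnail
// variants from best to worst and return the first that actually exists.
async function pickBestThumbnail(videoId: string): Promise<string> {
  const variants = ['maxresdefault', 'hqdefault', 'mqdefault', 'default'];
  for (const v of variants) {
    const url = `https://img.youtube.com/vi/${videoId}/${v}.jpg`;
    const res = await fetch(url, { method: 'HEAD' });
    if (res.ok) return url;
  }
  return `https://img.youtube.com/vi/${videoId}/default.jpg`;
}
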

class TikTokResolver implements SiteInfoResolver {
  test(url: URL): boolean {
    return url.hostname.includes('tiktok.com');
  }

  async resolve(url: URL, options: { timeout?: number }): Promise<SiteInfoResult> {
    try {
      // Dynamic import so the dependency is only loaded when a TikTok URL
      // is actually resolved.
      const { TikTokClient } = await import('@ssut/tiktok-api');
      const client = new TikTokClient({ region: 'US' });

      const videoId = this.getVideoId(url);
      if (!videoId) {
        throw new Error('Could not extract TikTok Video ID');
      }

      const post = await client.getPost(videoId);
      if (!post) {
        throw new Error('No post data returned from TikTok API');
      }

      // getPost resolves to the raw API payload; its root object wraps
      // everything in a "data" property (see the post.json fixture).
      const data = post.data;
      if (!data) {
        throw new Error('No data property in TikTok response');
      }

      const title = data.author?.nickname || data.author?.uniqueId || 'TikTok Author';
      const description = data.desc || '';
      const image = data.video?.cover || data.music?.coverLarge || null;

      const result: SiteInfoResult = {
        page: {
          title,
          description,
          image,
          url: url.href
        },
        images: [],
        title,
        description,
        url: url.href,
        siteName: 'TikTok'
      };

      if (image) {
        result.images?.push({ src: image });
      }

      return result;

    } catch (e: any) {
      // If the API fails, fall back to a plain fetch; TikTok pages still
      // expose usable Open Graph tags in most cases.
      return fetchAndParse(url, options);
    }
  }

  private getVideoId(url: URL): string | null {
    // format: https://www.tiktok.com/@user/video/7218894668518933510
    // pathname: /@user/video/7218894668518933510
    const parts = url.pathname.split('/');
    // parts: ["", "@user", "video", "id"]
    return parts[parts.length - 1] || null;
  }
}

const REGISTRY: SiteInfoResolver[] = [
  new YouTubeResolver(),
  new TikTokResolver(),
  new DefaultResolver() // Catch-all; test() always returns true
];
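Supporting a new site means one class plus one REGISTRY entry, registered before the catch-all DefaultResolver. A hypothetical VimeoResolver sketch, assuming Vimeo's public oEmbed endpoint (https://vimeo.com/api/oembed.json):

// Hypothetical example, not in the commit: first-class Vimeo support.
class VimeoResolver implements SiteInfoResolver {
  test(url: URL): boolean {
    return url.hostname.includes('vimeo.com');
  }

  async resolve(url: URL, options: { timeout?: number }): Promise<SiteInfoResult> {
    const res = await fetch(`https://vimeo.com/api/oembed.json?url=${encodeURIComponent(url.href)}`);
    if (!res.ok) return fetchAndParse(url, options); // fall back to HTML scraping
    const data: any = await res.json();
    const title = data.title || 'Vimeo Video';
    return {
      page: { title, description: data.description || '', image: data.thumbnail_url || null, url: url.href },
      title,
      description: data.description || '',
      url: url.href,
      siteName: 'Vimeo',
      images: data.thumbnail_url
        ? [{ src: data.thumbnail_url, width: data.thumbnail_width, height: data.thumbnail_height }]
        : []
    };
  }
}
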

// --- Core Logic ---

async function fetchAndParse(targetUrl: URL, options: { timeout?: number }): Promise<SiteInfoResult> {
  const timeout = options.timeout || 10000;

  const controller = new AbortController();
  const timeoutId = setTimeout(() => controller.abort(), timeout);

  let response: Response;
  try {
    response = await fetch(targetUrl.toString(), {
      headers: {
        'User-Agent': 'Mozilla/5.0 (compatible; PolymechBot/1.0; +http://polymech.com)',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
      },
      signal: controller.signal
    });
  } finally {
    // Clear the timer even when fetch throws, so a stale abort cannot fire later
    clearTimeout(timeoutId);
  }

  if (!response.ok) {
    throw { error: `Failed to fetch URL: ${response.statusText}`, status: response.status };
  }

  const html = await response.text();
  const $ = cheerio.load(html);
  const result: SiteInfoResult = {
    page: { title: '', description: '', image: null, url: targetUrl.href },
    images: [],
    structuredData: [],
    social: [],
    og: {}
  };

  // 1. Basic Meta
  const title = $('title').text().trim();
  if (title) result.title = title;

  const description = $('meta[name="description"]').attr('content') ||
    $('meta[property="og:description"]').attr('content');
  if (description) result.description = description;

  const canonical = $('link[rel="canonical"]').attr('href');
  result.url = canonical || response.url; // Use canonical if present, else the final (post-redirect) URL
  // Update normalized url
  result.page.url = result.url;

  // Favicon
  const favicon = $('link[rel="icon"]').attr('href') ||
    $('link[rel="shortcut icon"]').attr('href') ||
    '/favicon.ico';
  if (favicon) {
    try {
      result.favicon = new URL(favicon, result.url).href;
    } catch (e) {
      // ignore invalid URLs
    }
  }

  // 2. Open Graph
  const ogProps = ['title', 'description', 'image', 'url', 'site_name', 'type'];
  ogProps.forEach(prop => {
    const val = $(`meta[property="og:${prop}"]`).attr('content');
    if (val && result.og) {
      result.og[prop] = val;
    }
  });

  // Fill top-level fields from OG if missing
  if (!result.title && result.og?.title) result.title = result.og.title;
  if (!result.description && result.og?.description) result.description = result.og.description;
  if (!result.siteName && result.og?.site_name) result.siteName = result.og.site_name;

  // 3. JSON-LD
  $('script[type="application/ld+json"]').each((_, el) => {
    try {
      const json = JSON.parse($(el).html() || '{}');
      result.structuredData?.push(json);
    } catch (e) {
      // ignore parse errors
    }
  });

  // 4. Images
  // Prioritize og:image
  if (result.og?.image) {
    result.images?.push({ src: result.og.image });
  }

  // Look for other useful images (e.g., twitter:image)
  const twitterImg = $('meta[name="twitter:image"]').attr('content');
  if (twitterImg && twitterImg !== result.og?.image) {
    result.images?.push({ src: twitterImg });
  }

  // Scan img tags (limit to the first few to avoid junk)
  $('img').slice(0, 10).each((_, el) => {
    let src = $(el).attr('src');
    if (src) {
      try {
        src = new URL(src, result.url).href;
        // Avoid duplicates
        if (!result.images?.find(i => i.src === src)) {
          result.images?.push({ src, alt: $(el).attr('alt') });
        }
      } catch (e) {
        // ignore
      }
    }
  });

  // 5. Social Links (common patterns)
  const socialPatterns: Record<string, string> = {
    'twitter.com': 'twitter',
    'facebook.com': 'facebook',
    'instagram.com': 'instagram',
    'linkedin.com': 'linkedin',
    'youtube.com': 'youtube'
  };

  $('a').each((_, el) => {
    let href = $(el).attr('href');
    if (href) {
      try {
        href = new URL(href, result.url).href;
        const hostname = new URL(href).hostname;

        for (const [domain, source] of Object.entries(socialPatterns)) {
          if (hostname.includes(domain)) {
            // Skip if already added
            if (!result.social?.find(s => s.url === href)) {
              result.social?.push({ source: source as any, url: href });
            }
            break;
          }
        }
      } catch (e) {
        // ignore
      }
    }
  });

  // Normalize Page Info
  result.page.title = result.title || '';
  result.page.description = result.description || '';
  result.page.image = result.og?.image || (result.images && result.images.length > 0 ? result.images[0].src : null);

  return result;
}

export async function extractSiteInfo(url: string, options: { timeout?: number } = {}): Promise<SiteInfoResult> {
  try {
    const targetUrl = new URL(url);

    for (const resolver of REGISTRY) {
      if (resolver.test(targetUrl)) {
        return await resolver.resolve(targetUrl, options);
      }
    }

    // Should not happen due to DefaultResolver
    throw new Error('No resolver found');

  } catch (e: any) {
    logger.error({ err: e, url }, 'Failed to extract site info');
    throw { error: 'Failed to extract site info', details: e.message, status: e.status || 500 };
  }
}
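A minimal usage sketch (the timeout value is illustrative; top-level await assumes an ESM context):

import { extractSiteInfo } from './site-info.js';

const info = await extractSiteInfo('https://example.com', { timeout: 5000 });
console.log(info.page.title);   // normalized title (empty string if none found)
console.log(info.page.image);   // best-guess preview image, or null
console.log(info.siteName);     // from og:site_name when present
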

@ -3,7 +3,7 @@ import { EventBus } from './EventBus.js';

const findProductByQueue = (queue: string) => {
  return ALL_PRODUCTS.find(p =>
    p.workers?.some(w => {
    p.workers?.some((w: any) => {
      try {
        const worker = new (w as any)();
        return worker.queueName === queue;

@ -1,13 +1,51 @@

import { createLogRoutes } from '@/commons/log-routes-factory.js';
import { createRoute, z } from '@hono/zod-openapi';
import { Public, Admin } from '../../commons/decorators.js';

export const { getRoute: getVideoLogsRoute, streamRoute: streamVideoLogsRoute } = createLogRoutes('Videos', '/api/videos/logs');

export const postVideoRoute = createRoute({
  method: 'post',
  path: '/api/videos',
  request: {
/**
 * Factory function to create a video service route with optional decorators
 */
function createRouteBody(
  method: string,
  path: string,
  tags: string[],
  summary: string,
  description: string,
  request: any,
  responses: any,
  publicRoute: boolean = false,
  adminRoute: boolean = false
) {
  let route = createRoute({
    method: method as any,
    path,
    tags,
    summary,
    description,
    request,
    responses
  });

  if (publicRoute) {
    route = Public(route);
  }

  if (adminRoute) {
    route = Admin(route);
  }

  return route;
}
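Because the public flag routes through the Public decorator, marking a route public both bypasses auth and records it in the endpoint registry. A hypothetical admin-only route illustrates the last two flags (the endpoint itself is invented for illustration):

export const purgeJobsRoute = createRouteBody(
  'delete',
  '/api/videos/jobs',            // hypothetical endpoint, not in this commit
  ['Videos'],
  'Purge Jobs',
  'Deletes all finished jobs.',
  undefined,
  { 200: { description: 'Purged' } },
  false, // not public
  true   // admin - wrapped by the Admin decorator
);
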

export const postVideoRoute = createRouteBody(
  'post',
  '/api/videos',
  ['Videos'],
  'Process Video',
  'Starts a video processing job. Accepts either a file upload or URL.',
  {
    query: z.object({
      preset: z.string().optional().openapi({ example: 'web-720p' }),
      url: z.string().optional().openapi({ example: 'https://example.com/video.mp4' })
@ -19,7 +57,7 @@ export const postVideoRoute = createRoute({
          file: z.any().optional().openapi({ type: 'string', format: 'binary' }),
        })
      },
      'application/json': { // Also allow JSON body for URL only requests if easier (hono middleware handles it?) - Stick to multipart or query for now for consistency
      'application/json': {
        schema: z.object({
          url: z.string().optional()
        })
@ -27,7 +65,7 @@ export const postVideoRoute = createRoute({
      }
    }
  },
  responses: {
  {
    202: {
      content: {
        'application/json': {
@ -40,28 +78,33 @@ export const postVideoRoute = createRoute({
        },
        description: 'Job Accepted'
      }
    }
  });
  },
  false // private - requires auth
);

export const uploadVideoRoute = createRoute({
  method: 'post',
  path: '/api/videos/upload',
  request: {
export const uploadVideoRoute = createRouteBody(
  'post',
  '/api/videos/upload',
  ['Videos'],
  'Upload Video',
  'Uploads a video file, creates a database entry, and starts processing.',
  {
    query: z.object({
      preset: z.string().optional().openapi({ example: 'web-720p' }),
      url: z.string().optional().openapi({ example: 'https://example.com/video.mp4' })
      userId: z.string().optional(),
      title: z.string().optional()
    }),
    body: {
      content: {
        'multipart/form-data': {
          schema: z.object({
            file: z.any().optional().openapi({ type: 'string', format: 'binary' }),
            file: z.any().openapi({ type: 'string', format: 'binary' }),
          })
        }
      }
    }
  },
  responses: {
  {
    200: {
      content: {
        'application/json': {
@ -81,18 +124,22 @@ export const uploadVideoRoute = createRoute({
        },
        description: 'Video Uploaded and Database Entry Created'
      }
    }
  });
  },
  false // private - requires auth
);
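A hedged client-side sketch of the upload call (field names follow the schema above; the bearer token is an assumption, since the route is private and the auth scheme is not shown here):

const form = new FormData();
form.append('file', fileBlob, 'clip.mp4'); // fileBlob: a File/Blob from an input element

const res = await fetch('/api/videos/upload?preset=web-720p&title=My%20Clip', {
  method: 'POST',
  headers: { Authorization: `Bearer ${token}` }, // assumed auth header
  body: form // the browser sets the multipart boundary automatically
});
const job = await res.json();
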

export const getJobRoute = createRoute({
  method: 'get',
  path: '/api/videos/jobs/{id}',
  request: {
export const getJobRoute = createRouteBody(
  'get',
  '/api/videos/jobs/{id}',
  ['Videos'],
  'Get Job Status',
  'Retrieves the status and details of a video processing job.',
  {
    params: z.object({
      id: z.string()
    })
  },
  responses: {
  {
    200: {
      content: {
        'application/json': {
@ -106,19 +153,26 @@ export const getJobRoute = createRoute({
        }
      },
      description: 'Job Status'
    },
    404: {
      description: 'Job not found'
    }
  }
  });
  },
  true // public - allow checking job status
);

export const getJobProgressRoute = createRoute({
  method: 'get',
  path: '/api/videos/jobs/{id}/progress',
  request: {
export const getJobProgressRoute = createRouteBody(
  'get',
  '/api/videos/jobs/{id}/progress',
  ['Videos'],
  'Stream Job Progress',
  'Server-Sent Events stream of job progress updates.',
  {
    params: z.object({
      id: z.string().openapi({ param: { name: 'id', in: 'path' }, example: '123' })
    })
  },
  responses: {
  {
    200: {
      content: {
        'text/event-stream': {
@ -130,36 +184,47 @@ export const getJobProgressRoute = createRoute({
    404: {
      description: 'Job not found'
    }
  }
  });
  },
  true // public - allow streaming progress
);
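Since the progress route is a Server-Sent Events stream, a browser can consume it with EventSource; a minimal sketch (the event payload fields are assumptions, as the schema is elided in this hunk):

const es = new EventSource(`/api/videos/jobs/${jobId}/progress`);
es.onmessage = (ev) => {
  const { percent, stage } = JSON.parse(ev.data); // assumed payload fields
  console.log(`${stage}: ${percent}%`);
  if (percent >= 100) es.close();
};
es.onerror = () => es.close(); // stop retrying once the job is gone
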

export const downloadVideoRoute = createRoute({
  method: 'get',
  path: '/api/videos/jobs/{id}/download',
  request: {
export const downloadVideoRoute = createRouteBody(
  'get',
  '/api/videos/jobs/{id}/download',
  ['Videos'],
  'Download Processed Video',
  'Downloads the processed video file with range request support.',
  {
    params: z.object({
      id: z.string()
    })
  },
  responses: {
  {
    200: {
      description: 'Video File Stream'
    },
    206: {
      description: 'Partial Content (Range Request)'
    },
    404: {
      description: 'Not Found'
    }
  }
  });
  },
  true // public - allow downloading processed videos
);
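Range support means a player can seek without downloading the whole file; a quick sketch of requesting only the first mebibyte:

const res = await fetch(`/api/videos/jobs/${jobId}/download`, {
  headers: { Range: 'bytes=0-1048575' } // first 1 MiB
});
console.log(res.status);                        // 206 when the range is honored
console.log(res.headers.get('Content-Range'));  // e.g. 'bytes 0-1048575/52428800'
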

export const getHlsPlaylistRoute = createRoute({
  method: 'get',
  path: '/api/videos/jobs/{id}/hls/playlist.m3u8',
  request: {
export const getHlsPlaylistRoute = createRouteBody(
  'get',
  '/api/videos/jobs/{id}/hls/playlist.m3u8',
  ['Videos'],
  'Get HLS Playlist',
  'Retrieves the HLS master playlist for adaptive streaming.',
  {
    params: z.object({
      id: z.string()
    })
  },
  responses: {
  {
    200: {
      content: {
        'application/vnd.apple.mpegurl': {
@ -171,19 +236,23 @@ export const getHlsPlaylistRoute = createRoute({
    404: {
      description: 'Not Found'
    }
  }
  });
  },
  true // public - HLS playlists need to be accessible for video playback
);

export const getHlsSegmentRoute = createRoute({
  method: 'get',
  path: '/api/videos/jobs/{id}/hls/{segment}',
  request: {
export const getHlsSegmentRoute = createRouteBody(
  'get',
  '/api/videos/jobs/{id}/hls/{segment}',
  ['Videos'],
  'Get HLS Segment',
  'Retrieves an HLS video segment for streaming.',
  {
    params: z.object({
      id: z.string(),
      segment: z.string()
    })
  },
  responses: {
  {
    200: {
      content: {
        'video/MP2T': {
@ -195,37 +264,44 @@ export const getHlsSegmentRoute = createRoute({
    404: {
      description: 'Not Found'
    }
  }
  });
  },
  true // public - HLS segments need to be accessible for video playback
);
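Together, the playlist and segment routes are enough for standard HLS playback; a browser-side sketch using hls.js (Safari plays the m3u8 natively via the video src):

import Hls from 'hls.js';

const video = document.querySelector('video')!;
const src = `/api/videos/jobs/${jobId}/hls/playlist.m3u8`;
if (Hls.isSupported()) {
  const hls = new Hls();
  hls.loadSource(src);
  hls.attachMedia(video);
} else {
  video.src = src; // native HLS (Safari)
}
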


export const cancelJobRoute = createRoute({
  method: 'delete',
  path: '/api/videos/jobs/{id}',
  request: {
export const cancelJobRoute = createRouteBody(
  'delete',
  '/api/videos/jobs/{id}',
  ['Videos'],
  'Cancel Job',
  'Cancels a running video processing job.',
  {
    params: z.object({
      id: z.string()
    })
  },
  responses: {
  {
    200: {
      description: 'Job Cancelled'
    },
    404: {
      description: 'Job Not Found'
    }
  }
  });
  },
  false // private - only authenticated users can cancel jobs
);

export const proxyVideoRoute = createRoute({
  method: 'get',
  path: '/api/videos/proxy',
  request: {
export const proxyVideoRoute = createRouteBody(
  'get',
  '/api/videos/proxy',
  ['Videos'],
  'Proxy Video Stream',
  'Proxies video content from external URLs with CORS support.',
  {
    query: z.object({
      url: z.string().openapi({ example: 'https://generativelanguage.googleapis.com/...' })
    })
  },
  responses: {
  {
    200: {
      description: 'Video Stream (Proxy)'
    },
@ -235,5 +311,6 @@ export const proxyVideoRoute = createRoute({
    500: {
      description: 'Proxy Error'
    }
  }
  });
  },
  true // public - allow proxying external videos
);

@ -1,33 +1,8 @@
import { z } from '@hono/zod-openapi'

export const ProductSchema = z.object({
  id: z.number().openapi({ example: 1 }),
  name: z.string().openapi({ example: 'Product A' }),
  slug: z.string().openapi({ example: 'product-a' }),
  description: z.string().optional().openapi({ example: 'Great product' }),
  price: z.number().openapi({ example: 100 }),
  variants: z.any().optional().openapi({ example: [] }),
})

export const SubscriptionSchema = z.object({
  id: z.number().openapi({ example: 1 }),
  name: z.string().openapi({ example: 'Basic' }),
  price: z.number().openapi({ example: 10 }),
})

export const StatsSchema = z.object({
  users: z.number().openapi({ example: 100 }),
  revenue: z.number().openapi({ example: 5000 }),
})

export const ErrorSchema = z.object({
  error: z.string(),
})

export type Product = z.infer<typeof ProductSchema>
export type Subscription = z.infer<typeof SubscriptionSchema>
export type Stats = z.infer<typeof StatsSchema>

export const ImageSchema = z.object({
  idx: z.number().openapi({ example: 0 }),
  id: z.number().openapi({ example: 6 }),
@ -45,4 +20,3 @@ export const ImageResponseSchema = z.object({
  data: z.array(ImageSchema).optional(),
})
70	packages/shared/src/server/serve-assets.ts	Normal file
@ -0,0 +1,70 @@

import { OpenAPIHono } from '@hono/zod-openapi'
import { serveStatic } from '@hono/node-server/serve-static'
import path from 'path'

export const registerAssetRoutes = (app: OpenAPIHono) => {
  // Serve manifest.webmanifest from dist root
  app.get('/manifest.webmanifest', serveStatic({
    root: process.env.CLIENT_DIST_PATH || '../dist',
    path: 'manifest.webmanifest'
  }));

  // Serve service worker
  app.get('/sw.js', serveStatic({
    root: process.env.CLIENT_DIST_PATH || '../dist',
    path: 'sw.js'
  }));

  // Serve registerSW.js
  app.get('/registerSW.js', serveStatic({
    root: process.env.CLIENT_DIST_PATH || '../dist',
    path: 'registerSW.js'
  }));

  // Serve workbox assets if they are at root
  app.get('/workbox-*.js', serveStatic({
    root: process.env.CLIENT_DIST_PATH || '../dist',
    rewriteRequestPath: (path) => path // Serve matching file
  }));

  // Serve widget bundles
  app.get('/widgets/*', serveStatic({
    root: process.env.CLIENT_DIST_PATH || '../dist/widgets',
    rewriteRequestPath: (path) => path // Serve matching file
  }));

  // Serve root static assets (images, icons, robots.txt, etc.)
  app.get('/:file{.+\\.(png|ico|svg|txt|xml)$}', serveStatic({
    root: process.env.CLIENT_DIST_PATH || '../dist',
  }));

  // Hashed build assets are immutable, so mark them cacheable for a year
  app.use('/assets/*', async (c, next) => {
    await next();
    if (c.res.ok && c.res.status === 200) {
      c.res.headers.set('Cache-Control', 'public, max-age=31536000, immutable');
    }
  });
  app.use('/assets/*', serveStatic({
    root: process.env.CLIENT_DIST_PATH || '../dist',
    onNotFound: (path, c) => {
      return undefined;
    }
  }));

  // Serve embed assets
  app.use('/embed_assets/*', serveStatic({
    root: process.env.CLIENT_DIST_PATH ? path.join(process.env.CLIENT_DIST_PATH, 'client/embed') : '../dist/client/embed',
    onNotFound: (path, c) => {
      return undefined;
    },
    rewriteRequestPath: (path) => path.replace(/^\/embed_assets/, ''),
  }));

  // Fallback to index.html for SPA
  app.get('*', serveStatic({
    root: process.env.CLIENT_DIST_PATH || '../dist',
    path: 'index.html'
  }));
}
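Route order matters here: the wildcard SPA fallback swallows anything registered after it, so API routes must be mounted first. A minimal wiring sketch (the server entry point shape is an assumption):

import { serve } from '@hono/node-server';
import { OpenAPIHono } from '@hono/zod-openapi';
import { registerAssetRoutes } from './serve-assets.js';

const app = new OpenAPIHono();
// ... register API routes first ...
registerAssetRoutes(app); // static assets + SPA fallback go last
serve({ fetch: app.fetch, port: 3000 });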