pm server - shared

babayaga 2026-01-29 18:14:47 +01:00
parent 68c6e328a3
commit 0bbd4822cb
35 changed files with 4367 additions and 0 deletions

View File

@@ -0,0 +1,42 @@
import { LRUCache } from 'lru-cache';
import { CacheAdapter } from './types.js';
export class MemoryCache implements CacheAdapter {
private cache: LRUCache<string, any>;
constructor() {
    const defaultTtl = process.env.CACHE_DEFAULT_TTL ? parseInt(process.env.CACHE_DEFAULT_TTL, 10) : 1000 * 60 * 5; // 5 minutes by default
this.cache = new LRUCache({
max: 500,
ttl: defaultTtl,
updateAgeOnGet: false,
});
}
  async get<T>(key: string): Promise<T | null> {
    const value = this.cache.get(key);
    // Use ?? so falsy cached values (0, '', false) are returned, not coerced to null
    return (value as T) ?? null;
  }
async set<T>(key: string, value: T, ttl?: number): Promise<void> {
this.cache.set(key, value, { ttl: ttl ? ttl * 1000 : undefined });
}
async del(key: string): Promise<void> {
this.cache.delete(key);
}
async flush(pattern?: string): Promise<void> {
if (pattern) {
      // Pattern flush is a simple startsWith scan: LRUCache has no native
      // key-pattern support, so we walk the keys manually.
for (const key of this.cache.keys()) {
if (typeof key === 'string' && key.startsWith(pattern)) {
this.cache.delete(key);
}
}
} else {
this.cache.clear();
}
}
}
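
A minimal usage sketch for the adapter above: set() takes the TTL in seconds and converts it to the milliseconds lru-cache expects, and flush() with an argument is a prefix match.

import { MemoryCache } from './MemoryCache.js';

const cache = new MemoryCache();
await cache.set('user:42', { name: 'Ada' }, 60); // TTL in seconds, converted to ms internally
const user = await cache.get<{ name: string }>('user:42');
await cache.flush('user:'); // removes every key with the 'user:' prefix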

View File

@@ -0,0 +1,22 @@
import { CacheAdapter } from './types.js';
import { MemoryCache } from './MemoryCache.js';
// Design pattern: singleton factory.
// For now we export a single shared instance, defaulting to the in-memory adapter.
// Future: read process.env.CACHE_PROVIDER === 'redis' and return a RedisCache instead
// (see the hypothetical sketch after this file).
let instance: CacheAdapter | null = null;
export const getCache = (): CacheAdapter => {
if (!instance) {
// Logic to choose implementation could go here
// e.g. if (process.env.REDIS_URL) instance = new RedisCache() ...
console.log('[Cache] Initializing MemoryCache adapter');
instance = new MemoryCache();
}
return instance;
};
export * from './types.js';
export * from './MemoryCache.js';
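
A hypothetical RedisCache sketch for the CACHE_PROVIDER branch mentioned above — not part of this commit, and it assumes ioredis; the EX option is in seconds, matching the CacheAdapter ttl contract.

import Redis from 'ioredis';
import { CacheAdapter } from './types.js';

// Hypothetical sketch, not part of this commit
export class RedisCache implements CacheAdapter {
  private client = new Redis(process.env.REDIS_URL ?? 'redis://localhost:6379');

  async get<T>(key: string): Promise<T | null> {
    const raw = await this.client.get(key);
    return raw === null ? null : (JSON.parse(raw) as T);
  }

  async set<T>(key: string, value: T, ttl?: number): Promise<void> {
    // EX is in seconds, matching the CacheAdapter ttl contract
    if (ttl) await this.client.set(key, JSON.stringify(value), 'EX', ttl);
    else await this.client.set(key, JSON.stringify(value));
  }

  async del(key: string): Promise<void> {
    await this.client.del(key);
  }

  async flush(pattern?: string): Promise<void> {
    if (pattern) {
      const keys = await this.client.keys(`${pattern}*`);
      if (keys.length > 0) await this.client.del(...keys);
    } else {
      await this.client.flushdb();
    }
  }
}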

View File

@@ -0,0 +1,6 @@
export interface CacheAdapter {
get<T>(key: string): Promise<T | null>;
set<T>(key: string, value: T, ttl?: number): Promise<void>;
del(key: string): Promise<void>;
flush(pattern?: string): Promise<void>;
}
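
Since consumers depend only on this interface, helpers can stay storage-agnostic; a sketch of a hypothetical cache-aside lookup written purely against it:

import { CacheAdapter } from './types.js';

// Hypothetical cache-aside helper: return the cached value, or load and cache it
async function cachedFetch<T>(
  cache: CacheAdapter,
  key: string,
  load: () => Promise<T>,
  ttlSeconds = 60
): Promise<T> {
  const hit = await cache.get<T>(key);
  if (hit !== null) return hit;
  const value = await load();
  await cache.set(key, value, ttlSeconds);
  return value;
}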

View File

@@ -0,0 +1,185 @@
import { createRoute, z } from '@hono/zod-openapi';
import { Context } from 'hono';
import { streamSSE, stream } from 'hono/streaming';
import fs from 'fs';
import readline from 'readline';
/**
* Creates OpenAPI route definitions for standard log endpoints.
* @param tag The OpenAPI tag for grouping (e.g. 'System', 'Images')
* @param pathPrefix The URL path prefix (e.g. '/api/logs/system')
*/
export const createLogRoutes = (tag: string, pathPrefix: string) => {
const getRoute = createRoute({
method: 'get',
path: pathPrefix,
tags: [tag],
summary: `Get ${tag} logs`,
description: `Download or view ${tag} logs as a JSON array`,
responses: {
200: {
description: 'Log content',
content: {
'application/json': {
schema: z.array(z.record(z.string(), z.any())).openapi({
description: 'Array of log entries'
})
}
}
},
404: {
description: 'Log file not found'
}
}
});
const streamRoute = createRoute({
method: 'get',
path: `${pathPrefix}/stream`,
tags: [tag],
summary: `Stream ${tag} logs`,
description: `Stream ${tag} logs via SSE (Server-Sent Events)`,
responses: {
200: {
description: 'Log stream',
content: {
'text/event-stream': { schema: z.string() }
}
}
}
});
return { getRoute, streamRoute };
};
/**
* Creates Hono handlers for standard log endpoints.
* @param filePath The absolute path to the log file
*/
export const createLogHandlers = (filePath: string) => {
const getHandler = async (c: Context) => {
if (!fs.existsSync(filePath)) {
return c.text('Log file not found', 404);
}
c.header('Content-Type', 'application/json');
return stream(c, async (stream) => {
const fileStream = fs.createReadStream(filePath);
const rl = readline.createInterface({
input: fileStream,
crlfDelay: Infinity
});
await stream.write('[');
let first = true;
for await (const line of rl) {
if (!line.trim()) continue;
if (!first) {
await stream.write(',');
}
        try {
          // pino writes one JSON object per line, so forward each line verbatim;
          // re-parsing every line to validate it would add overhead for little gain.
          await stream.write(line);
          first = false;
        } catch (e) {
          // Skip lines that fail to write
        }
}
await stream.write(']');
});
};
const streamHandler = async (c: Context) => {
return streamSSE(c, async (stream) => {
// Send initial connection message
await stream.writeSSE({
data: JSON.stringify({ type: 'info', message: 'Connected to log stream' })
});
if (!fs.existsSync(filePath)) {
await stream.writeSSE({
data: JSON.stringify({ type: 'error', message: 'Log file not found' })
});
// We keep the stream open in case the file is created later
}
let currentSize = 0;
// Check initial size
if (fs.existsSync(filePath)) {
const stat = fs.statSync(filePath);
currentSize = stat.size;
      // Stream only NEW content: replaying existing content from an offset risks
      // emitting partial lines, which breaks both JSON and plain-text consumers.
      // Record the current size and stream whatever is appended after it.
}
const checkInterval = 250; // Check every 250ms
const interval = setInterval(async () => {
try {
if (!fs.existsSync(filePath)) {
if (currentSize > 0) {
currentSize = 0; // File deleted
await stream.writeSSE({ data: JSON.stringify({ type: 'info', message: 'Log file deleted' }) });
}
return;
}
const stat = fs.statSync(filePath);
if (stat.size > currentSize) {
const sizeDiff = stat.size - currentSize;
const buffer = Buffer.alloc(sizeDiff);
const fd = fs.openSync(filePath, 'r');
try {
fs.readSync(fd, buffer, 0, sizeDiff, currentSize);
currentSize = stat.size;
const chunk = buffer.toString('utf-8');
// If it's line-delimited JSON or text
const lines = chunk.split('\n');
            // The last line may be incomplete if we read while the logger is mid-write.
            // A stricter implementation would buffer partial lines until the next tick;
            // here we assume line-atomic writes and simply skip empty lines.
for (const line of lines) {
if (!line.trim()) continue;
await stream.writeSSE({ data: line });
}
} finally {
fs.closeSync(fd);
}
} else if (stat.size < currentSize) {
// Truncated / Rotated
currentSize = stat.size;
await stream.writeSSE({
data: JSON.stringify({ type: 'info', message: 'Log rotated' })
});
}
} catch (e) {
console.error('Stream error:', e);
}
}, checkInterval);
stream.onAbort(() => {
clearInterval(interval);
});
// Keep the stream alive
await new Promise(() => { });
});
};
return { getHandler, streamHandler };
};
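
A sketch of how the two factories might be wired together, assuming an OpenAPIHono app and a pino log file at logs/system.json (both assumptions, not shown in this commit):

import { OpenAPIHono } from '@hono/zod-openapi';
import path from 'path';
import { createLogRoutes, createLogHandlers } from './log-routes-factory.js';

const app = new OpenAPIHono();
const { getRoute, streamRoute } = createLogRoutes('System', '/api/logs/system');
const { getHandler, streamHandler } = createLogHandlers(
  path.join(process.cwd(), 'logs', 'system.json')
);

app.openapi(getRoute, getHandler);        // GET /api/logs/system -> JSON array
app.openapi(streamRoute, streamHandler);  // GET /api/logs/system/stream -> SSE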

View File

@@ -0,0 +1,245 @@
import { WebSocketServer, WebSocket } from 'ws';
import { Server } from 'http';
import fs from 'fs';
import path from 'path';
import chokidar from 'chokidar';
type MessageHandler = (ws: WebSocket, payload: any) => void;
export class WebSocketManager {
private static instance: WebSocketManager;
private wss: WebSocketServer | null = null;
private handlers: Map<string, MessageHandler> = new Map();
private writeQueue: Promise<void> = Promise.resolve();
private constructor() {
// Register default handlers
this.registerHandler('log', this.handleLog.bind(this));
this.registerHandler('echo', (ws, payload) => ws.send(JSON.stringify({ type: 'echo', payload })));
this.registerHandler('ping', (ws, payload) => ws.send(JSON.stringify({ type: 'pong', id: payload.id })));
}
public static getInstance(): WebSocketManager {
if (!WebSocketManager.instance) {
WebSocketManager.instance = new WebSocketManager();
}
return WebSocketManager.instance;
}
public init(server: Server) {
if (this.wss) {
console.warn('WebSocketServer already initialized');
return;
}
console.log('Initializing WebSocket Server...');
this.wss = new WebSocketServer({ server, path: '/ws' });
this.wss.on('connection', (ws: WebSocket) => {
console.log('Client connected');
ws.on('message', (message: string) => {
try {
const data = JSON.parse(message.toString());
const { command, ...payload } = data;
if (command && this.handlers.has(command)) {
this.handlers.get(command)!(ws, payload);
} else {
console.warn('Unknown command:', command);
}
} catch (err) {
console.error('Failed to parse message:', err);
}
});
ws.on('close', () => {
console.log('Client disconnected');
});
ws.on('error', (err) => {
console.error('WebSocket error:', err);
});
});
this.initWatcher();
}
private initWatcher() {
// Watch the data directory for canvas layout files (canvas-page-new.json and friends)
const logDir = path.join(process.cwd(), 'data');
// Ensure log directory exists
if (!fs.existsSync(logDir)) {
try {
fs.mkdirSync(logDir, { recursive: true });
} catch (err) {
console.error('Failed to create log directory for watcher:', err);
}
}
const handleFile = async (filePath: string) => {
// Ignore output files (logs) to prevent infinite loops (Frontend -> Log -> Watcher -> Frontend -> Loop)
const fileName = path.basename(filePath);
const ext = path.extname(filePath).toLowerCase();
// Explicitly allow only specific JSON files (layouts) to trigger updates
// Ignore everything else (logs, dumps, etc.)
if (ext === '.json') {
if (fileName !== 'canvas-page-latest-new.json' && fileName !== 'canvas-page-new.json') {
return;
}
} else if (fileName.startsWith('canvas-html-latest')) {
return;
}
console.log(`[Watcher] File detected: ${filePath}`);
try {
if (ext === '.json') {
const content = await fs.promises.readFile(filePath, 'utf-8');
if (!content.trim()) return; // Ignore empty writes
try {
const layoutData = JSON.parse(content);
console.log('Broadcasting layout-update (json)...');
this.broadcast({
type: 'layout-update',
data: layoutData
});
} catch (parseErr) {
console.error(`Failed to parse watched JSON file: ${filePath}`, parseErr);
}
} else if (ext === '.html' || ext === '.md') {
const content = await fs.promises.readFile(filePath, 'base64');
console.log(`Broadcasting layout-update (${ext})...`);
this.broadcast({
type: 'layout-update',
data: content
});
}
} catch (err) {
console.error(`Failed to process watched file ${filePath}:`, err);
}
};
chokidar.watch(logDir, {
persistent: true,
ignoreInitial: false,
awaitWriteFinish: {
stabilityThreshold: 100,
pollInterval: 100
}
})
.on('add', handleFile)
.on('change', handleFile);
}
public registerHandler(command: string, handler: MessageHandler) {
if (this.handlers.has(command)) {
console.warn(`Handler for command '${command}' is being overwritten.`);
}
this.handlers.set(command, handler);
console.log(`Registered WebSocket handler for: ${command}`);
}
public broadcast(message: any) {
if (!this.wss) return;
const data = JSON.stringify(message);
this.wss.clients.forEach((client) => {
if (client.readyState === WebSocket.OPEN) {
client.send(data);
}
});
}
private handleLog(ws: WebSocket, payload: any) {
// Expected payload: { name: string, options?: { mode?: 'append'|'overwrite', format?: 'json'|'html'|'md' }, message: any, ...others }
const { name, id, options, ...logData } = payload;
if (!name) {
console.warn('Log command missing "name" field');
return;
}
const mode = options?.mode || 'append';
const format = options?.format || 'json';
const logDir = path.join(process.cwd(), 'data');
const extension = format === 'md' ? 'md' : format === 'html' ? 'html' : 'json';
const logFile = path.join(logDir, `${name}.${extension}`);
// Ensure log directory exists
if (!fs.existsSync(logDir)) {
try {
fs.mkdirSync(logDir, { recursive: true });
} catch (err) {
console.error('Failed to create log directory:', err);
return;
}
}
// Serialize writes using the queue
this.writeQueue = this.writeQueue.then(async () => {
try {
if (format === 'json') {
if (mode === 'overwrite') {
// For overwrite (state capture), write only the message content if available
const content = (logData.message !== undefined) ? logData.message : logData;
const contentToWrite = JSON.stringify(content, null, 2);
await fs.promises.writeFile(logFile, contentToWrite);
} else {
// For append (logging), read existing, parse, append to array, write back
let records: any[] = [];
try {
if (fs.existsSync(logFile)) {
const fileContent = await fs.promises.readFile(logFile, 'utf-8');
if (fileContent.trim()) {
try {
const parsed = JSON.parse(fileContent);
if (Array.isArray(parsed)) {
records = parsed;
} else {
records = [parsed];
}
} catch (e) {
// Attempt to parse as NDJSON (newline delimited JSON)
records = fileContent.split('\n')
.filter(line => line.trim())
.map(line => {
try { return JSON.parse(line); } catch { return null; }
})
.filter(item => item !== null);
}
}
}
} catch (readErr) {
console.warn(`Failed to read log file ${logFile}, starting fresh.`, readErr);
}
const logEntry = {
timestamp: new Date().toISOString(),
...logData
};
records.push(logEntry);
await fs.promises.writeFile(logFile, JSON.stringify(records, null, 2));
}
} else {
// HTML or MD
const message = logData.message;
const content = typeof message === 'string' ? message : JSON.stringify(message);
if (mode === 'append') {
await fs.promises.appendFile(logFile, content + '\n');
} else {
await fs.promises.writeFile(logFile, content);
}
}
} catch (err) {
console.error(`Failed to write log file ${logFile}:`, err);
}
});
}
}
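
For reference, a minimal client exchange with the manager above; the message shape ({ command, ...payload }) and handler names come from the code, while the URL and port are assumptions:

// Assumes a runtime with a WebSocket global (browser or recent Node)
const ws = new WebSocket('ws://localhost:3000/ws');
ws.addEventListener('open', () => {
  // Routed to handleLog: appends a timestamped entry to data/metrics.json
  ws.send(JSON.stringify({
    command: 'log',
    name: 'metrics',
    options: { mode: 'append', format: 'json' },
    message: { event: 'startup' }
  }));
  // Routed to the built-in 'ping' handler, which answers { type: 'pong', id }
  ws.send(JSON.stringify({ command: 'ping', id: 1 }));
});
ws.addEventListener('message', (ev) => console.log('server:', ev.data));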

View File

@@ -0,0 +1 @@
export const TEST_POST_ID = '8c1d567a-6909-4e43-b432-bd359bb10fc5';

View File

@@ -0,0 +1,152 @@
import { Context } from 'hono';
import { AbstractProduct } from '../AbstractProduct.js';
import fs from 'fs/promises';
import path from 'path';
import { fileURLToPath } from 'url';
import { substitute } from '@polymech/commons/variables';
import { logger } from './logger.js';
import { renderAstroPageRoute } from './routes.js';
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const templatesDir = process.env.ASTRO_TEMPLATES_DIR
? path.resolve(process.cwd(), process.env.ASTRO_TEMPLATES_DIR)
: path.resolve(__dirname, '../../../templates/astro');
// Cache templates in memory
const templates: Record<string, string> = {};
const loadTemplate = async (name: string) => {
if (templates[name]) return templates[name];
try {
const filePath = path.join(templatesDir, `${name}.astro`);
const content = await fs.readFile(filePath, 'utf-8');
templates[name] = content;
return content;
} catch (e) {
logger.error({ err: e, name, templatesDir }, `Failed to load template ${name}`);
throw e;
}
};
export class AstroProduct extends AbstractProduct<any> {
id = 'astro';
jobOptions = {};
actions = {};
workers: any[] = [];
routes: any[] = [];
hash = () => 'astro-hash';
meta = () => ({});
constructor() {
super();
this.initializeRoutes();
}
initializeRoutes() {
this.routes.push({
definition: renderAstroPageRoute,
handler: this.handleRenderAstroPage.bind(this)
});
}
async handleRenderAstroPage(c: Context) {
const id = c.req.param('id');
logger.info({ id }, 'Handling request for Astro Export');
const { supabase } = await import('../../commons/supabase.js');
// 1. Fetch Page Data
const { data: page, error: pageError } = await supabase
.from('pages')
.select('*')
.eq('id', id)
.single();
if (pageError || !page) {
logger.error({ id, error: pageError }, 'Failed to fetch page for Astro export');
return c.text('Page not found', 404);
}
const { data: author } = await supabase.from('profiles').select('*').eq('user_id', page.owner).single();
// 2. Load Template
let template = '';
try {
template = await loadTemplate('page');
} catch (e) {
return c.text('Template not found', 500);
}
// 3. Prepare Content
const serverUrl = process.env.VITE_SERVER_IMAGE_API_URL || 'http://localhost:3333';
let markdownContent = '';
let extractedImages: string[] = [];
try {
const rawContent = typeof page.content === 'string' ? JSON.parse(page.content) : page.content;
// Determine content root
let root = rawContent;
if (rawContent && rawContent.pages) {
const pageIdKey = `page-${page.id}`;
if (rawContent.pages[pageIdKey]) {
root = rawContent.pages[pageIdKey];
} else {
const keys = Object.keys(rawContent.pages);
if (keys.length > 0) root = rawContent.pages[keys[0]];
}
}
if (root && root.containers && Array.isArray(root.containers)) {
root.containers.forEach((container: any) => {
if (container.widgets && Array.isArray(container.widgets)) {
container.widgets.forEach((widget: any) => {
if (widget.widgetId === 'markdown-text' && widget.props && widget.props.content) {
markdownContent += widget.props.content + '\n\n';
}
});
}
});
} else if (typeof page.content === 'string') {
markdownContent = page.content;
}
} catch (e) {
markdownContent = typeof page.content === 'string' ? page.content : '';
}
// Resolve Images in Markdown
markdownContent = markdownContent.replace(/!\[(.*?)\]\((.*?)\)/g, (match, alt, url) => {
let fullUrl = url;
if (!url.startsWith('http') && !url.startsWith('data:')) {
const cleanPath = url.startsWith('/') ? url.substring(1) : url;
fullUrl = `${serverUrl}/${cleanPath}`;
}
extractedImages.push(fullUrl);
return `![${alt}](${fullUrl})`;
});
    // Generate the images section (the template exposes ${images_section}).
    // Currently left empty; extracted images could be rendered here, e.g.:
    // imagesSection = extractedImages.map(img => `<img src="${img}" />`).join('\n');
    let imagesSection = '';
// 4. Substitute
const result = substitute(false, template, {
title: page.title || 'Untitled',
description: 'Exported from Polymech',
author: author?.display_name || 'Author',
date: new Date(page.created_at).toISOString().split('T')[0],
tags: JSON.stringify(page.tags || []),
content: markdownContent,
images_section: imagesSection
});
c.header('Content-Type', 'application/octet-stream'); // Force download
c.header('Content-Disposition', `attachment; filename="${(page.title || 'page').replace(/[^a-z0-9]/gi, '_')}.astro"`);
return c.body(result);
}
}
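
A sketch of exercising the export route from a Node script; the server URL and page id are placeholders:

import fs from 'fs/promises';

const pageId = '00000000-0000-0000-0000-000000000000'; // placeholder
const res = await fetch(`http://localhost:3000/api/export/astro/${pageId}`);
if (!res.ok) throw new Error(`Export failed: ${res.status}`);
// The handler sets Content-Disposition to force a download; save the body locally.
await fs.writeFile('page.astro', Buffer.from(await res.arrayBuffer()));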

View File

@@ -0,0 +1,6 @@
import pino from 'pino';
export const logger = pino({
name: 'astro-product',
level: process.env.LOG_LEVEL || 'info',
});

View File

@@ -0,0 +1,22 @@
import { createRoute, z } from '@hono/zod-openapi';
export const renderAstroPageRoute = createRoute({
method: 'get',
path: '/api/export/astro/{id}',
request: {
params: z.object({
id: z.string()
})
},
responses: {
200: {
description: 'Rendered Astro Page',
content: {
'text/plain': {
schema: z.string()
}
}
},
404: { description: 'Page not found' }
}
});

View File

@@ -0,0 +1,69 @@
import { describe, it, expect } from 'vitest';
import { app } from '../../../index.js';
import { logger } from '../logger.js';
describe('Email Product E2E', () => {
it('should generate email HTML at /api/render/email/:id', async () => {
const { supabase } = await import('../../../commons/supabase.js');
const envId = process.env.DEFAULT_POST_TEST_ID; // may be undefined
let id = envId;
if (!id) {
const { data: posts } = await supabase.from('posts').select('id').order('created_at', { ascending: false }).limit(1);
id = posts?.[0]?.id;
}
logger.info({ id }, 'Testing Email Render with Post ID');
if (!id) {
logger.warn('Skipping test: No post found in DB');
return;
}
const res = await app.request(`/api/render/email/${id}`);
expect(res.status).toBe(200);
expect(res.headers.get('Content-Type')).toContain('text/html');
const text = await res.text();
expect(text).toContain('<!DOCTYPE html>');
}, 120000);
it('should send email at /api/send/email/:id', async () => {
const { supabase } = await import('../../../commons/supabase.js');
const envId = process.env.DEFAULT_POST_TEST_ID;
let id = envId;
if (!id) {
const { data: posts } = await supabase.from('posts').select('id').order('created_at', { ascending: false }).limit(1);
id = posts?.[0]?.id;
}
logger.info({ id }, 'Testing Email Send with Post ID');
if (!id) {
logger.warn('Skipping test: No post found in DB');
return;
}
const res = await app.request(`/api/send/email/${id}`, {
method: 'POST',
body: JSON.stringify({
to: process.env.TEST_EMAIL_TO || 'cgoflyn@gmail.com',
subject: 'E2E Test Email'
}),
headers: {
'Content-Type': 'application/json'
}
});
if (res.status !== 200) {
const body = await res.text();
console.error('Email send failed:', body);
}
expect(res.status).toBe(200);
const json = await res.json();
expect(json.success).toBe(true);
expect(json.messageId).toBeDefined();
}, 120000);
});

View File

@@ -0,0 +1,213 @@
import { Context } from 'hono';
import { AbstractProduct } from '../AbstractProduct.js';
import fs from 'fs/promises';
import path from 'path';
import { renderEmailRoute, sendEmailRoute } from './routes.js';
import { fileURLToPath } from 'url';
import { substitute } from '@polymech/commons/variables'
import { logger } from './logger.js';
import { test as sendEmailTest } from '@polymech/mail';
// Cache templates in memory
const templates: Record<string, string> = {};
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const templatesDir = process.env.EMAIL_TEMPLATES_DIR
? path.resolve(process.cwd(), process.env.EMAIL_TEMPLATES_DIR)
: path.resolve(__dirname, '../../../templates/email');
const loadTemplate = async (name: string) => {
if (templates[name]) return templates[name];
try {
const filePath = path.join(templatesDir, `${name}.html`);
const content = await fs.readFile(filePath, 'utf-8');
templates[name] = content;
return content;
} catch (e) {
logger.error({ err: e, name, templatesDir }, `Failed to load template ${name}`);
throw e;
}
};
export class EmailProduct extends AbstractProduct<any> {
id = 'email';
jobOptions = {};
actions = {};
workers: any[] = [];
routes: any[] = [];
hash = () => 'email-hash';
meta = () => ({});
constructor() {
super();
this.initializeRoutes();
}
initializeRoutes() {
this.routes.push({
definition: renderEmailRoute,
handler: this.handleRenderEmail.bind(this)
});
this.routes.push({
definition: sendEmailRoute,
handler: this.handleSendEmail.bind(this)
});
}
async generateEmailHtml(id: string, serverUrl: string) {
logger.info({ id }, 'generateEmailHtml: Starting');
    // Dynamic import avoids potential circular dependencies at module load time
const { supabase } = await import('../../commons/supabase.js');
logger.info('generateEmailHtml: Supabase imported');
// 1. Fetch Post Data
logger.info({ id }, 'generateEmailHtml: Fetching post data');
const { data: post, error: postError } = await supabase
.from('posts')
.select(`
*,
pictures: pictures (
*
)
`)
.eq('id', id)
.single();
if (postError || !post) {
logger.error({ id, error: postError }, 'generateEmailHtml: Post not found or error');
throw new Error('Post not found');
}
logger.info({ id, postTitle: post.title }, 'generateEmailHtml: Post found');
// 2. Load Templates
logger.info('generateEmailHtml: Loading templates');
const bodyTmpl = await loadTemplate('body');
const sectionTitleTmpl = await loadTemplate('section_title');
const textTmpl = await loadTemplate('text');
const imageXlTmpl = await loadTemplate('image_xl');
const imageSmTextTmpl = await loadTemplate('image_sm_text');
logger.info('generateEmailHtml: Templates loaded');
// 3. Build Content
let contentHtml = '';
// Title
contentHtml += substitute(false, sectionTitleTmpl, {
title: post.title || 'Untitled Gallery'
});
// Description
if (post.description) {
contentHtml += substitute(false, textTmpl, {
content: post.description
});
}
// Media Items
const pictures = (post.pictures || []).sort((a: any, b: any) => a.position - b.position);
logger.info({ pictureCount: pictures.length }, 'generateEmailHtml: Processing pictures');
for (const pic of pictures) {
const imageUrl = pic.image_url.startsWith('http')
? pic.image_url
: `${serverUrl}/storage/v1/object/public/${pic.image_url}`;
if (pic.title || pic.description) {
// Use Image + Text
contentHtml += substitute(false, imageSmTextTmpl, {
'image': imageUrl,
'imageClass': 'imgSmall',
title: pic.title || '',
content: pic.description || '',
href: imageUrl,
hrefTitle: 'View Full Size'
});
} else {
// Just Image (Large)
contentHtml += substitute(false, imageXlTmpl, {
'image': imageUrl
});
}
}
// 4. Final Assembly
logger.info('generateEmailHtml: Assembling final HTML');
const fullHtml = substitute(false, bodyTmpl, {
title: post.title || 'Polymech',
color_bg: '#1f1f1f',
SOURCE: contentHtml,
mail: 'user@example.com',
unsubscribe_link: '#'
});
logger.info('generateEmailHtml: Completed');
return fullHtml;
}
async handleRenderEmail(c: Context) {
const id = c.req.param('id');
logger.info({ id }, 'Rendering email for post');
const serverUrl = process.env.VITE_SERVER_IMAGE_API_URL || 'http://localhost:3333';
try {
const html = await this.generateEmailHtml(id, serverUrl);
return c.html(html);
} catch (e: any) {
if (e.message === 'Post not found') {
return c.text('Post not found', 404);
}
throw e;
}
}
async handleSendEmail(c: Context) {
const id = c.req.param('id');
logger.info({ id }, 'Sending email for post');
const serverUrl = process.env.VITE_SERVER_IMAGE_API_URL || 'http://localhost:3333';
let body: any = {};
try {
body = await c.req.json();
} catch (e) {
// Ignore JSON parse error, use defaults
}
const to = body.to || process.env.TEST_EMAIL_TO || 'cgoflyn@gmail.com';
const subject = body.subject || 'Polymech Gallery Export';
try {
const html = body.html || await this.generateEmailHtml(id, serverUrl);
// Send email
      // Send via the 'newsletter' transport, assumed to be configured in the mail
      // package's default config. We could fall back to 'smtp' or 'ethereal' on
      // failure, but a single explicit transport keeps behavior predictable.
const info = await sendEmailTest({
from: process.env.TEST_EMAIL_FROM || 'newsletter@osr-plastic.org',
to,
subject,
html,
transport: 'newsletter'
});
const ret = c.json({ success: true, messageId: info.messageId });
return ret;
} catch (e: any) {
logger.error({ err: e }, 'Failed to send email');
if (e.message === 'Post not found') {
return c.text('Post not found', 404);
}
return c.text('Failed to send email', 500);
}
}
}
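
A sketch of calling the send endpoint; the body fields mirror sendEmailRoute, and the URL and post id are placeholders:

const postId = '00000000-0000-0000-0000-000000000000'; // placeholder
const res = await fetch(`http://localhost:3000/api/send/email/${postId}`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ to: 'reader@example.com', subject: 'Gallery update' })
});
const { success, messageId } = await res.json(); // e.g. { success: true, messageId: '...' }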

View File

@@ -0,0 +1,30 @@
import pino from 'pino';
import path from 'path';
const logFile = path.join(process.cwd(), 'logs', 'email.json');
const fileTransport = pino.transport({
target: 'pino/file',
options: { destination: logFile, mkdir: true }
});
const consoleTransport = pino.transport({
target: 'pino-pretty',
options: {
colorize: true,
ignore: 'pid,hostname',
destination: 1,
},
});
export const logger = pino(
{
level: process.env.PINO_LOG_LEVEL || 'info',
base: { product: 'email' },
timestamp: pino.stdTimeFunctions.isoTime,
},
pino.multistream([
{ stream: fileTransport, level: 'info' },
{ stream: consoleTransport, level: 'info' },
])
);

View File

@@ -0,0 +1,83 @@
import { createRoute, z } from '@hono/zod-openapi';
export const renderEmailRoute = createRoute({
method: 'get',
path: '/api/render/email/{id}',
tags: ['Email'],
summary: 'Render Email HTML',
description: 'Generates email HTML for the specified post.',
request: {
params: z.object({
id: z.string().uuid().openapi({
param: {
name: 'id',
in: 'path',
},
example: 'd5d1e9fc-8e0c-49d9-8a0e-78f637b47935',
}),
}),
},
responses: {
200: {
description: 'Email HTML',
content: {
'text/html': {
schema: z.string(),
},
},
},
404: {
description: 'Post not found',
},
},
});
export const sendEmailRoute = createRoute({
method: 'post',
path: '/api/send/email/{id}',
tags: ['Email'],
summary: 'Send Email',
description: 'Generates and sends an email for the specified post.',
request: {
params: z.object({
id: z.string().uuid().openapi({
param: {
name: 'id',
in: 'path',
},
example: 'd5d1e9fc-8e0c-49d9-8a0e-78f637b47935',
}),
}),
body: {
content: {
'application/json': {
schema: z.object({
to: z.string().email().optional(),
subject: z.string().optional(),
html: z.string().optional()
})
}
}
}
},
responses: {
200: {
description: 'Email sent successfully',
content: {
'application/json': {
schema: z.object({
success: z.boolean(),
messageId: z.string().optional()
}),
},
},
},
404: {
description: 'Post not found',
},
500: {
description: 'Failed to send email'
}
},
});

View File

@@ -0,0 +1,293 @@
import { describe, it, expect } from 'vitest';
import { app } from '@/index.js';
import fs from 'fs/promises';
import path from 'path';
describe('Images Product E2E', async () => {
const TEST_DIR = path.resolve(process.cwd(), 'tests/resize');
  // Collect all JPG test assets up front. Vitest awaits an async describe factory
  // during collection, so we can read the directory here and register one
  // dynamic test per file.
const files = await fs.readdir(TEST_DIR);
const jpgFiles = files.filter(f => f.toLowerCase().endsWith('.jpg'));
if (jpgFiles.length === 0) {
it('should have test assets', () => {
console.warn('No JPG files found in tests/resize');
});
}
jpgFiles.forEach(filename => {
it(`should upload and resize ${filename}`, async () => {
const filePath = path.join(TEST_DIR, filename);
const stats = await fs.stat(filePath);
expect(stats.isFile()).toBe(true);
const fileContent = await fs.readFile(filePath);
const file = new File([fileContent], filename, { type: 'image/jpeg' });
const form = new FormData();
form.append('file', file);
form.append('width', '200');
form.append('height', '200');
form.append('format', 'webp');
const serverUrl = process.env.SERVER_URL || 'http://localhost:3000';
const postReq = new Request(`${serverUrl}/api/images`, {
method: 'POST',
body: form
});
// Execute Request
const res = await app.request(postReq);
// Verify Redirect
expect(res.status).toBe(303);
const location = res.headers.get('location');
expect(location).toBeTruthy();
expect(location).toContain('/api/images/cache/');
// Follow Redirect
// console.log(`Following redirect to: ${location}`);
const getReq = new Request(`${serverUrl}${location}`);
const res2 = await app.request(getReq);
console.log(res2.url);
// Verify Image Response
expect(res2.status).toBe(200);
expect(res2.headers.get('content-type')).toBe('image/webp');
const blob = await res2.blob();
expect(blob.size).toBeGreaterThan(0);
});
});
it('should use preset if provided', async () => {
const filename = jpgFiles[0]; // Use first available test file
const filePath = path.join(TEST_DIR, filename);
const form = new FormData();
const fileContent = await fs.readFile(filePath);
form.append('file', new File([fileContent], filename, { type: 'image/jpeg' }));
// form.append('preset', 'desktop:thumb'); // Now URL param
const serverUrl = process.env.SERVER_URL || 'http://localhost:3000';
const postReq = new Request(`${serverUrl}/api/images?preset=desktop:thumb`, {
method: 'POST',
body: form
});
const res = await app.request(postReq);
expect(res.status).toBe(303);
const location = res.headers.get('location');
expect(location).toContain('.avif'); // Preset uses avif
const getReq = new Request(`${serverUrl}${location}`);
const res2 = await app.request(getReq);
expect(res2.status).toBe(200);
expect(res2.headers.get('content-type')).toBe('image/avif'); // Verify format override
});
it('should allow overriding preset values', async () => {
const filename = jpgFiles[0];
const filePath = path.join(TEST_DIR, filename);
const form = new FormData();
const fileContent = await fs.readFile(filePath);
form.append('file', new File([fileContent], filename, { type: 'image/jpeg' }));
form.append('format', 'webp'); // Override 'avif' from desktop:thumb
form.append('width', '50'); // Override 150 from desktop:thumb
const serverUrl = process.env.SERVER_URL || 'http://localhost:3000';
// preset=desktop:thumb normally implies 150x150 avif
const postReq = new Request(`${serverUrl}/api/images?preset=desktop:thumb`, {
method: 'POST',
body: form
});
const res = await app.request(postReq);
expect(res.status).toBe(303);
const location = res.headers.get('location');
expect(location).toContain('.webp'); // Should be webp
const getReq = new Request(`${serverUrl}${location}`);
const res2 = await app.request(getReq);
expect(res2.status).toBe(200);
expect(res2.headers.get('content-type')).toBe('image/webp');
    // We could decode the image to assert dimensions, but the cache hash already
    // encodes the parameters (it includes `w50`), so the URL check covers it.
});
it('should forward to supabase and return valid json', async () => {
const filename = jpgFiles[0];
const filePath = path.join(TEST_DIR, filename);
const form = new FormData();
const fileContent = await fs.readFile(filePath);
form.append('file', new File([fileContent], filename, { type: 'image/jpeg' }));
const serverUrl = process.env.SERVER_URL || 'http://localhost:3000';
const postReq = new Request(`${serverUrl}/api/images?preset=desktop:thumb&forward=supabase`, {
method: 'POST',
body: form
});
const res = await app.request(postReq);
// Ensure we actually got a success response, otherwise fail
if (res.status !== 200) {
const text = await res.text();
console.error('Supabase response:', res.status, text);
}
expect(res.status).toBe(200);
if (res.status === 200) {
const data: any = await res.json();
expect(data.url).toBeTruthy();
expect(data.width).toBe(150); // desktop:thumb
expect(data.filename).toBeTruthy();
console.log('Supabase Upload URL:', data.url);
// Cleanup: Delete from Supabase
// Import dynamically to avoid top-level dependency if not needed
const { supabase } = await import('../../../commons/supabase.js');
const bucket = process.env.SUPABASE_BUCKET || 'pictures';
      // data.filename is the storage path returned by the upload handler
      // (`cache/${filename}`), so it can be passed straight to remove().
const { error } = await supabase.storage
.from(bucket)
.remove([data.filename]);
if (error) {
console.error('Failed to cleanup Supabase test file:', error);
} else {
console.log('Cleaned up Supabase test file:', data.filename);
}
} else {
const text = await res.text();
console.warn('Supabase test skipped or failed (check env vars):', res.status, text);
      // A non-200 here usually means missing Supabase credentials; we fail loudly
      // rather than skip, since this test is expected to run with real credentials.
}
});
it('should transform image with operations', async () => {
const filename = jpgFiles[0];
const filePath = path.join(TEST_DIR, filename);
const form = new FormData();
const fileContent = await fs.readFile(filePath);
const file = new File([fileContent], filename, { type: 'image/jpeg' });
const operations = [
{ type: 'rotate', angle: 90 },
{ type: 'resize', width: 100, height: 100, fit: 'cover' }
];
form.append('file', file);
form.append('operations', JSON.stringify(operations));
const serverUrl = process.env.SERVER_URL || 'http://localhost:3000';
const postReq = new Request(`${serverUrl}/api/images/transform`, {
method: 'POST',
body: form
});
const res = await app.request(postReq);
expect(res.status).toBe(200);
const contentType = res.headers.get('content-type');
expect(contentType).toMatch(/image\/.*/);
const blob = await res.blob();
expect(blob.size).toBeGreaterThan(0);
// Basic check that it returned *something* successfully
});
it('should adjust image brightness and contrast', async () => {
const filename = jpgFiles[0];
const filePath = path.join(TEST_DIR, filename);
const form = new FormData();
const fileContent = await fs.readFile(filePath);
const file = new File([fileContent], filename, { type: 'image/jpeg' });
const operations = [
{ type: 'adjust', brightness: 1.2, contrast: 1.1 }
];
form.append('file', file);
form.append('operations', JSON.stringify(operations));
const serverUrl = process.env.SERVER_URL || 'http://localhost:3000';
const postReq = new Request(`${serverUrl}/api/images/transform`, {
method: 'POST',
body: form
});
const res = await app.request(postReq);
expect(res.status).toBe(200);
const contentType = res.headers.get('content-type');
expect(contentType).toMatch(/image\/.*/);
const blob = await res.blob();
expect(blob.size).toBeGreaterThan(0);
});
it('should extract exif data during supabase forward', async () => {
const filenames = jpgFiles.filter(f => f.includes('exif'));
if (filenames.length === 0) {
console.warn('Skipping EXIF test, test asset exif.jpg not found');
return;
}
const filename = filenames[0];
const filePath = path.join(TEST_DIR, filename);
const form = new FormData();
const fileContent = await fs.readFile(filePath);
form.append('file', new File([fileContent], filename, { type: 'image/jpeg' }));
const serverUrl = process.env.SERVER_URL || 'http://localhost:3000';
const postReq = new Request(`${serverUrl}/api/images?forward=supabase`, {
method: 'POST',
body: form
});
const res = await app.request(postReq);
expect(res.status).toBe(200);
const data: any = await res.json();
expect(data.url).toBeTruthy();
expect(data.meta).toBeTruthy();
console.log('Returned Meta:', data.meta);
// Verify some expected EXIF data if possible
// The file IMG20250911123721.jpg implies a date 2025-09-11
if (data.meta.dateTaken) {
const date = new Date(data.meta.dateTaken);
expect(date.getFullYear()).toBe(2025);
}
// Cleanup
if (data.filename) {
const { supabase } = await import('../../../commons/supabase.js');
const bucket = process.env.SUPABASE_BUCKET || 'pictures';
await supabase.storage.from(bucket).remove([data.filename]);
}
});
});

View File

@@ -0,0 +1,604 @@
import { Context } from 'hono';
import { AbstractProduct } from '../AbstractProduct.js';
import { postImageRoute, getImageRoute, postResponsiveImageRoute, getImageLogsRoute, streamImageLogsRoute, renderImageRoute, postTransformRoute } from './routes.js';
import { createLogHandlers } from '../../commons/log-routes-factory.js';
import sharp from 'sharp';
import fs from 'fs/promises';
import path from 'path';
import { createHash } from 'crypto';
import { logger } from './logger.js';
import { getPresets } from './presets.js';
import 'dotenv/config';
export const CACHE_DIR = process.env.CACHE_DIR || path.join(process.cwd(), 'cache');
export const CACHE_TTL = process.env.CACHE_DEFAULT_TTL ? Math.floor(parseInt(process.env.CACHE_DEFAULT_TTL) / 1000) : 31536000;
export async function ensureCachedImage(inputBuffer: Buffer, width: number, height: number | undefined, format: string): Promise<string> {
const { filename } = await _ensureCachedImage(inputBuffer, width, height, format);
return filename;
}
export async function _ensureCachedImage(inputBuffer: Buffer, width: number, height: number | undefined, format: string): Promise<{ filename: string; hit: boolean }> {
// 1. Hash compatibility with ResponsiveImage logic
let hashStr = `w${width}`;
if (height) hashStr += `h${height}`;
hashStr += `f${format}`;
const hash = createHash('sha256')
.update(inputBuffer)
.update(hashStr)
.digest('hex');
const filename = `${hash}.${format}`;
const filepath = path.join(CACHE_DIR, filename);
// 2. Check Cache
try {
await fs.access(filepath);
return { filename, hit: true };
} catch {
// Miss
}
// 3. Process
const pipeline = sharp(inputBuffer);
pipeline.resize({
width,
height,
withoutEnlargement: true,
fit: 'inside'
});
pipeline.toFormat(format as keyof sharp.FormatEnum);
const processedBuffer = await pipeline.toBuffer();
await fs.writeFile(filepath, processedBuffer);
return { filename, hit: false };
}
export async function ensureCachedImageFromUrl(url: string, width: number | undefined, height: number | undefined, format: string): Promise<string> {
const { filename } = await _ensureCachedImageFromUrl(url, width, height, format);
return filename;
}
export async function _ensureCachedImageFromUrl(url: string, width: number | undefined, height: number | undefined, format: string): Promise<{ filename: string; hit: boolean }> {
// 1. URL-Based Hash Key (Deterministic)
const hashKey = createHash('sha256')
.update(url)
.update(`w${width}h${height}f${format}`)
.digest('hex');
const filename = `${hashKey}.${format}`;
const filepath = path.join(CACHE_DIR, filename);
// 2. Check Cache
try {
await fs.access(filepath);
return { filename, hit: true };
} catch {
// Miss
}
// 3. Fetch & Process
const fetchRes = await fetch(url);
if (!fetchRes.ok) throw new Error(`Failed to fetch image: ${fetchRes.statusText}`);
const arrayBuffer = await fetchRes.arrayBuffer();
const inputBuffer = Buffer.from(arrayBuffer);
const pipeline = sharp(inputBuffer);
if (width || height) {
pipeline.resize({
width,
height,
withoutEnlargement: true,
fit: 'inside'
});
}
pipeline.toFormat(format as keyof sharp.FormatEnum);
const processedBuffer = await pipeline.toBuffer();
// 4. Save Cache
await fs.writeFile(filepath, processedBuffer);
return { filename, hit: false };
}
export class ImagesProduct extends AbstractProduct<any> {
id = 'images';
jobOptions = {};
actions = {};
workers = [];
routes!: any[];
constructor() {
super();
const { getHandler, streamHandler } = createLogHandlers(path.join(process.cwd(), 'logs', 'images.json'));
this.routes = [
{ definition: postImageRoute, handler: this.handlePostImage.bind(this) },
{ definition: getImageRoute, handler: this.handleGetImage.bind(this) },
{ definition: postResponsiveImageRoute, handler: this.handlePostResponsive.bind(this) },
{ definition: getImageLogsRoute, handler: getHandler },
{ definition: streamImageLogsRoute, handler: streamHandler },
{ definition: renderImageRoute, handler: this.handleRenderImage.bind(this) },
{ definition: postTransformRoute, handler: this.handleTransformImage.bind(this) }
];
}
async onStart() {
// Ensure cache directory exists
try {
await fs.mkdir(CACHE_DIR, { recursive: true });
} catch (err) {
logger.error({ err }, 'Failed to create cache directory');
}
}
hash(data: any): string {
return 'images-hash';
}
meta(userId: string): any {
return { userId };
}
async handlePostImage(c: Context) {
try {
const body = await c.req.parseBody();
const file = body['file'];
const presets = getPresets();
const presetName = c.req.query('preset');
const forward = c.req.query('forward');
const useCache = c.req.query('cache') !== 'false';
const isOriginal = c.req.query('original') === 'true';
let preset: any = {};
if (presetName && presets[presetName]) {
preset = presets[presetName];
}
if (!(file instanceof File)) {
return c.text('No file uploaded', 400);
}
const buffer = await file.arrayBuffer();
const inputBuffer = Buffer.from(buffer);
// Detect metadata for defaults
const meta = await sharp(inputBuffer).metadata();
// Precedence: Explicit > Preset > Default (Original Format / 2048px)
const width = body['width'] ? parseInt(body['width'] as string) : (preset['width'] || (isOriginal ? undefined : 2048));
const height = body['height'] ? parseInt(body['height'] as string) : (preset['height'] || undefined);
const format = (body['format'] as string) || (preset['format'] || meta.format || 'jpeg');
const fit = (preset['fit'] || 'inside') as keyof sharp.FitEnum;
// Generate hash for filename based on content + params
const hash = createHash('sha256')
.update(inputBuffer)
.update(`w${width}h${height}f${format}`)
.digest('hex');
const filename = `${hash}.${format}`;
const filepath = path.join(CACHE_DIR, filename);
let processedBuffer: Buffer | null = null;
// 1. Try Cache
if (useCache) {
try {
processedBuffer = await fs.readFile(filepath);
logger.debug({ filename }, 'Image cache hit - read from disk');
} catch {
// Not found in cache
}
}
if (!processedBuffer) {
// 2. Process if no cache
logger.debug({ filename }, 'Image cache miss - processing');
const pipeline = sharp(inputBuffer);
if (width || height) {
pipeline.resize({
width,
height,
withoutEnlargement: true,
fit: fit
});
}
pipeline.toFormat(format as keyof sharp.FormatEnum);
processedBuffer = await pipeline.toBuffer();
// Save to cache immediately
await fs.writeFile(filepath, processedBuffer);
}
if (!processedBuffer) {
throw new Error('Image processing failed to produce buffer');
}
// --- 1. SUPABASE FORWARDING ---
if (forward === 'supabase') {
try {
// Check env vars before import
if (!process.env.SUPABASE_URL || !process.env.SUPABASE_SERVICE_KEY) {
throw new Error('Missing Supabase credentials in server environment');
}
const { supabase } = await import('../../commons/supabase.js');
const bucket = process.env.SUPABASE_BUCKET || 'pictures';
const storagePath = `cache/${filename}`;
const { error: uploadError } = await supabase.storage
.from(bucket)
.upload(storagePath, processedBuffer, {
contentType: `image/${format}`,
upsert: true
});
if (uploadError) {
logger.error({ err: uploadError, bucket, storagePath }, 'Supabase upload failed');
return c.json({ error: 'Failed to upload to external storage', details: uploadError.message }, 502);
}
const { data: { publicUrl } } = supabase.storage
.from(bucket)
.getPublicUrl(storagePath);
const sharpMeta = await sharp(processedBuffer).metadata();
// Extract EXIF Metadata
const { extractImageMetadata } = await import('./metadata.js');
const exifMeta = await extractImageMetadata(processedBuffer);
return c.json({
url: publicUrl,
width: sharpMeta.width,
height: sharpMeta.height,
format: sharpMeta.format,
size: sharpMeta.size,
filename: storagePath,
meta: exifMeta
});
} catch (err: any) {
logger.error({ err }, 'Supabase forwarding error');
const status = err.message.includes('Missing Supabase') ? 500 : 502;
return c.json({ error: err.message }, status as any);
}
}
// --- 2. LOCAL CACHING (Handled above) ---
// processedBuffer is already written to cache if it was processed, or read from cache if hit.
const url = `/api/images/cache/${filename}`;
logger.info({ url }, 'Image processed and cached');
return c.redirect(url, 303);
} catch (err: any) {
logger.error({ err }, 'Image processing failed');
return c.text(err.message, 500);
}
}
async handleGetImage(c: Context) {
const filename = c.req.param('filename');
if (!filename) return c.text('Filename required', 400);
// Sanitize filename to prevent directory traversal
const safeFilename = path.basename(filename);
const filepath = path.join(CACHE_DIR, safeFilename);
try {
const content = await fs.readFile(filepath);
// Infer mimetype from extension logic or just 'application/octet-stream' / specific type
// Basic inference:
const ext = path.extname(safeFilename).slice(1);
let mime = 'application/octet-stream';
if (['jpg', 'jpeg'].includes(ext)) mime = 'image/jpeg';
else if (ext === 'png') mime = 'image/png';
else if (ext === 'webp') mime = 'image/webp';
else if (ext === 'gif') mime = 'image/gif';
else if (ext === 'avif') mime = 'image/avif';
c.header('Content-Type', mime);
c.header('Cache-Control', `public, max-age=${CACHE_TTL}, immutable`);
return c.body(content);
} catch (err) {
return c.text('Not found', 404);
}
}
async handlePostResponsive(c: Context) {
const start = performance.now();
const variantStats: any[] = [];
try {
const body = await c.req.parseBody();
const file = body['file'];
const url = body['url'] as string;
const sizesJson = body['sizes'] as string;
const formatsJson = body['formats'] as string;
let inputBuffer: Buffer;
if (file instanceof File) {
const buffer = await file.arrayBuffer();
inputBuffer = Buffer.from(buffer);
} else if (url) {
const urlHash = createHash('sha256').update(url).digest('hex');
const sourceFilename = `source_${urlHash}`;
const sourcePath = path.join(CACHE_DIR, sourceFilename);
try {
inputBuffer = await fs.readFile(sourcePath);
} catch {
const fetchRes = await fetch(url);
if (!fetchRes.ok) throw new Error(`Failed to fetch URL: ${fetchRes.statusText}`);
const arrayBuffer = await fetchRes.arrayBuffer();
inputBuffer = Buffer.from(arrayBuffer);
// Cache the source image
await fs.writeFile(sourcePath, inputBuffer).catch(e => {
logger.error({ err: e }, 'Failed to write source cache');
});
}
} else {
return c.text('No file or URL provided', 400);
}
// Defaults
const sizes: number[] = sizesJson ? JSON.parse(sizesJson) : [180, 640, 1024, 2048];
const formats: string[] = formatsJson ? JSON.parse(formatsJson) : ['avif', 'webp'];
const meta = await sharp(inputBuffer).metadata();
const originalFormat = meta.format || 'jpeg';
    // Map 'original' (and bare 'jpg') to the source format; pass everything else through
    const targetFormats = formats.map(f => (f === 'original' || f === 'jpg') ? originalFormat : f);
// Deduplicate
const uniqueFormats = [...new Set(targetFormats)];
const uniqueSizes = [...new Set(sizes)].sort((a, b) => a - b);
const sources: { srcset: string; type: string }[] = [];
let fallbackSrc = '';
let fallbackWidth = 0;
let fallbackHeight = 0;
let fallbackFormat = '';
// Generate all variants
for (const format of uniqueFormats) {
const srcSetParts: string[] = [];
for (const width of uniqueSizes) {
const variantStart = performance.now();
let filename;
const baseUrl = process.env.SERVER_IMAGE_API_URL || process.env.VITE_SERVER_IMAGE_API_URL || 'http://localhost:3333';
const LAZY_THRESHOLD = 600;
const isLazy = url && width > LAZY_THRESHOLD;
if (isLazy) {
// LAZY GENERATION: Return Dynamic Render URL
// Do NOT process large images eagerly if we have a URL source
const renderUrl = `${baseUrl}/api/images/render?url=${encodeURIComponent(url)}&width=${width}&format=${format}`;
srcSetParts.push(`${renderUrl} ${width}w`);
variantStats.push({ width, format, lazy: true, duration: performance.now() - variantStart });
// For fallback calculation, we assume the requested width is what we get
// We skip reading meta/file access
if (!fallbackSrc || (format === 'jpeg' && fallbackFormat !== 'jpeg') || (width > fallbackWidth && format === fallbackFormat)) {
fallbackSrc = renderUrl;
fallbackWidth = width;
fallbackHeight = 0; // Unknown height without checking, but it's lazy
fallbackFormat = format;
}
continue;
}
// EAGER GENERATION
try {
const res = await _ensureCachedImage(inputBuffer, width, undefined, format);
filename = res.filename;
variantStats.push({ width, format, lazy: false, hit: res.hit, duration: performance.now() - variantStart });
} catch (e: any) {
logger.error({ err: e }, 'Failed to cache image variant');
variantStats.push({ width, format, error: e.message, duration: performance.now() - variantStart });
continue;
}
const cachedUrl = `${baseUrl}/api/images/cache/${filename}`;
srcSetParts.push(`${cachedUrl} ${width}w`);
// Update fallback to the largest version of the first format (or preferred format)
if (!fallbackSrc || (format === 'jpeg' && fallbackFormat !== 'jpeg') || (width > fallbackWidth && format === fallbackFormat)) {
fallbackSrc = cachedUrl;
fallbackWidth = width; // Use requested width as nominal fallback width
fallbackFormat = format;
}
}
sources.push({
srcset: srcSetParts.join(', '),
type: `image/${format}`
});
}
const totalDuration = performance.now() - start;
const seconds = Math.floor(totalDuration / 1000);
const ms = Math.floor(totalDuration % 1000);
const durationFormatted = `${seconds}:${ms.toString().padStart(3, '0')}`;
const performanceStats = {
totalDuration: durationFormatted,
variants: variantStats.map(v => {
const vSeconds = Math.floor(v.duration / 1000);
const vMs = Math.floor(v.duration % 1000);
return { ...v, duration: `${vSeconds}:${vMs.toString().padStart(3, '0')}` };
}),
url: url ? url : 'file-upload'
};
logger.debug({
msg: 'Responsive image generation complete',
performance: performanceStats
});
return c.json({
img: {
src: fallbackSrc,
width: fallbackWidth,
height: fallbackHeight,
format: fallbackFormat
},
sources,
stats: performanceStats
});
} catch (err: any) {
logger.error({ err }, 'Responsive image generation failed');
return c.text(err.message, 500);
}
}
async handleRenderImage(c: Context) {
try {
const url = c.req.query('url');
if (!url) return c.text('URL required', 400);
const widthStr = c.req.query('width');
const heightStr = c.req.query('height');
const formatStr = c.req.query('format');
const width = widthStr ? parseInt(widthStr) : undefined;
const height = heightStr ? parseInt(heightStr) : undefined;
const format = formatStr || 'jpeg';
// 1. URL-Based Hash Key
const hashKey = createHash('sha256')
.update(url)
.update(`w${width}h${height}f${format}`)
.digest('hex');
const filename = `${hashKey}.${format}`;
const filepath = path.join(CACHE_DIR, filename);
// 2. Check Cache
try {
const content = await fs.readFile(filepath);
// Serve Cache
c.header('Content-Type', `image/${format}`);
c.header('Cache-Control', `public, max-age=${CACHE_TTL}, immutable`);
return c.body(content);
} catch {
// Miss - proceed to fetch
}
// 3. Fetch & Process
const fetchRes = await fetch(url);
if (!fetchRes.ok) throw new Error(`Failed to fetch image: ${fetchRes.statusText}`);
const arrayBuffer = await fetchRes.arrayBuffer();
const inputBuffer = Buffer.from(arrayBuffer);
const pipeline = sharp(inputBuffer);
if (width || height) {
pipeline.resize({
width,
height,
withoutEnlargement: true,
fit: 'inside'
});
}
pipeline.toFormat(format as keyof sharp.FormatEnum);
const processedBuffer = await pipeline.toBuffer();
// 4. Save Cache
await fs.writeFile(filepath, processedBuffer);
// 5. Serve
c.header('Content-Type', `image/${format}`);
c.header('Cache-Control', `public, max-age=${CACHE_TTL}, immutable`);
      // TODO: fix pre-render
return c.body(processedBuffer as any);
} catch (err: any) {
logger.error({ err }, 'Render image failed');
return c.text('Internal Server Error', 500);
}
}
async handleTransformImage(c: Context) {
try {
const body = await c.req.parseBody();
const file = body['file'];
const operationsJson = body['operations'] as string;
if (!(file instanceof File)) {
return c.text('No file uploaded', 400);
}
const buffer = await file.arrayBuffer();
const inputBuffer = Buffer.from(buffer);
const operations = operationsJson ? JSON.parse(operationsJson) : [];
let pipeline = sharp(inputBuffer);
for (const op of operations) {
if (op.type === 'rotate') {
pipeline = pipeline.rotate(op.angle);
} else if (op.type === 'resize') {
pipeline = pipeline.resize({
width: op.width,
height: op.height,
fit: op.fit || 'contain',
background: { r: 0, g: 0, b: 0, alpha: 0 }
});
} else if (op.type === 'crop') {
pipeline = pipeline.extract({
left: Math.round(op.x),
top: Math.round(op.y),
width: Math.round(op.width),
height: Math.round(op.height)
});
} else if (op.type === 'flip') {
if (op.direction === 'horizontal') pipeline = pipeline.flop();
if (op.direction === 'vertical') pipeline = pipeline.flip();
} else if (op.type === 'adjust') {
if (op.brightness) {
            // sharp's modulate() treats brightness as a multiplier:
            // 1.0 leaves the image unchanged, <1 darkens, >1 brightens.
pipeline = pipeline.modulate({ brightness: op.brightness });
}
if (op.contrast) {
// Contrast: C * (pixel - 128) + 128 => C * pixel + (128 - 128*C)
// This keeps mid-grey fixed.
const a = op.contrast;
const b = 128 * (1 - a);
pipeline = pipeline.linear(a, b);
}
}
}
const processedBuffer = await pipeline.toBuffer();
const meta = await sharp(processedBuffer).metadata();
c.header('Content-Type', `image/${meta.format}`);
return c.body(processedBuffer as any);
} catch (err: any) {
logger.error({ err }, 'Image transformation failed');
return c.text(err.message, 500);
}
}
}
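
A sketch of a client call against handlePostResponsive; the /api/images/responsive path is an assumption (the actual path lives in routes.ts), while the response shape (img, sources, stats) mirrors the handler above.

const form = new FormData();
form.append('url', 'https://example.com/photo.jpg');
form.append('sizes', JSON.stringify([320, 640, 1280]));
form.append('formats', JSON.stringify(['avif', 'webp']));

const res = await fetch('http://localhost:3333/api/images/responsive', {
  method: 'POST',
  body: form
});
const { sources } = await res.json();
// sources -> [{ srcset: '.../cache/<hash>.avif 320w, ...', type: 'image/avif' }, ...]
const sourceTags = sources
  .map((s: { srcset: string; type: string }) => `<source srcset="${s.srcset}" type="${s.type}">`)
  .join('\n');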

View File

@@ -0,0 +1,30 @@
import pino from 'pino';
import path from 'path';
const logFile = path.join(process.cwd(), 'logs', 'images.json');
const fileTransport = pino.transport({
target: 'pino/file',
options: { destination: logFile, mkdir: true }
});
const consoleTransport = pino.transport({
target: 'pino-pretty',
options: {
colorize: true,
ignore: 'pid,hostname',
destination: 1,
},
});
export const logger = pino(
{
level: process.env.PINO_LOG_LEVEL || 'info',
base: { product: 'images' },
timestamp: pino.stdTimeFunctions.isoTime,
},
pino.multistream([
{ stream: fileTransport, level: 'info' },
{ stream: consoleTransport, level: 'info' },
])
);

View File

@@ -0,0 +1,120 @@
import ExifReader from 'exifreader';
import path from 'path';
// Extract date from EXIF data
export async function extractImageDate(exifData: any): Promise<{ year: number, dateTaken: Date | null }> {
let dateTaken: Date | null = null;
try {
const dateTimeOriginal = exifData?.['DateTimeOriginal']?.description;
const dateTime = exifData?.['DateTime']?.description;
const createDate = exifData?.['CreateDate']?.description;
// Parse EXIF date (format: "YYYY:MM:DD HH:MM:SS")
const exifDateString = dateTimeOriginal || dateTime || createDate;
if (exifDateString) {
const [datePart, timePart] = exifDateString.split(' ');
const [year, month, day] = datePart.split(':').map(Number);
const [hour, minute, second] = (timePart || '00:00:00').split(':').map(Number);
dateTaken = new Date(year, month - 1, day, hour, minute, second);
}
} catch (e) {
// Warning managed by caller
}
  // dateTaken stays null when no EXIF date exists; year falls back to the current year
return {
year: dateTaken ? dateTaken.getFullYear() : new Date().getFullYear(),
dateTaken
};
}
// Extract comprehensive metadata from image file or buffer
export async function extractImageMetadata(input: string | Buffer): Promise<{
year: number;
dateTaken: Date | null;
title: string;
description: string;
keywords: string[];
width?: number;
height?: number;
exifRaw?: any;
gps?: { lat: number | null, lon: number | null };
rotation?: number;
}> {
let exifRaw: any = null;
try {
// ExifReader.load supports Buffer or filePath
// TS has trouble with the union type 'string | Buffer' against the overloads
if (typeof input === 'string') {
exifRaw = await ExifReader.load(input);
} else {
exifRaw = await ExifReader.load(input);
}
} catch (e) {
console.warn(`Error loading EXIF data:`, e);
exifRaw = {};
}
// Get date information
const { year, dateTaken } = await extractImageDate(exifRaw);
// Metadata Priority Logic
const keywordsStr = exifRaw?.['LastKeywordXMP']?.description || exifRaw?.iptc?.Keywords?.description || '';
const keywords = keywordsStr ? keywordsStr.split(',').map((k: string) => k.trim()) : [];
const exifDescription = exifRaw?.['ImageDescription']?.description || '';
const width = exifRaw?.['Image Width']?.value;
const height = exifRaw?.['Image Height']?.value;
const title = exifRaw?.title?.description || '';
const description = exifDescription || exifRaw?.iptc?.['Caption/Abstract']?.description || '';
// GPS
let lat: number | null = null;
let lon: number | null = null;
if (exifRaw?.['GPSLatitude'] && exifRaw?.['GPSLongitude']) {
// ExifReader typically exposes decimal degrees on `description`
// (e.g. GPSLatitude: { description: 45.123, value: [45, 12, 30] }),
// but the exact shape varies across versions, so parse defensively.
const latRaw = Number(exifRaw['GPSLatitude'].description);
const lonRaw = Number(exifRaw['GPSLongitude'].description);
if (Number.isFinite(latRaw) && Number.isFinite(lonRaw)) {
// Descriptions are unsigned; apply hemisphere refs when present.
const latRef = exifRaw['GPSLatitudeRef']?.description || '';
const lonRef = exifRaw['GPSLongitudeRef']?.description || '';
lat = /^s/i.test(String(latRef)) ? -latRaw : latRaw;
lon = /^w/i.test(String(lonRef)) ? -lonRaw : lonRaw;
}
}
const orientation = exifRaw?.['Orientation']?.value || 1;
// Map EXIF orientation to rotation degrees
let rotation = 0;
switch (orientation) {
case 3: rotation = 180; break;
case 6: rotation = 90; break;
case 8: rotation = 270; break;
}
// Trim the raw EXIF before returning it: binary-heavy tags (thumbnail, maker notes, user comments) would bloat the JSON.
const cleanExif = { ...exifRaw };
delete cleanExif['Thumbnail'];
delete cleanExif['MakerNote'];
delete cleanExif['UserComment'];
return {
year,
dateTaken,
title,
description,
keywords,
width,
height,
exifRaw: cleanExif,
gps: { lat, lon }, // Decimal degrees when EXIF GPS tags are present, otherwise null
rotation
};
}

View File

@@ -0,0 +1,26 @@
import { logger } from './logger.js';
export interface ImagePreset {
width?: number;
height?: number;
format?: string;
fit?: 'cover' | 'contain' | 'fill' | 'inside' | 'outside';
}
const DEFAULTS: Record<string, ImagePreset> = {
'desktop:thumb': { width: 150, height: 150, format: 'avif', fit: 'cover' },
'desktop:medium': { width: 800, format: 'avif', fit: 'inside' },
'desktop:large': { width: 1920, format: 'avif', fit: 'inside' },
};
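// Presets can be extended or overridden via env; a hypothetical example
// (JSON object merged over DEFAULTS by getPresets below):
// IMAGE_PRESETS='{"mobile:thumb":{"width":96,"height":96,"format":"webp","fit":"cover"}}'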
export const getPresets = (): Record<string, ImagePreset> => {
let envPresets = {};
if (process.env.IMAGE_PRESETS) {
try {
envPresets = JSON.parse(process.env.IMAGE_PRESETS);
} catch (e) {
logger.warn('Failed to parse IMAGE_PRESETS from env');
}
}
return { ...DEFAULTS, ...envPresets };
};

View File

@@ -0,0 +1,201 @@
import { createRoute, z } from '@hono/zod-openapi';
import { createLogRoutes } from '../../commons/log-routes-factory.js';
export const { getRoute: getImageLogsRoute, streamRoute: streamImageLogsRoute } = createLogRoutes('Images', '/api/images/logs');
export const postImageRoute = createRoute({
method: 'post',
path: '/api/images',
tags: ['Images'],
summary: 'Upload and resize image',
description: 'Upload an image and get a resized version via redirect.',
request: {
query: z.object({
preset: z.string().optional().openapi({ description: 'Predefined preset (e.g., desktop:thumb)' }),
forward: z.string().optional().openapi({ description: 'Forward to external storage (e.g., supabase)' }),
cache: z.string().optional().default('true').openapi({ description: 'Use cache (true/false)' })
}),
body: {
content: {
'multipart/form-data': {
schema: z.object({
file: z.any().openapi({
type: 'string',
format: 'binary',
description: 'Image file'
}),
width: z.string().optional(),
height: z.string().optional(),
format: z.string().optional(),
})
}
}
}
},
responses: {
303: {
description: 'Redirect to processed image (local cache)',
},
200: {
description: 'Image processed and uploaded',
content: {
'application/json': {
schema: z.object({
url: z.string(),
width: z.number().optional(),
height: z.number().optional(),
format: z.string().optional(),
size: z.number().optional(),
filename: z.string().optional()
})
}
}
},
500: {
description: 'Server error',
}
}
});
export const getImageRoute = createRoute({
method: 'get',
path: '/api/images/cache/:filename',
tags: ['Images'],
summary: 'Get cached image',
description: 'Serving processed images from cache map.',
request: {
params: z.object({
filename: z.string()
})
},
responses: {
200: {
description: 'Image file',
},
404: {
description: 'Not found',
}
}
});
export const postResponsiveImageRoute = createRoute({
method: 'post',
path: '/api/images/responsive',
tags: ['Images'],
summary: 'Generate responsive images',
description: 'Upload an image and get a list of responsive versions.',
request: {
body: {
content: {
'multipart/form-data': {
schema: z.object({
file: z.any().openapi({
type: 'string',
format: 'binary',
description: 'Image file'
}),
sizes: z.string().optional().openapi({ description: 'JSON array of widths, e.g. [640, 1024]' }),
formats: z.string().optional().openapi({ description: 'JSON array of formats, e.g. ["webp", "jpg"]' }),
})
}
}
}
},
responses: {
200: {
description: 'Responsive images generated',
content: {
'application/json': {
schema: z.object({
img: z.object({
src: z.string(),
width: z.number(),
height: z.number(),
format: z.string(),
}),
sources: z.array(z.object({
srcset: z.string(),
type: z.string()
}))
})
}
}
},
303: {
description: 'Redirect to cached image',
},
500: {
description: 'Server error',
}
}
});
export const renderImageRoute = createRoute({
method: 'get',
path: '/api/images/render',
tags: ['Images'],
summary: 'Render lazy-optimized image',
description: 'Fetch, resize, and serve an image from a remote URL. Intended for use in srcSet.',
request: {
query: z.object({
url: z.string().openapi({ description: 'Remote URL to fetch' }),
width: z.string().optional().openapi({ description: 'Target width' }),
height: z.string().optional().openapi({ description: 'Target height' }),
format: z.string().optional().openapi({ description: 'Output format (jpeg, webp, etc.)' })
})
},
responses: {
200: {
description: 'Image content',
content: {
'image/*': {
schema: z.string().openapi({ format: 'binary' })
}
}
},
500: {
description: 'Server error'
}
}
});
export const postTransformRoute = createRoute({
method: 'post',
path: '/api/images/transform',
tags: ['Images'],
summary: 'Transform image',
description: 'Apply operations (resize, crop, rotate, flip, adjust) to an uploaded image.',
request: {
body: {
content: {
'multipart/form-data': {
schema: z.object({
file: z.any().openapi({
type: 'string',
format: 'binary',
description: 'Image file'
}),
operations: z.string().openapi({
description: 'JSON array of operations, e.g. [{ type: "rotate", angle: 90 }, { type: "resize", width: 100 }, { type: "crop", x: 0, y: 0, width: 100, height: 100 }, { type: "flip", direction: "horizontal" }, { type: "adjust", brightness: 1.2, contrast: 1.1 }]'
})
})
}
}
}
},
responses: {
200: {
description: 'Transformed image',
content: {
'image/*': {
schema: z.string().openapi({ format: 'binary' })
}
}
},
500: {
description: 'Server error',
}
}
});
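// Example invocation (assumed local dev URL):
//   curl -F "file=@photo.jpg" \
//        -F 'operations=[{"type":"rotate","angle":90},{"type":"resize","width":800}]' \
//        http://localhost:3333/api/images/transform -o out.jpg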

View File

@@ -0,0 +1,60 @@
import { describe, it, expect } from 'vitest';
import { app } from '../../../index.js';
import { logger } from '../logger.js';
describe('Pdf Product E2E', () => {
it('should probe the PDF service', async () => {
const res = await app.request('/api/pdf/probe');
expect(res.status).toBe(200);
expect(await res.text()).toBe('PDF Service OK');
});
it('should generate PDF at /api/render/pdf/:id', async () => {
const { supabase } = await import('../../../commons/supabase.js');
const envId = process.env.DEFAULT_POST_TEST_ID;
let id = envId;
if (!id) {
const { data: posts } = await supabase.from('posts').select('id').order('created_at', { ascending: false }).limit(1);
id = posts?.[0]?.id;
}
logger.info({ id }, 'Testing with Post ID');
if (!id) {
logger.warn({ envId }, 'Skipping test: no post found in DB');
return;
}
const res = await app.request(`/api/render/pdf/${id}`);
logger.info({ status: res.status }, 'PDF Response Status');
if (res.status !== 200) {
const body = await res.text();
logger.error({ body }, 'PDF Response Body');
}
expect(res.status).toBe(200);
expect(res.headers.get('Content-Type')).toContain('application/pdf');
// Check content size to ensure it's not empty
const buffer = await res.arrayBuffer();
expect(buffer.byteLength).toBeGreaterThan(100);
// Optional: Check content disposition
const disposition = res.headers.get('Content-Disposition');
expect(disposition).toContain('attachment');
expect(disposition).toContain('.pdf');
});
it('should return 404 for non-existent post', async () => {
const res = await app.request('/api/render/pdf/00000000-0000-0000-0000-000000000000');
// The handler returns c.text('Post not found', 404) for unknown IDs.
expect(res.status).toBe(404);
});
});

View File

@@ -0,0 +1,343 @@
import { Context } from 'hono';
import { AbstractProduct } from '../AbstractProduct.js';
import PdfPrinter from 'pdfmake';
import path from 'path';
import { logger } from './logger.js';
import { renderPdfRoute, renderPdfPageRoute, getProbeRoute } from './routes.js';
// Define fonts
const fonts = {
Roboto: {
normal: 'Helvetica',
bold: 'Helvetica-Bold',
italics: 'Helvetica-Oblique',
bolditalics: 'Helvetica-BoldOblique'
}
};
export class PdfProduct extends AbstractProduct<any> {
id = 'pdf';
jobOptions = {};
actions = {};
workers: any[] = [];
routes: any[] = [];
hash = () => 'pdf-hash';
meta = () => ({});
printer: PdfPrinter;
constructor() {
super();
this.printer = new PdfPrinter(fonts);
this.initializeRoutes();
}
initializeRoutes() {
this.routes.push({
definition: renderPdfRoute,
handler: this.handleRenderPdf.bind(this)
});
this.routes.push({
definition: renderPdfPageRoute,
handler: this.handleRenderPagePdf.bind(this)
});
this.routes.push({
definition: getProbeRoute,
handler: this.handleProbe.bind(this)
});
}
async handleProbe(c: Context) {
return c.text('PDF Service OK');
}
async handleRenderPdf(c: Context) {
const id = c.req.param('id');
logger.info({ id }, 'Handling request for PDF');
const serverUrl = process.env.VITE_SERVER_IMAGE_API_URL || 'http://localhost:3333';
const { supabase } = await import('../../commons/supabase.js');
// 1. Fetch Post Data
const { data: post, error: postError } = await supabase
.from('posts')
.select('*, pictures(*)')
.eq('id', id)
.single();
if (postError || !post) {
logger.error({ id, error: postError }, 'Failed to fetch post for PDF');
return c.text('Post not found', 404);
}
const { data: author } = await supabase.from('profiles').select('*').eq('user_id', post.user_id).single();
// 2. Prepare content
const docDefinition: any = {
content: [],
defaultStyle: {
font: 'Roboto'
},
styles: {
header: {
fontSize: 22,
bold: true,
margin: [0, 0, 0, 10]
},
description: {
fontSize: 12,
margin: [0, 0, 0, 20]
},
metadata: {
fontSize: 10,
color: 'gray',
margin: [0, 0, 0, 20]
},
imageCaption: {
fontSize: 12,
bold: true,
margin: [0, 5, 0, 2]
},
imageDesc: {
fontSize: 10,
color: '#444',
margin: [0, 0, 0, 20]
}
}
};
// Header
docDefinition.content.push({ text: post.title || 'Untitled Gallery', style: 'header' });
if (post.description) {
docDefinition.content.push({ text: post.description, style: 'description' });
}
docDefinition.content.push({ text: `By ${author?.display_name || 'Author'}`, style: 'metadata' });
// Images
const pictures = (post.pictures || []).sort((a: any, b: any) => a.position - b.position);
for (const pic of pictures) {
const imageUrl = pic.image_url.startsWith('http')
? pic.image_url
: `${serverUrl}/storage/v1/object/public/${pic.image_url}`;
try {
const response = await fetch(imageUrl);
if (response.ok) {
const arrayBuffer = await response.arrayBuffer();
const buffer = Buffer.from(arrayBuffer);
docDefinition.content.push({
image: buffer,
width: 500,
margin: [0, 0, 0, 10]
});
} else {
docDefinition.content.push({ text: `[Image: ${imageUrl}]`, color: 'blue', link: imageUrl });
}
} catch (e) {
docDefinition.content.push({ text: `[Image error: ${imageUrl}]` });
}
if (pic.title && pic.title !== post.title) {
docDefinition.content.push({ text: pic.title, style: 'imageCaption' });
}
if (pic.description && pic.description !== post.description) {
docDefinition.content.push({ text: pic.description, style: 'imageDesc' });
} else {
docDefinition.content.push({ text: '', margin: [0, 0, 0, 20] });
}
}
// 3. Render
const pdfDoc = this.printer.createPdfKitDocument(docDefinition);
// Pipe to response
return new Promise((resolve) => {
const chunks: any[] = [];
pdfDoc.on('data', (chunk: any) => chunks.push(chunk));
pdfDoc.on('end', () => {
const result = Buffer.concat(chunks);
c.header('Content-Type', 'application/pdf');
c.header('Content-Disposition', `attachment; filename="${(post.title || 'export').replace(/[^a-z0-9]/gi, '_')}.pdf"`);
resolve(c.body(result));
});
pdfDoc.end();
});
}
async handleRenderPagePdf(c: Context) {
const id = c.req.param('id');
logger.info({ id }, 'Handling request for Page PDF');
const { supabase } = await import('../../commons/supabase.js');
// 1. Fetch Page Data
const { data: page, error: pageError } = await supabase
.from('pages')
.select('*')
.eq('id', id)
.single();
if (pageError || !page) {
logger.error({ id, error: pageError }, 'Failed to fetch page for PDF');
return c.text('Page not found', 404);
}
const { data: author } = await supabase.from('profiles').select('*').eq('user_id', page.owner).single();
// 2. Prepare content
const docDefinition: any = {
content: [
{
toc: {
title: { text: 'Table of Contents', style: 'h2' }
}
},
{ text: '', pageBreak: 'after' } // TOC on separate page
],
defaultStyle: {
font: 'Roboto'
},
styles: {
header: {
fontSize: 24,
bold: true,
margin: [0, 0, 0, 10]
},
h1: { fontSize: 22, bold: true, margin: [0, 10, 0, 5] },
h2: { fontSize: 18, bold: true, margin: [0, 10, 0, 5] },
h3: { fontSize: 16, bold: true, margin: [0, 8, 0, 5] },
metadata: {
fontSize: 10,
color: 'gray',
margin: [0, 0, 0, 20]
},
para: {
fontSize: 12,
margin: [0, 0, 0, 10],
lineHeight: 1.3
}
}
};
// Header
docDefinition.content.push({ text: page.title || 'Untitled Page', style: 'header' });
docDefinition.content.push({ text: `By ${author?.display_name || 'Author'}`, style: 'metadata' });
// Parse content
let markdownContent = '';
try {
const rawContent = typeof page.content === 'string' ? JSON.parse(page.content) : page.content;
// Determine content root
let root = rawContent;
if (rawContent && rawContent.pages) {
const pageIdKey = `page-${page.id}`;
if (rawContent.pages[pageIdKey]) {
root = rawContent.pages[pageIdKey];
} else {
const keys = Object.keys(rawContent.pages);
if (keys.length > 0) root = rawContent.pages[keys[0]];
}
}
if (root && root.containers && Array.isArray(root.containers)) {
root.containers.forEach((container: any) => {
if (container.widgets && Array.isArray(container.widgets)) {
container.widgets.forEach((widget: any) => {
if (widget.widgetId === 'markdown-text' && widget.props && widget.props.content) {
markdownContent += widget.props.content + '\n\n';
}
});
}
});
} else if (typeof page.content === 'string') {
markdownContent = page.content; // Fallback
}
} catch (e) {
logger.warn({ error: e, id }, 'Failed to parse page content structure, using raw');
markdownContent = typeof page.content === 'string' ? page.content : JSON.stringify(page.content);
}
const lines = markdownContent.split('\n');
const serverUrl = process.env.VITE_SERVER_IMAGE_API_URL || 'http://localhost:3333';
// Process lines sequentially to maintain order, allowing async image fetching
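// Supported subset: "# h1", "## h2", "### h3", "- bullet", "![alt](url)"; anything else becomes a paragraph.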
for (const line of lines) {
const trim = line.trim();
if (!trim) continue;
// Simple Image Regex: ![alt](url)
const imageMatch = trim.match(/!\[(.*?)\]\((.*?)\)/);
if (imageMatch) {
let imageUrl = imageMatch[2];
const alt = imageMatch[1];
// Resolve relative URLs or non-http URLs
if (!imageUrl.startsWith('http')) {
if (!imageUrl.startsWith('data:')) {
const cleanPath = imageUrl.startsWith('/') ? imageUrl.substring(1) : imageUrl;
imageUrl = `${serverUrl}/${cleanPath}`;
}
}
try {
// Fetch image buffer
const response = await fetch(imageUrl);
if (response.ok) {
const arrayBuffer = await response.arrayBuffer();
const buffer = Buffer.from(arrayBuffer);
docDefinition.content.push({
image: buffer,
width: 500, // Reasonable default width for PDF
margin: [0, 10, 0, 10]
});
// Optional: Add caption
if (alt && alt.trim()) {
docDefinition.content.push({ text: alt, fontSize: 10, italics: true, alignment: 'center', margin: [0, 0, 0, 10] });
}
} else {
// Fallback to link if fetch fails
docDefinition.content.push({ text: `[Image: ${alt}]`, color: 'blue', link: imageUrl, margin: [0, 10, 0, 10] });
}
} catch (e) {
logger.error({ error: e, imageUrl }, 'Failed to fetch image for PDF');
docDefinition.content.push({ text: `[Image Error: ${alt}]`, color: 'red', margin: [0, 10, 0, 10] });
}
continue;
}
if (trim.startsWith('# ')) {
docDefinition.content.push({ text: trim.substring(2), style: 'h1', tocItem: true });
} else if (trim.startsWith('## ')) {
docDefinition.content.push({ text: trim.substring(3), style: 'h2', tocItem: true });
} else if (trim.startsWith('### ')) {
docDefinition.content.push({ text: trim.substring(4), style: 'h3', tocItem: true });
} else if (trim.startsWith('- ')) {
docDefinition.content.push({
ul: [trim.substring(2)],
margin: [10, 0, 0, 5]
});
} else {
docDefinition.content.push({ text: trim, style: 'para' });
}
}
// 3. Render
const pdfDoc = this.printer.createPdfKitDocument(docDefinition);
// Pipe to response
return new Promise((resolve) => {
const chunks: any[] = [];
pdfDoc.on('data', (chunk: any) => chunks.push(chunk));
pdfDoc.on('end', () => {
const result = Buffer.concat(chunks);
c.header('Content-Type', 'application/pdf');
c.header('Content-Disposition', `attachment; filename="${(page.title || 'page').replace(/[^a-z0-9]/gi, '_')}.pdf"`);
resolve(c.body(result));
});
pdfDoc.end();
});
}
}

View File

@@ -0,0 +1,30 @@
import pino from 'pino';
import path from 'path';
const logFile = path.join(process.cwd(), 'logs', 'pdf.json');
const fileTransport = pino.transport({
target: 'pino/file',
options: { destination: logFile, mkdir: true }
});
const consoleTransport = pino.transport({
target: 'pino-pretty',
options: {
colorize: true,
ignore: 'pid,hostname',
destination: 1,
},
});
export const logger = pino(
{
level: process.env.PINO_LOG_LEVEL || 'info',
base: { product: 'pdf' },
timestamp: pino.stdTimeFunctions.isoTime,
},
pino.multistream([
{ stream: fileTransport, level: 'info' },
{ stream: consoleTransport, level: 'info' },
])
);

View File

@@ -0,0 +1,84 @@
import { createRoute, z } from '@hono/zod-openapi';
export const renderPdfRoute = createRoute({
method: 'get',
path: '/api/render/pdf/:id',
tags: ['PDF'],
summary: 'Render Post PDF',
description: 'Generates and downloads a custom PDF for the specified post.',
request: {
params: z.object({
id: z.string().openapi({
param: {
name: 'id',
in: 'path',
},
example: 'd5d1e9fc-8e0c-49d9-8a0e-78f637b47935',
}),
}),
},
responses: {
200: {
description: 'PDF File',
content: {
'application/pdf': {
schema: z.string().openapi({ format: 'binary' }),
},
},
},
404: {
description: 'Post not found',
},
},
});
export const renderPdfPageRoute = createRoute({
method: 'get',
path: '/api/render/pdf/page/:id',
tags: ['PDF'],
summary: 'Render Page PDF',
description: 'Generates and downloads a custom PDF for the specified page.',
request: {
params: z.object({
id: z.string().openapi({
param: {
name: 'id',
in: 'path',
},
example: 'page-uuid',
}),
}),
},
responses: {
200: {
description: 'PDF File',
content: {
'application/pdf': {
schema: z.string().openapi({ format: 'binary' }),
},
},
},
404: {
description: 'Page not found',
},
},
});
export const getProbeRoute = createRoute({
method: 'get',
path: '/api/pdf/probe',
tags: ['PDF'],
summary: 'Probe PDF Service',
description: 'Checks if the PDF service is active.',
responses: {
200: {
description: 'OK',
content: {
'text/plain': {
schema: z.string()
}
}
}
}
});

View File

@@ -0,0 +1,83 @@
import { describe, expect, it, beforeAll } from 'vitest';
import { app } from '../../../index.js';
import fs from 'fs/promises';
import path from 'path';
// Helper to wait for job completion
const waitForJob = async (jobId: string, maxAttempts = 20) => {
for (let i = 0; i < maxAttempts; i++) {
const res = await app.request(`/api/videos/jobs/${jobId}`);
const json = await res.json();
if (json.status === 'completed') return json;
if (json.status === 'failed') throw new Error(`Job failed: ${json.error}`);
await new Promise(r => setTimeout(r, 1000));
}
throw new Error('Timeout waiting for job');
};
describe('Videos Product E2E', () => {
// Ensure we have a dummy video for testing
const TEST_VIDEO_DIR = path.join(process.cwd(), 'tests', 'video');
const TEST_VIDEO_NAME = 'small.mp4'; // Assume this exists as per user
const TEST_VIDEO_PATH = path.join(TEST_VIDEO_DIR, TEST_VIDEO_NAME);
// Fixture expected at server/tests/video; the upload test skips gracefully if it is missing.
  it.todo('should list video presets once GET /api/videos/presets is implemented');
it('should accept a video upload and process it', async () => {
// Read test file
// Note: In a real CI we might generate a blank video with ffmpeg first if missing
let videoBuffer: Buffer;
try {
videoBuffer = await fs.readFile(TEST_VIDEO_PATH);
} catch (e) {
console.warn(`Skipping video upload test - ${TEST_VIDEO_NAME} not found in tests/video`);
return;
}
const formData = new FormData();
const blob = new Blob([new Uint8Array(videoBuffer)], { type: 'video/mp4' });
formData.append('file', blob, 'uploaded.mp4');
const req = new Request('http://localhost:3333/api/videos?preset=mobile-480p', {
method: 'POST',
body: formData
});
const res = await app.fetch(req);
expect(res.status).toBe(202);
const json = await res.json();
expect(json.jobId).toBeDefined();
expect(json.preset).toBe('mobile-480p');
// Wait for job
const jobResult = await waitForJob(json.jobId, 110);
expect(jobResult.status).toBe('completed');
expect(jobResult.progress).toBe(100);
expect(jobResult.resultUrl).toContain('/api/videos/jobs/');
// Download result
const dlRes = await app.request(jobResult.resultUrl);
expect(dlRes.status).toBe(200);
const dlBlob = await dlRes.blob();
expect(dlBlob.size).toBeGreaterThan(0);
// Maybe check mime type
// expect(dlRes.headers.get('Content-Type')).toBe('video/mp4');
}, 120000); // Increase timeout for processing
  // Hard to cover E2E without an external URL; serving a local fixture via Hono static could stand in.
  it.todo('should accept a video URL and process it');
});

View File

@@ -0,0 +1,124 @@
import { describe, expect, it } from 'vitest';
import { app } from '../../../index.js';
import fs from 'fs/promises';
import path from 'path';
describe('Video Upload & Probe', () => {
const TEST_VIDEO_DIR = path.join(process.cwd(), 'tests', 'video');
const TEST_VIDEO_NAME = 'small.mp4';
const TEST_VIDEO_PATH = path.join(TEST_VIDEO_DIR, TEST_VIDEO_NAME);
// Helper to wait for job completion
const waitForJob = async (jobId: string, maxAttempts = 100) => {
for (let i = 0; i < maxAttempts; i++) {
const res = await app.request(`http://localhost:3333/api/videos/jobs/${jobId}`);
if (res.status === 200) {
const json = await res.json();
console.log(`Waiting for job ${jobId}: ${json.status || 'unknown'}`);
if (json.status === 'completed') return json;
if (json.status === 'failed') throw new Error(`Job failed: ${json.error}`);
} else {
console.log(`Waiting for job ${jobId}: HTTP ${res.status}`);
}
await new Promise(r => setTimeout(r, 1000));
}
throw new Error('Timeout waiting for job');
};
it('should upload video, probe it, create supabase entry, and allow download', async () => {
// Authenticate to get real User ID
const { supabase } = await import('../../../commons/supabase.js');
const email = process.env.TEST_EMAIL;
const password = process.env.TEST_PASSWORD;
if (!email || !password) {
console.warn('Skipping test - TEST_EMAIL or TEST_PASSWORD missing');
return;
}
const { data: authData, error: authError } = await supabase.auth.signInWithPassword({
email,
password
});
if (authError || !authData.user) {
console.error('Auth failed:', authError);
throw new Error('Authentication failed for test user');
}
const userId = authData.user.id;
console.log('Test User ID:', userId);
let videoBuffer: Buffer;
try {
videoBuffer = await fs.readFile(TEST_VIDEO_PATH);
} catch (e) {
console.warn(`Skipping video upload test - ${TEST_VIDEO_NAME} not found in tests/video`);
return;
}
const formData = new FormData();
const blob = new Blob([new Uint8Array(videoBuffer)], { type: 'video/mp4' });
formData.append('file', blob, 'uploaded.mp4');
const res = await app.request(`http://localhost:3333/api/videos/upload?preset=web-720p&userId=${userId}&title=TestVideo`, {
method: 'POST',
body: formData
});
if (res.status !== 200) {
const errorText = await res.text();
console.error('Upload failed with:', errorText);
throw new Error(`Upload failed: ${res.status} ${errorText}`);
}
expect(res.status).toBe(200);
const json = await res.json();
expect(json).toHaveProperty('jobId');
expect(json).toHaveProperty('dbId');
expect(json).toHaveProperty('videoUrl');
expect(json.meta).toHaveProperty('mux_playback_id', json.jobId);
expect(json.meta).toHaveProperty('duration');
expect(json.meta).toHaveProperty('max_stored_resolution');
expect(json.meta.tracks[0]).toHaveProperty('max_width');
expect(json).toHaveProperty('thumbnailUrl');
console.log('Upload success:', json);
// Verify Thumbnail Download
console.log(`Downloading thumbnail from ${json.thumbnailUrl}...`);
const thumbRes = await app.request(json.thumbnailUrl);
expect(thumbRes.status).toBe(200);
const thumbBlob = await thumbRes.blob();
expect(thumbBlob.size).toBeGreaterThan(0);
expect(thumbRes.headers.get('Content-Type')).toBe('image/jpeg');
console.log('Thumbnail download success, size:', thumbBlob.size);
// Wait for Job Completion
console.log('Waiting for processing...');
const jobResult = await waitForJob(json.jobId);
expect(jobResult.status).toBe('completed');
console.log('Job completed');
// Verify Download (Preview URL)
console.log(`Downloading from ${json.videoUrl}...`);
const dlRes = await app.request(json.videoUrl);
expect(dlRes.status).toBe(200);
const dlBlob = await dlRes.blob();
expect(dlBlob.size).toBeGreaterThan(0);
expect(dlRes.headers.get('Content-Type')).toMatch(/video\//);
console.log('Download success, size:', dlBlob.size);
// Cleanup Supabase
const { error } = await supabase.from('pictures').delete().eq('id', json.dbId);
if (error) console.error('Cleanup failed:', error);
expect(error).toBeNull();
}, 120000);
});

View File

@@ -0,0 +1,499 @@
import { Context } from 'hono';
import { AbstractProduct } from '../AbstractProduct.js';
import {
postVideoRoute,
getJobRoute,
downloadVideoRoute,
uploadVideoRoute,
getJobProgressRoute,
getHlsPlaylistRoute,
getHlsSegmentRoute,
cancelJobRoute,
getVideoLogsRoute,
streamVideoLogsRoute,
proxyVideoRoute
} from './routes.js';
import { VideoWorker, VIDEO_JOB_NAME } from './worker.js';
import fs from 'fs/promises';
import path from 'path';
import { logger } from './logger.js';
import { createHash } from 'crypto';
import { PgBoss } from 'pg-boss';
import { EventEmitter } from 'events';
import { streamSSE } from 'hono/streaming';
import { createLogHandlers } from '@/commons/log-routes-factory.js';
import type { ContentfulStatusCode } from 'hono/utils/http-status';
const STORAGE_DIR = path.resolve(process.cwd(), process.env.VIDEO_STORE || 'videos');
const CACHE_DIR = path.join(process.cwd(), 'cache');
export class VideosProduct extends AbstractProduct<any> {
id = 'videos';
routes!: any[];
jobOptions = {};
actions = {};
workers: VideoWorker[] = [];
private boss?: PgBoss;
public events = new EventEmitter();
hash(data: any): string {
return 'videos-hash';
}
meta(userId: string): any {
return { userId };
}
constructor() {
super();
const { getHandler, streamHandler } = createLogHandlers(path.join(process.cwd(), 'logs', 'videos.json'));
this.routes = [
{ definition: postVideoRoute, handler: this.handlePostVideo.bind(this) },
{ definition: uploadVideoRoute, handler: this.handleUploadVideo.bind(this) },
{ definition: getJobRoute, handler: this.handleGetJob.bind(this) },
{ definition: getJobProgressRoute, handler: this.handleGetJobProgress.bind(this) },
{ definition: getHlsPlaylistRoute, handler: this.handleGetHlsPlaylist.bind(this) },
{ definition: getHlsSegmentRoute, handler: this.handleGetHlsSegment.bind(this) },
{ definition: downloadVideoRoute, handler: this.handleDownloadVideo.bind(this) },
{ definition: cancelJobRoute, handler: this.handleCancelJob.bind(this) },
{ definition: getVideoLogsRoute, handler: getHandler },
{ definition: streamVideoLogsRoute, handler: streamHandler },
{ definition: proxyVideoRoute, handler: this.handleProxyVideo.bind(this) }
];
}
async onStart(boss?: PgBoss) {
// Ensure cache directory exists
try {
await fs.mkdir(CACHE_DIR, { recursive: true });
await fs.mkdir(STORAGE_DIR, { recursive: true });
} catch (err) { }
if (boss) {
this.boss = boss;
const worker = new VideoWorker();
worker.events = this.events; // Inject EventEmitter
this.workers.push(worker);
// Ensure queue exists
await boss.createQueue(worker.queueName);
// Register worker
await boss.work(worker.queueName, worker.handler.bind(worker) as any);
logger.info(`[VideosProduct] Registered worker: ${worker.queueName}`);
} else {
logger.warn('[VideosProduct] PgBoss not available, video processing will not work');
}
}
async handlePostVideo(c: Context) {
try {
if (!this.boss) {
return c.json({ error: 'Video processing service unavailable' }, 503);
}
let file: File | undefined;
let url: string | undefined;
// Handle Multipart
const contentType = c.req.header('Content-Type') || '';
if (contentType.includes('multipart/form-data')) {
const body = await c.req.parseBody();
file = body['file'] as File;
url = (body['url'] as string) || c.req.query('url');
} else {
// Handle JSON
try {
const json = await c.req.json();
url = json.url;
} catch {
url = c.req.query('url');
}
}
const presetName = c.req.query('preset') || 'original';
if (!file && !url) {
return c.json({ error: 'No file or URL provided' }, 400);
}
let inputPath = '';
let inputSource: 'file' | 'url' = 'url';
if (file) {
// Save uploaded file temporarily to cache so ffmpeg can read it
const buffer = await file.arrayBuffer();
const hash = createHash('sha256').update(Buffer.from(buffer)).digest('hex');
const tempFilename = `upload_${hash}.tmp`;
inputPath = path.join(CACHE_DIR, tempFilename);
await fs.writeFile(inputPath, Buffer.from(buffer));
inputSource = 'file';
} else {
// For URL, we use the URL directly
inputPath = url!;
inputSource = 'url';
}
// Create Job via Boss
const jobId = await this.boss.send(VIDEO_JOB_NAME, {
inputSource,
inputPath: inputSource === 'file' ? inputPath : undefined,
inputUrl: inputSource === 'url' ? inputPath : undefined,
preset: presetName
});
if (!jobId) {
throw new Error('Failed to create job');
}
return c.json({
jobId: jobId,
preset: presetName,
message: 'Video processing started'
}, 202);
} catch (err: any) {
logger.error({ err }, 'Failed to start video job');
return c.json({ error: err.message }, 500);
}
}
async handleUploadVideo(c: Context) {
try {
if (!this.boss) {
return c.json({ error: 'Service Unavailable' }, 503);
}
const body = await c.req.parseBody();
const file = body['file'] as File;
const presetName = c.req.query('preset') || 'original';
if (!file) {
return c.json({ error: 'No file provided' }, 400);
}
const buffer = await file.arrayBuffer();
const hash = createHash('sha256').update(Buffer.from(buffer)).digest('hex');
const tempFilename = `upload_${hash}.tmp`;
const inputPath = path.join(CACHE_DIR, tempFilename);
await fs.writeFile(inputPath, Buffer.from(buffer));
// Probe
const { probeVideo } = await import('./probe.js');
const meta = await probeVideo(inputPath);
// Submit Job
const jobId = await this.boss.send(VIDEO_JOB_NAME, {
inputSource: 'file',
inputPath: inputPath,
preset: presetName
});
if (!jobId) throw new Error('Job creation failed');
// Generate Thumbnail
const { extractThumbnail } = await import('./thumbnail.js');
const thumbFilename = `${jobId}_thumb.jpg`;
// Ensure CACHE_DIR exists
await fs.mkdir(CACHE_DIR, { recursive: true });
await extractThumbnail(inputPath, CACHE_DIR, thumbFilename);
// Prepare DB Entry
const { supabase } = await import('../../commons/supabase.js');
const baseUrl = process.env.SERVER_IMAGE_API_URL || process.env.VITE_SERVER_IMAGE_API_URL || 'http://localhost:3333';
// Should point to HLS playlist
const hlsUrl = `${baseUrl}/api/videos/jobs/${jobId}/hls/playlist.m3u8`;
const thumbnailUrl = `${baseUrl}/api/images/cache/${thumbFilename}`;
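// The meta mirrors Mux's asset shape, presumably so Mux-shaped consumers keep working;
// asset/upload/playback IDs are synthesized from the local job ID.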
const dbMeta = {
status: "ready", // Ideally 'processing', but for now 'ready' means ready to exist in DB
tracks: [
{
id: jobId,
type: "video",
duration: meta.duration,
max_width: meta.width,
max_height: meta.height,
max_frame_rate: meta.fps,
...meta.stream
}
],
duration: meta.duration,
created_at: Math.floor(Date.now() / 1000).toString(),
aspect_ratio: meta.aspectRatio,
mux_asset_id: `asset_${jobId}`,
mux_upload_id: `upload_${jobId}`,
mux_playback_id: jobId,
max_stored_frame_rate: meta.fps,
max_stored_resolution: meta.height >= 2160 ? 'UHD' : (meta.height >= 1080 ? 'HD' : 'SD')
};
const userId = c.req.query('userId') || '00000000-0000-0000-0000-000000000000';
const title = c.req.query('title') || 'Video Upload';
const { data: dbData, error: dbError } = await supabase
.from('pictures')
.insert({
meta: dbMeta,
image_url: hlsUrl,
thumbnail_url: thumbnailUrl,
title: title,
user_id: userId,
type: 'video-intern'
})
.select()
.single();
if (dbError) {
logger.error({ dbError }, 'Supabase insert failed');
throw new Error(`Database insert failed: ${JSON.stringify(dbError)}`);
}
return c.json({
jobId,
dbId: dbData.id,
videoUrl: hlsUrl,
thumbnailUrl,
meta: dbMeta
});
} catch (err: any) {
logger.error({ err }, 'Upload Video failed');
return c.json({ error: err.message }, 500);
}
}
async handleGetJob(c: Context) {
if (!this.boss) {
return c.json({ error: 'Service unavailable' }, 503);
}
const id = c.req.param('id');
// @ts-ignore
const job = await this.boss.getJobById(VIDEO_JOB_NAME, id);
if (!job) {
return c.json({ error: 'Job not found' }, 404);
}
const baseUrl = process.env.SERVER_IMAGE_API_URL || process.env.VITE_SERVER_IMAGE_API_URL || 'http://localhost:3333';
const status = job.state;
const result = job.output as any;
// Use HLS URL for completed jobs
const resultUrl = status === 'completed'
? `${baseUrl}/api/videos/jobs/${job.id}/hls/playlist.m3u8`
: undefined;
let progress = 0;
if (status === 'completed') progress = 100;
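// Progress is coarse here (0 or 100); live percentages stream via the SSE progress endpoint instead.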
return c.json({
id: job.id,
status: status,
progress: progress,
resultUrl,
error: null // TODO: surface the failure reason from job.output when state === 'failed'
});
}
async handleGetHlsPlaylist(c: Context) {
const id = c.req.param('id');
const filepath = path.join(STORAGE_DIR, id, 'playlist.m3u8');
try {
const { createReadStream, statSync } = await import('fs');
statSync(filepath); // Check exist
const stream = createReadStream(filepath);
c.header('Content-Type', 'application/vnd.apple.mpegurl');
// @ts-ignore
return c.body(stream);
} catch (e) {
return c.text('Playlist not found', 404);
}
}
async handleCancelJob(c: Context) {
if (!this.boss) {
return c.json({ error: 'Service unavailable' }, 503);
}
const id = c.req.param('id');
logger.info({ jobId: id }, 'Received cancel request');
// Cancel PgBoss Job
try {
await this.boss.cancel(VIDEO_JOB_NAME, id);
} catch (e) {
console.error('Failed to cancel boss job', e);
}
// Kill Worker Process
// Assuming single worker for now
const worker = this.workers.find(w => w.queueName === VIDEO_JOB_NAME);
if (worker) {
worker.cancelJob(id);
}
return c.json({ message: 'Cancellation requested' });
}
async handleGetHlsSegment(c: Context) {
const id = c.req.param('id');
const segment = c.req.param('segment');
// Simple security check
if (segment.includes('..') || segment.includes('/') || segment.includes('\\')) {
return c.text('Invalid segment', 400);
}
const filepath = path.join(STORAGE_DIR, id, segment);
try {
const { createReadStream, statSync } = await import('fs');
statSync(filepath);
const stream = createReadStream(filepath);
c.header('Content-Type', 'video/MP2T');
// @ts-ignore
return c.body(stream);
} catch (e) {
return c.text('Segment not found', 404);
}
}
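// Client-side consumption sketch (hypothetical snippet, not part of this API):
//   const es = new EventSource(`/api/videos/jobs/${id}/progress`);
//   es.addEventListener('progress', (e) => console.log(JSON.parse(e.data)));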
async handleGetJobProgress(c: Context) {
const id = c.req.param('id');
return streamSSE(c, async (stream) => {
const listener = (data: any) => {
// data could be { progress: number } or { log: string }
stream.writeSSE({
data: JSON.stringify(data),
event: 'progress',
id: String(Date.now())
});
};
this.events.on(`progress:${id}`, listener);
// Send initial connection message
await stream.writeSSE({
data: JSON.stringify({ connected: true }),
event: 'connected',
id: String(Date.now())
});
stream.onAbort(() => {
this.events.off(`progress:${id}`, listener);
});
// Wait forever (or until client disconnects)
await new Promise(() => { });
});
}
async handleDownloadVideo(c: Context) {
if (!this.boss) {
return c.json({ error: 'Service unavailable' }, 503);
}
const id = c.req.param('id');
// PgBoss getJobById usually takes (queue, id) or similar in newer versions if partitioned
// @ts-ignore
const job = await this.boss.getJobById(VIDEO_JOB_NAME, id);
if (!job || job.state !== 'completed') {
return c.text('Video not found or not ready', 404);
}
const result = job.output as any;
if (!result || !result.resultPath) {
return c.text('Video result missing', 404);
}
const filepath = path.join(STORAGE_DIR, result.resultPath);
logger.debug({ id, filepath }, 'Resolving download path');
try {
const { createReadStream, statSync } = await import('fs');
const stats = statSync(filepath);
const fileSize = stats.size;
const range = c.req.header('range');
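// e.g. "Range: bytes=0-1023" yields 206 Partial Content with "Content-Range: bytes 0-1023/<fileSize>".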
if (range) {
const parts = range.replace(/bytes=/, "").split("-");
const start = parseInt(parts[0], 10);
const end = parts[1] ? parseInt(parts[1], 10) : fileSize - 1;
const chunksize = (end - start) + 1;
const stream = createReadStream(filepath, { start, end });
c.status(206);
c.header('Content-Range', `bytes ${start}-${end}/${fileSize}`);
c.header('Accept-Ranges', 'bytes');
c.header('Content-Length', chunksize.toString());
c.header('Content-Type', 'video/mp4');
// @ts-ignore
return c.body(stream);
} else {
const stream = createReadStream(filepath);
c.header('Content-Length', fileSize.toString());
c.header('Content-Type', 'video/mp4');
c.header('Accept-Ranges', 'bytes');
// @ts-ignore
return c.body(stream);
}
} catch (e) {
return c.text('File missing on disk', 500);
}
}
async handleProxyVideo(c: Context) {
const url = c.req.query('url');
if (!url) {
// @ts-ignore
return c.text('Missing URL', 400);
}
try {
// Forward the request to the target URL
const response = await fetch(url);
if (!response.ok) {
return c.text(`Proxy fetch failed: ${response.statusText}`, response.status as ContentfulStatusCode);
}
// Stream the response body
if (response.body) {
// Forward headers
const contentType = response.headers.get('content-type');
const contentLength = response.headers.get('content-length');
if (contentType) c.header('Content-Type', contentType);
if (contentLength) c.header('Content-Length', contentLength);
c.header('Access-Control-Allow-Origin', '*'); // Ensure CORS is allowed for the proxy
// @ts-ignore
return c.body(response.body);
} else {
// @ts-ignore
return c.text('No content', 204);
}
} catch (e: any) {
console.error('Proxy error:', e);
// @ts-ignore
return c.text('Proxy error', 500);
}
}
}

View File

@@ -0,0 +1,103 @@
import fs from 'fs/promises';
import path from 'path';
import { logger } from '../../commons/logger.js';
import { randomUUID } from 'crypto';
export type JobStatus = 'pending' | 'processing' | 'completed' | 'failed';
export interface VideoJob {
id: string;
status: JobStatus;
inputSource: 'file' | 'url';
inputPath?: string; // For file uploads
inputUrl?: string; // For URL downloads
preset: string;
createdAt: number;
updatedAt: number;
progress: number;
resultPath?: string;
error?: string;
}
const JOBS_FILE = path.join(process.cwd(), 'cache', 'video-jobs.json');
export class SimpleJobQueue {
private jobs: Map<string, VideoJob> = new Map();
private loaded = false;
private saveTimer: NodeJS.Timeout | null = null;
constructor() {
this.load();
}
private async load() {
try {
const data = await fs.readFile(JOBS_FILE, 'utf-8');
const json = JSON.parse(data);
if (Array.isArray(json)) {
json.forEach(j => this.jobs.set(j.id, j));
}
this.loaded = true;
} catch (e) {
// Check if file missing
this.loaded = true;
logger.info('Initialized new video jobs queue');
}
}
private async save() {
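// Debounce: coalesce bursts of update() calls into a single disk write per 500ms window.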
if (!this.loaded) return;
if (this.saveTimer) return;
this.saveTimer = setTimeout(async () => {
try {
const jobsArray = Array.from(this.jobs.values());
await fs.writeFile(JOBS_FILE, JSON.stringify(jobsArray, null, 2));
} catch (e) {
logger.error({ err: e }, 'Failed to save video jobs');
} finally {
this.saveTimer = null;
}
}, 500);
}
async add(jobData: Omit<VideoJob, 'id' | 'status' | 'createdAt' | 'updatedAt' | 'progress'>): Promise<VideoJob> {
if (!this.loaded) await this.load();
const job: VideoJob = {
id: randomUUID(),
status: 'pending',
createdAt: Date.now(),
updatedAt: Date.now(),
progress: 0,
...jobData
};
this.jobs.set(job.id, job);
this.save(); // Fire and forget save
return job;
}
async get(id: string): Promise<VideoJob | undefined> {
if (!this.loaded) await this.load();
return this.jobs.get(id);
}
async update(id: string, updates: Partial<VideoJob>) {
if (!this.loaded) await this.load();
const job = this.jobs.get(id);
if (job) {
Object.assign(job, updates, { updatedAt: Date.now() });
this.jobs.set(id, job);
this.save();
}
}
async list(): Promise<VideoJob[]> {
if (!this.loaded) await this.load();
return Array.from(this.jobs.values()).sort((a, b) => b.createdAt - a.createdAt);
}
}
export const jobQueue = new SimpleJobQueue();

View File

@@ -0,0 +1,30 @@
import pino from 'pino';
import path from 'path';
const logFile = path.join(process.cwd(), 'logs', 'videos.json');
const fileTransport = pino.transport({
target: 'pino/file',
options: { destination: logFile, mkdir: true }
});
const consoleTransport = pino.transport({
target: 'pino-pretty',
options: {
colorize: true,
ignore: 'pid,hostname',
destination: 1,
},
});
export const logger = pino(
{
level: process.env.PINO_LOG_LEVEL || 'info',
base: { product: 'videos' },
timestamp: pino.stdTimeFunctions.isoTime,
},
pino.multistream([
{ stream: fileTransport, level: 'info' },
{ stream: consoleTransport, level: 'info' },
])
);

View File

@@ -0,0 +1,56 @@
export interface VideoPreset {
id: string;
description: string;
format: 'mp4' | 'webm';
videoBitrate?: string; // e.g. '1000k'
audioBitrate?: string; // e.g. '128k'
size?: string; // e.g. '1920x1080' or '1280x?'
fps?: number;
}
export const presets: Record<string, VideoPreset> = {
'web-1080p': {
id: 'web-1080p',
description: '1080p Web-optimized MP4',
format: 'mp4',
videoBitrate: '2500k',
audioBitrate: '128k',
size: '1920x1080'
},
'web-720p': {
id: 'web-720p',
description: '720p Web-optimized MP4',
format: 'mp4',
videoBitrate: '1500k',
audioBitrate: '128k',
size: '1280x720'
},
'mobile-480p': {
id: 'mobile-480p',
description: '480p Mobile-friendly MP4',
format: 'mp4',
videoBitrate: '800k',
audioBitrate: '96k',
size: '854x480'
},
'webm-720p': {
id: 'webm-720p',
description: '720p WebM (VP9/Opus)',
format: 'webm',
videoBitrate: '1200k',
audioBitrate: '128k',
size: '1280x720'
},
'original': {
id: 'original',
description: 'Original Resolution',
format: 'mp4',
videoBitrate: '2500k',
audioBitrate: '128k'
}
};
export const getPreset = (name: string): VideoPreset => {
return presets[name] || presets['original'];
};
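// e.g. getPreset('web-720p') -> 1280x720 MP4 at 1500k video / 128k audio; unknown names fall back to 'original'.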

View File

@@ -0,0 +1,71 @@
import ffmpeg from '@thedave42/fluent-ffmpeg';
import { logger } from './logger.js';
export interface VideoMetadata {
duration: number;
width: number;
height: number;
rotation: number;
aspectRatio: string;
fps: number;
stream: Record<string, any>;
}
export function probeVideo(filepath: string): Promise<VideoMetadata> {
return new Promise((resolve, reject) => {
ffmpeg.ffprobe(filepath, (err, metadata) => {
if (err) {
logger.error({ err }, 'ffprobe failed');
reject(err);
return;
}
logger.debug({ duration: metadata?.format?.duration }, 'ffprobe succeeded');
const format = metadata.format;
const stream = metadata.streams.find(s => s.codec_type === 'video');
if (!stream) {
reject(new Error('No video stream found'));
return;
}
const width = stream.width || 0;
const height = stream.height || 0;
const duration = format.duration || stream.duration || 0;
// Calculate Aspect Ratio or use display_aspect_ratio
let aspectRatio = stream.display_aspect_ratio;
if (!aspectRatio && width && height) {
const gcd = (a: number, b: number): number => b ? gcd(b, a % b) : a;
const divisor = gcd(width, height);
aspectRatio = `${width / divisor}:${height / divisor}`;
}
// FPS
let fps = 0;
if (stream.r_frame_rate) {
const parts = stream.r_frame_rate.split('/');
if (parts.length === 2) {
fps = parseInt(parts[0]) / parseInt(parts[1]);
} else {
fps = parseFloat(stream.r_frame_rate);
}
}
resolve({
duration: typeof duration === 'string' ? parseFloat(duration) : duration,
width,
height,
rotation: (stream.rotation as number) || 0,
aspectRatio: aspectRatio || '16:9',
fps,
stream
});
});
});
}

View File

@@ -0,0 +1,101 @@
import ffmpeg from '@thedave42/fluent-ffmpeg';
import path from 'path';
import fs from 'fs/promises';
import { VideoJob, jobQueue } from './jobs.js';
import { getPreset } from './presets.js';
import { logger } from '../../commons/logger.js';
// Ensure ffmpeg is available in path or set it here if needed
// ffmpeg.setFfmpegPath('/path/to/ffmpeg');
const CACHE_DIR = path.join(process.cwd(), 'cache');
export async function processVideoJob(jobId: string) {
const job = await jobQueue.get(jobId);
if (!job) return;
await jobQueue.update(job.id, { status: 'processing', progress: 0 });
try {
const preset = getPreset(job.preset);
const outputFilename = `${job.id}.${preset.format}`;
const outputPath = path.join(CACHE_DIR, outputFilename);
// Ensure input exists
let inputPath = job.inputPath;
// If URL, we could stream it or download it first.
// fluent-ffmpeg handles URLs, but downloading first is safer for stability.
if (job.inputSource === 'url' && job.inputUrl) {
logger.info({ jobId, url: job.inputUrl }, 'Downloading video from URL');
const res = await fetch(job.inputUrl);
if (!res.ok) throw new Error(`Failed to download video: ${res.statusText}`);
const arrayBuffer = await res.arrayBuffer();
const buffer = Buffer.from(arrayBuffer);
const tempFilename = `download_${job.id}.tmp`;
inputPath = path.join(CACHE_DIR, tempFilename);
await fs.writeFile(inputPath, buffer);
logger.info({ jobId, inputPath }, 'Download complete');
}
if (!inputPath) {
throw new Error('No input path or URL provided');
}
logger.info({ jobId, preset: preset.id }, 'Starting ffmpeg processing');
return new Promise<void>((resolve, reject) => {
let command = ffmpeg(inputPath);
// Apply Size
if (preset.size) {
command = command.size(preset.size);
}
// Apply FPS
if (preset.fps) {
command = command.fps(preset.fps);
}
// Apply Bitrates
if (preset.videoBitrate) {
command = command.videoBitrate(preset.videoBitrate);
}
if (preset.audioBitrate) {
command = command.audioBitrate(preset.audioBitrate);
}
let lastPercent = -1;
command
.on('start', (commandLine) => {
logger.info({ jobId, commandLine }, 'Spawned Ffmpeg');
})
.on('progress', (progress) => {
const percent = progress.percent ? Math.round(progress.percent) : 0;
if (percent > lastPercent + 5 || percent === 100) {
lastPercent = percent;
// logger.debug({ jobId, percent }, 'Progress update');
jobQueue.update(job.id, { progress: percent });
}
})
.on('error', (err) => {
logger.error({ jobId, err }, 'An error occurred processing video');
jobQueue.update(job.id, { status: 'failed', error: err.message }).then(() => reject(err));
})
.on('end', () => {
logger.info({ jobId }, 'Processing finished!');
jobQueue.update(job.id, { status: 'completed', progress: 100, resultPath: outputFilename }).then(() => resolve());
})
.save(outputPath);
});
} catch (err: any) {
logger.error({ jobId, err }, 'Fatal job error');
await jobQueue.update(job.id, { status: 'failed', error: err.message });
}
}

View File

@@ -0,0 +1,239 @@
import { createLogRoutes } from '@/commons/log-routes-factory.js';
import { createRoute, z } from '@hono/zod-openapi';
export const { getRoute: getVideoLogsRoute, streamRoute: streamVideoLogsRoute } = createLogRoutes('Videos', '/api/videos/logs');
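// Example usage (assumed local dev URL and preset names):
//   curl -F "file=@clip.mp4" "http://localhost:3333/api/videos?preset=web-720p"   # -> 202 { jobId, ... }
//   curl "http://localhost:3333/api/videos/jobs/<jobId>"                          # poll job status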
export const postVideoRoute = createRoute({
method: 'post',
path: '/api/videos',
request: {
query: z.object({
preset: z.string().optional().openapi({ example: 'web-720p' }),
url: z.string().optional().openapi({ example: 'https://example.com/video.mp4' })
}),
body: {
content: {
'multipart/form-data': {
schema: z.object({
file: z.any().optional().openapi({ type: 'string', format: 'binary' }),
})
},
'application/json': { // JSON body also accepted for URL-only requests; the handler parses JSON, multipart, and query
schema: z.object({
url: z.string().optional()
})
}
}
}
},
responses: {
202: {
content: {
'application/json': {
schema: z.object({
jobId: z.string(),
preset: z.string(),
message: z.string()
})
}
},
description: 'Job Accepted'
}
}
});
export const uploadVideoRoute = createRoute({
method: 'post',
path: '/api/videos/upload',
request: {
query: z.object({
preset: z.string().optional().openapi({ example: 'web-720p' }),
url: z.string().optional().openapi({ example: 'https://example.com/video.mp4' })
}),
body: {
content: {
'multipart/form-data': {
schema: z.object({
file: z.any().optional().openapi({ type: 'string', format: 'binary' }),
})
}
}
}
},
responses: {
200: {
content: {
'application/json': {
schema: z.object({
jobId: z.string(),
dbId: z.string(),
videoUrl: z.string(),
thumbnailUrl: z.string(),
meta: z.object({
duration: z.number().optional(),
width: z.number().optional(),
height: z.number().optional(),
format: z.string().optional()
}).optional()
})
}
},
description: 'Video Uploaded and Database Entry Created'
}
}
});
export const getJobRoute = createRoute({
method: 'get',
path: '/api/videos/jobs/{id}',
request: {
params: z.object({
id: z.string()
})
},
responses: {
200: {
content: {
'application/json': {
schema: z.object({
id: z.string(),
status: z.string(),
progress: z.number(),
resultUrl: z.string().optional(),
error: z.string().optional()
})
}
},
description: 'Job Status'
}
}
});
export const getJobProgressRoute = createRoute({
method: 'get',
path: '/api/videos/jobs/{id}/progress',
request: {
params: z.object({
id: z.string().openapi({ param: { name: 'id', in: 'path' }, example: '123' })
})
},
responses: {
200: {
content: {
'text/event-stream': {
schema: z.string()
}
},
description: 'Job Progress SSE Stream'
},
404: {
description: 'Job not found'
}
}
});
export const downloadVideoRoute = createRoute({
method: 'get',
path: '/api/videos/jobs/{id}/download',
request: {
params: z.object({
id: z.string()
})
},
responses: {
200: {
description: 'Video File Stream'
},
404: {
description: 'Not Found'
}
}
});
export const getHlsPlaylistRoute = createRoute({
method: 'get',
path: '/api/videos/jobs/{id}/hls/playlist.m3u8',
request: {
params: z.object({
id: z.string()
})
},
responses: {
200: {
content: {
'application/vnd.apple.mpegurl': {
schema: z.string()
}
},
description: 'HLS Playlist'
},
404: {
description: 'Not Found'
}
}
});
export const getHlsSegmentRoute = createRoute({
method: 'get',
path: '/api/videos/jobs/{id}/hls/{segment}',
request: {
params: z.object({
id: z.string(),
segment: z.string()
})
},
responses: {
200: {
content: {
'video/MP2T': {
schema: z.any()
}
},
description: 'HLS Segment'
},
404: {
description: 'Not Found'
}
}
});
export const cancelJobRoute = createRoute({
method: 'delete',
path: '/api/videos/jobs/{id}',
request: {
params: z.object({
id: z.string()
})
},
responses: {
200: {
description: 'Job Cancelled'
},
404: {
description: 'Job Not Found'
}
}
});
export const proxyVideoRoute = createRoute({
method: 'get',
path: '/api/videos/proxy',
request: {
query: z.object({
url: z.string().openapi({ example: 'https://generativelanguage.googleapis.com/...' })
})
},
responses: {
200: {
description: 'Video Stream (Proxy)'
},
400: {
description: 'Missing URL'
},
500: {
description: 'Proxy Error'
}
}
});

View File

@@ -0,0 +1,24 @@
import ffmpeg from '@thedave42/fluent-ffmpeg';
import { logger } from './logger.js';
import path from 'path';
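// Extracts a poster frame at t=0, scaled to 640px wide with aspect ratio preserved ('640x?').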
export function extractThumbnail(videoPath: string, outputDir: string, filename: string): Promise<string> {
return new Promise((resolve, reject) => {
const outputPath = path.join(outputDir, filename);
ffmpeg(videoPath)
.screenshots({
timestamps: [0],
filename: filename,
folder: outputDir,
size: '640x?'
})
.on('end', () => {
resolve(outputPath);
})
.on('error', (err) => {
logger.error({ err }, 'Thumbnail generation failed');
reject(err);
});
});
}

View File

@@ -0,0 +1,166 @@
import { Job } from 'pg-boss';
import { AbstractWorker } from '../../jobs/boss/AbstractWorker.js';
import { Worker } from '../../commons/decorators.js';
import ffmpeg from '@thedave42/fluent-ffmpeg';
import path from 'path';
import fs from 'fs/promises';
import { getPreset } from './presets.js';
import { logger } from './logger.js';
import { EventEmitter } from 'events';
export const VIDEO_JOB_NAME = 'video-processing';
const STORAGE_DIR = path.resolve(process.cwd(), process.env.VIDEO_STORE || 'videos');
const CACHE_DIR = path.join(process.cwd(), 'cache'); // Keep cache for temp inputs
export interface VideoJobData {
inputSource: 'file' | 'url';
inputPath?: string;
inputUrl?: string;
preset: string;
}
@Worker(VIDEO_JOB_NAME)
export class VideoWorker extends AbstractWorker<VideoJobData> {
readonly queueName = VIDEO_JOB_NAME;
public events?: EventEmitter;
private activeCommands = new Map<string, ffmpeg.FfmpegCommand>();
calculateCost(job: Job<VideoJobData>, result: any): number {
// Simple cost for now, maybe dependent on duration later
return 1;
}
protected async process(job: Job<VideoJobData>) {
const { id, data } = job;
const preset = getPreset(data.preset);
// Output directory for this job
const jobDir = path.join(STORAGE_DIR, id);
const outputFilename = 'playlist.m3u8';
const outputPath = path.join(jobDir, outputFilename);
let inputPath = data.inputPath;
// Ensure directories exist
try {
await fs.mkdir(CACHE_DIR, { recursive: true });
await fs.mkdir(STORAGE_DIR, { recursive: true });
await fs.mkdir(jobDir, { recursive: true }); // Create job specific dir
} catch (err) { }
// Handle URL download if needed
if (data.inputSource === 'url' && data.inputUrl) {
logger.info({ jobId: id, url: data.inputUrl }, 'Downloading video from URL');
const res = await fetch(data.inputUrl);
if (!res.ok) throw new Error(`Failed to download video: ${res.statusText}`);
const arrayBuffer = await res.arrayBuffer();
const buffer = Buffer.from(arrayBuffer);
const tempFilename = `download_${id}.tmp`;
inputPath = path.join(CACHE_DIR, tempFilename);
await fs.writeFile(inputPath, buffer);
logger.info({ jobId: id, inputPath }, 'Download complete');
}
if (!inputPath) {
throw new Error('No input path or URL provided');
}
logger.info({ jobId: id, preset: preset.id }, 'Starting ffmpeg HLS processing');
await new Promise<void>((resolve, reject) => {
let command = ffmpeg(inputPath!);
this.activeCommands.set(id, command);
// HLS Options
// ffmpeg -i input.mp4 -b:v 1M -g 60 -hls_time 2 -hls_list_size 0 -hls_segment_size 500000 output.m3u8
const threads = process.env.FFMPEG_THREADS || '1';
let presetName = process.env.FFMPEG_PRESET || 'copy';
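// FFMPEG_PRESET=copy remuxes without re-encoding (fast; segments split on existing keyframes);
// any x264 preset name (e.g. 'veryfast') switches to the full transcode branch below.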
if (presetName === 'copy') {
logger.info({ jobId: id }, 'Using COPY mode (no transcode)');
command = command
.outputOptions([
'-c copy',
// HLS flags for copy mode
'-hls_time 10', // Target 10s segments, but will split on keyframes
'-hls_list_size 0',
'-f hls'
]);
} else {
// Apply Size only if encoding
if (preset.size) {
command = command.size(preset.size);
}
command = command
.outputOptions([
`-threads ${threads}`,
`-preset ${presetName}`,
'-b:v 1M',
'-g 60',
'-hls_time 300',
'-hls_list_size 0',
'-hls_segment_size 500000'
]);
}
command
.on('start', (commandLine) => {
logger.info({ jobId: id, commandLine }, 'Spawned Ffmpeg');
})
.on('progress', (progress) => {
if (this.events) {
const p = progress.percent || 0;
this.events.emit(`progress:${id}`, { progress: p });
}
})
.on('stderr', (stderrLine) => {
if (this.events) {
this.events.emit(`progress:${id}`, { log: stderrLine });
}
})
.on('error', (err) => {
// Check if it was killed
if (err.message.includes('SIGKILL')) {
logger.info({ jobId: id }, 'Process killed');
} else {
logger.error({ jobId: id, err }, 'An error occurred processing video');
}
this.activeCommands.delete(id);
reject(err);
})
.on('end', () => {
logger.info({ jobId: id }, 'Processing finished!');
this.activeCommands.delete(id);
resolve();
})
.save(outputPath);
});
const result = {
success: true,
resultPath: `${id}/${outputFilename}`, // Relative path to storage root
preset: preset.id
};
logger.info({ jobId: id, result }, 'Worker finished processing');
return result;
}
public cancelJob(jobId: string): boolean {
const command = this.activeCommands.get(jobId);
if (command) {
try {
command.kill('SIGKILL');
this.activeCommands.delete(jobId);
logger.info({ jobId: jobId }, 'Job cancelled via kill');
return true;
} catch (err) {
logger.error({ jobId: jobId, err }, 'Failed to kill command');
}
}
return false;
}
}

View File

@@ -0,0 +1,4 @@
import { z } from 'zod';
import { extendZodWithOpenApi } from '@hono/zod-openapi';
extendZodWithOpenApi(z);