mono/packages/media/cpp/ref/images/index.ts
2026-04-12 22:38:43 +02:00

1195 lines
52 KiB
TypeScript

import { Context } from 'hono';
import { AbstractProduct } from '../AbstractProduct.js';
import { postImageRoute, getImageRoute, postResponsiveImageRoute, getResponsiveImageRoute, getImageLogsRoute, streamImageLogsRoute, renderImageRoute, postTransformRoute } from './routes.js';
import { createLogHandlers } from '../../commons/log-routes-factory.js';
import { CachedHandler } from '../../commons/decorators.js';
import sharp from 'sharp';
import fs from 'fs/promises';
import path from 'path';
import { createHash } from 'crypto';
import { logger } from './logger.js';
import { getPresets } from './presets.js';
import { hasWorker, dispatchToWorker } from '../../commons/worker-ipc.js';
import { requireAuth } from '../storage/api/vfs-auth.js';
import { createVFS, resolveMountByName, SYSTEM_MOUNT_OWNER_ID, type IExtendedMount } from '../storage/api/vfs-core.js';
import { ensureVfsSettings, saveVfsSettings } from '../storage/api/acl-helpers.js';
import { readVfsFileBuffer } from '../storage/api/vfs-read.js';
import 'dotenv/config';
// On-disk cache directory for processed variants and fetched source images.
const CACHE_DIR = path.join(process.cwd(), 'cache');
// HTTP cache lifetime (seconds) used on immutable variant responses.
const CACHE_TTL = 31536000; // 1 year
// Map to track ongoing fetch requests by URL to prevent thundering herd
const ongoingFetches = new Map<string, Promise<Buffer>>();
// Map to track ongoing render operations by hash key — prevents concurrent
// processing (and concurrent writes to the same cache file) for identical variants
const ongoingRenders = new Map<string, Promise<Buffer>>();
// Server-side concurrency limiter for Sharp / libvips work.
// Sharp uses the libuv thread pool (UV_THREADPOOL_SIZE, default 4) — many overlapping
// encodes still compete for CPU and can starve other HTTP handlers on the same process.
// Tune IMAGE_ENCODE_MAX_CONCURRENT (default 2) so the event loop still gets turns; raise
// UV_THREADPOOL_SIZE in the process env only if you raise this and still see backlog.
// IMAGE_ENCODE_YIELD_MS — pause before Sharp (default 50ms, minimum 50) so other requests run.
// Clamped to [1, 32]; the trailing `|| 2` covers an unset or NaN env value.
const MAX_CONCURRENT_ENCODES = Math.max(
1,
Math.min(32, parseInt(process.env.IMAGE_ENCODE_MAX_CONCURRENT || '2', 10) || 2),
);
// Clamped to >= 50ms; the trailing `|| 50` covers an unset or NaN env value.
const ENCODE_YIELD_MS = Math.max(
50,
parseInt(process.env.IMAGE_ENCODE_YIELD_MS || '50', 10) || 50,
);
// Semaphore state for the encode limiter (see _acquireEncode / _releaseEncode).
let _encodeActive = 0;
const _encodeQueue: (() => void)[] = [];
/** Snapshot of the encode semaphore state, shaped for structured log lines. */
function _encodeQueueStats() {
  const snapshot = {
    encodeActive: _encodeActive,
    encodeWaiting: _encodeQueue.length,
    encodeMax: MAX_CONCURRENT_ENCODES,
  };
  return snapshot;
}
/** Delay before Sharp so the event loop can flush I/O and other handlers (min 50ms). */
async function yieldBeforeEncode(): Promise<void> {
  await new Promise<void>((resolve) => setTimeout(resolve, ENCODE_YIELD_MS));
}
/**
 * Acquire a slot in the encode semaphore.
 * Resolves immediately when a slot is free; otherwise the caller is queued and
 * resolved when _releaseEncode hands a slot over. Returns how long the caller
 * waited (0 on the fast path) so handlers can log queueing latency.
 */
function _acquireEncode(reqId?: string): Promise<{ waitMs: number }> {
const t0 = performance.now();
// Fast path: a slot is free right now.
if (_encodeActive < MAX_CONCURRENT_ENCODES) {
_encodeActive++;
return Promise.resolve({ waitMs: 0 });
}
const stats = _encodeQueueStats();
logger.info(
{ reqId, ...stats, note: 'encode queue — other /api work can still be accepted; this request waits' },
'encode: waiting for slot',
);
// Slow path: park the resolver; _releaseEncode invokes it when a slot frees.
// Note: _encodeActive is NOT incremented here — the releasing request's slot
// is handed over directly, so the count stays balanced.
return new Promise<{ waitMs: number }>((resolve) => {
_encodeQueue.push(() => {
const waitMs = performance.now() - t0;
// Only log meaningful waits, or when others are still queued behind us.
if (waitMs > 50 || _encodeQueue.length > 0) {
logger.info({ reqId, waitMs: Math.round(waitMs), ..._encodeQueueStats() }, 'encode: slot granted');
}
resolve({ waitMs });
});
});
}
/**
 * Release an encode slot. If a request is queued, the slot is handed directly
 * to it (active count unchanged); otherwise the active count drops.
 */
function _releaseEncode(): void {
  const next = _encodeQueue.shift();
  if (next) {
    next();
  } else {
    _encodeActive--;
  }
}
/**
 * Try to read a VFS file directly (no HTTP) when the URL points to this server.
 * Returns the Buffer, or null when the URL isn't a local VFS path or the read fails.
 */
async function tryLocalVfsRead(url: string): Promise<Buffer | null> {
  const baseUrl = (process.env.SERVER_IMAGE_API_URL || process.env.SERVER_URL_R || '').replace(/\/$/, '');
  if (!baseUrl || !url.startsWith(baseUrl)) return null;
  // Only URLs of the shape <base>/api/vfs/get/<mount>/<subpath> qualify.
  const match = url.slice(baseUrl.length).match(/^\/api\/vfs\/get\/([^/]+)\/(.+)$/);
  if (!match) return null;
  const [, rawMount, rawSubpath] = match;
  const mountName = decodeURIComponent(rawMount);
  const subpath = decodeURIComponent(rawSubpath);
  try {
    const buf = await readVfsFileBuffer(mountName, subpath);
    logger.info({ mountName, subpath, sizeKB: Math.round(buf.length / 1024) }, 'fetch: local VFS (no HTTP)');
    return buf;
  } catch (err: any) {
    // Missing file is expected (caller falls back to HTTP); anything else is logged louder.
    if (err.code === 'ENOENT') {
      logger.warn({ mountName, subpath }, 'Local VFS read: file not found');
    } else {
      logger.error({ err, mountName, subpath }, 'Local VFS read failed, falling back to HTTP');
    }
    return null;
  }
}
/**
 * Sanitize and validate an image URL.
 * Strips markdown artifacts, null bytes, and validates protocol/credentials.
 * Returns the cleaned URL or an error string starting with "ERR:".
 */
export function sanitizeImageUrl(raw: string): string {
  // Normalization pipeline: drop null bytes, unwrap markdown image/link
  // syntax, then trim and peel surrounding angle brackets / parentheses.
  const cleaned = raw
    .replace(/\0/g, '')
    .replace(/^!\[.*?\]\((.+?)\)$/, '$1')
    .replace(/^\[.*?\]\((.+?)\)$/, '$1')
    .trim()
    .replace(/^[<(]+|[>)]+$/g, '');
  // Length cap
  if (cleaned.length > 2048) return 'ERR:URL too long';
  let parsed: URL;
  try {
    parsed = new URL(cleaned);
  } catch {
    return `ERR:Invalid URL: ${cleaned}`;
  }
  // Protocol whitelist — only plain web fetches are allowed.
  if (parsed.protocol !== 'http:' && parsed.protocol !== 'https:') {
    return `ERR:Invalid URL protocol: ${parsed.protocol}`;
  }
  // Embedded credentials are rejected outright.
  if (parsed.username || parsed.password) {
    return 'ERR:URLs with credentials are not allowed';
  }
  // Block well-known cloud metadata endpoints (SSRF).
  const host = parsed.hostname.toLowerCase();
  if (host === '169.254.169.254' || host === 'metadata.google.internal') {
    return 'ERR:URL points to a blocked address';
  }
  return cleaned;
}
/**
 * Ensures an image variant is cached on disk.
 * 1. Derive the cache filename from a hash of content + resize params
 * 2. Return immediately on cache hit
 * 3. Otherwise dispatch to an 'images' worker, or process inline with Sharp
 * Returns the cache filename and whether it was a hit.
 *
 * NOTE(review): `fit` is NOT part of the hash, so variants that differ only by
 * fit share one cache entry. Fixing that requires changing the hash recipe in
 * lockstep at every call site (handleJob, handlePostImage) — confirm before touching.
 */
export async function _ensureCachedImage(inputBuffer: Buffer, width?: number, height?: number, format = 'jpeg', fit = 'inside'): Promise<{ filename: string, hit: boolean }> {
  const hash = createHash('sha256')
    .update(inputBuffer)
    .update(`w${width}h${height}f${format}`)
    .digest('hex');
  const filename = `${hash}.${format}`;
  const filepath = path.join(CACHE_DIR, filename);
  // 1. Check if it already exists
  try {
    await fs.access(filepath);
    return { filename, hit: true }; // Cache hit
  } catch {
    // Cache miss — fall through to processing
  }
  // 2. Process
  if (await hasWorker('images')) {
    // Copy into a plain ArrayBuffer: the Buffer may be backed by pooled /
    // shared memory that can't be transferred to a worker directly.
    const arrayBuffer = new ArrayBuffer(inputBuffer.length);
    new Uint8Array(arrayBuffer).set(inputBuffer);
    await dispatchToWorker('images', 'process_image', {
      buffer: arrayBuffer, width, height, format, fit
    }, [arrayBuffer]); // transfer memory
  } else {
    // Inline fallback (workers=0)
    const pipeline = sharp(inputBuffer);
    if (width || height) {
      pipeline.resize({
        width: width,
        height: height,
        withoutEnlargement: true,
        fit: fit as keyof sharp.FitEnum
      });
    }
    // Speed-tuned format options — avif defaults to effort:4 which is extremely slow
    const formatOpts: Record<string, any> = {
      avif: { effort: 2 },
      webp: { effort: 4 },
    };
    pipeline.toFormat(format as keyof sharp.FormatEnum, formatOpts[format] || {});
    const processedBuffer = await pipeline.toBuffer();
    // FIX: atomic write (temp + rename) so a concurrent request for the same
    // variant never reads a half-written cache file — same pattern as
    // performRenderImage, and the corruption this file's own comments describe.
    const tmpPath = `${filepath}.${process.pid}.${Date.now()}.tmp`;
    await fs.writeFile(tmpPath, processedBuffer);
    await fs.rename(tmpPath, filepath);
  }
  return { filename, hit: false };
}
/**
 * Coalesced fetch helper: concurrent requests for the same URL share one
 * in-flight fetch, preventing a thundering herd against the source server.
 */
async function fetchImageCoalesced(url: string): Promise<Buffer> {
  // Fast path: read local VFS files directly (no HTTP round-trip)
  const localBuf = await tryLocalVfsRead(url);
  if (localBuf) return localBuf;
  const existing = ongoingFetches.get(url);
  if (existing) {
    logger.debug({ url }, 'fetch: coalescing (joining existing request)');
    return existing;
  }
  const fetchPromise = (async (): Promise<Buffer> => {
    const controller = new AbortController();
    const timeoutMs = process.env.IMAGE_FETCH_TIMEOUT_MS ? parseInt(process.env.IMAGE_FETCH_TIMEOUT_MS) : 10000;
    const timeout = setTimeout(() => controller.abort(), timeoutMs);
    try {
      const fetchRes = await fetch(url, { signal: controller.signal });
      clearTimeout(timeout);
      if (!fetchRes.ok) {
        logger.error({
          msg: 'Failed to fetch URL',
          url,
          status: fetchRes.status,
          statusText: fetchRes.statusText,
          headers: Object.fromEntries(fetchRes.headers.entries())
        });
        throw new Error(`Failed to fetch URL: ${fetchRes.statusText} : ${url}`);
      }
      return Buffer.from(await fetchRes.arrayBuffer());
    } catch (err) {
      clearTimeout(timeout);
      throw err;
    }
  })();
  ongoingFetches.set(url, fetchPromise);
  // Drop the map entry only once the promise settles — i.e. after the full
  // body was consumed (or the fetch failed), not merely after headers arrived.
  fetchPromise.then(
    () => ongoingFetches.delete(url),
    () => ongoingFetches.delete(url),
  );
  return fetchPromise;
}
/**
 * Backward-compat for legacy picture thumbnail URLs:
 *   /api/images/cache/<pictureId>_thumb.jpg
 * When such a URL is seen, look up the picture's current image_url in the DB
 * and return that instead. Any failure (bad URL, no match, DB error) falls
 * back to returning the input unchanged.
 */
async function resolveLegacyPictureThumbUrl(rawUrl: string): Promise<string> {
  try {
    const { pathname } = new URL(rawUrl);
    const m = pathname.match(/\/api\/images\/cache\/([0-9a-fA-F-]{36})_thumb\.jpg$/);
    if (!m) return rawUrl;
    const pictureId = m[1];
    // Lazy import keeps the DB dependency off the hot path.
    const { fetchPictureImageUrlByIdPg } = await import('./db-images-pg.js');
    const nextUrl = await fetchPictureImageUrlByIdPg(pictureId);
    if (typeof nextUrl === 'string' && nextUrl) {
      logger.debug({ rawUrl, nextUrl, pictureId }, 'Resolved legacy thumb URL to picture.image_url');
      return nextUrl;
    }
    return rawUrl;
  } catch {
    return rawUrl;
  }
}
/** Convenience wrapper for callers that only need the cache filename, not the hit flag. */
export async function ensureCachedImage(inputBuffer: Buffer, width: number, height: number | undefined, format: string): Promise<string> {
  const result = await _ensureCachedImage(inputBuffer, width, height, format);
  return result.filename;
}
/**
 * Like _ensureCachedImage, but sourced from a URL. The fetched source bytes
 * are cached on disk keyed by a hash of the URL so repeated variant requests
 * for the same source skip the network entirely.
 */
export async function _ensureCachedImageFromUrl(url: string, width: number | undefined, height: number | undefined, format: string): Promise<{ filename: string; hit: boolean }> {
  let inputBuffer: Buffer;
  try {
    const urlHash = createHash('sha256').update(url).digest('hex');
    const sourcePath = path.join(CACHE_DIR, `source_${urlHash}`);
    try {
      inputBuffer = await fs.readFile(sourcePath);
    } catch {
      inputBuffer = await fetchImageCoalesced(url);
      // Best-effort source-cache write; a failure is logged but not fatal.
      await fs.writeFile(sourcePath, inputBuffer).catch(e => {
        logger.error({ err: e }, 'Failed to write source cache');
      });
    }
  } catch (err: any) {
    throw new Error(`Failed to ensure cached image from url ${url} : ${err.message}`);
  }
  return await _ensureCachedImage(inputBuffer, width, height, format);
}
/** Convenience wrapper for URL sources when only the cache filename is needed. */
export async function ensureCachedImageFromUrl(url: string, width: number | undefined, height: number | undefined, format: string): Promise<string> {
  const result = await _ensureCachedImageFromUrl(url, width, height, format);
  return result.filename;
}
export class ImagesProduct extends AbstractProduct<any> {
id = 'images';
jobOptions = {};
actions = {};
workers = [];
routes!: any[];
/**
 * Wires route definitions to handlers. The two log routes share handlers built
 * by createLogHandlers over logs/images.json. The responsive GET route is
 * wrapped in CachedHandler (ttl 300s, skipAuth) — its JSON response is
 * deterministic for a given url+sizes+formats, see handleGetResponsive.
 */
constructor() {
super();
const { getHandler, streamHandler } = createLogHandlers(path.join(process.cwd(), 'logs', 'images.json'));
this.routes = [
{ definition: postImageRoute, handler: this.handlePostImage.bind(this) },
{ definition: getImageRoute, handler: this.handleGetImage.bind(this) },
{ definition: postResponsiveImageRoute, handler: this.handlePostResponsive.bind(this) },
{ definition: getResponsiveImageRoute, handler: CachedHandler(this.handleGetResponsive.bind(this), { ttl: 300, skipAuth: true }) },
{ definition: getImageLogsRoute, handler: getHandler },
{ definition: streamImageLogsRoute, handler: streamHandler },
{ definition: renderImageRoute, handler: this.handleRenderImage.bind(this) },
{ definition: postTransformRoute, handler: this.handleTransformImage.bind(this) }
];
// Public endpoint registration is now handled by the Public() decorator in createRouteBody
}
async onStart() {
  // The on-disk cache directory must exist before any handler writes variants.
  // Failure is logged but not fatal — individual writes will surface errors later.
  await fs.mkdir(CACHE_DIR, { recursive: true }).catch((err) => {
    logger.error({ err }, 'Failed to create cache directory');
  });
}
// Constant placeholder: this product does not key jobs by content, so every
// input hashes to the same token. NOTE(review): presumably intentional for the
// AbstractProduct contract — confirm callers don't expect content-addressing.
hash(data: any): string {
return 'images-hash';
}
// Minimal job metadata: only the owning user id is attached.
meta(userId: string): any {
return { userId };
}
/**
 * Worker-side job dispatcher for the 'images' worker.
 * Both actions recompute the cache filename from the transferred buffer and
 * params. IMPORTANT: the hash recipes here must stay byte-for-byte identical
 * to the ones computed by the HTTP side (_ensureCachedImage / handlePostImage
 * for process_image; the render handler for render_image) — otherwise the
 * worker writes files the handlers never read.
 */
async handleJob(action: string, msg: any): Promise<any> {
if (action === 'process_image') {
const { buffer, width, height, format, fit } = msg;
// `buffer` arrives as a transferred ArrayBuffer; re-wrap as a Buffer view.
const inputBuffer = Buffer.from(buffer);
const hash = createHash('sha256')
.update(inputBuffer)
.update(`w${width}h${height}f${format}`)
.digest('hex');
const filename = `${hash}.${format}`;
const filepath = path.join(CACHE_DIR, filename);
await this.performProcessImage(inputBuffer, filepath, { width, height, format, fit });
return { filename };
}
if (action === 'render_image') {
const { buffer, url, width, height, format, square, contain } = msg;
const inputBuffer = Buffer.from(buffer);
// Render variants are keyed by source URL (not content) plus render params.
const hashKey = createHash('sha256')
.update(url)
.update(`w${width}h${height}f${format}${square ? 'sq' : ''}${contain ? 'ct' : ''}`)
.digest('hex');
const filename = `${hashKey}.${format}`;
const filepath = path.join(CACHE_DIR, filename);
await this.performRenderImage(inputBuffer, filepath, { width, height, format, square, contain });
return { filename };
}
// Unknown actions bubble up to the base class.
return super.handleJob(action, msg);
}
/**
 * Resize/encode an image with Sharp and write it to the cache path.
 * Used by both the worker (handleJob) and the inline fallback in handlePostImage.
 */
private async performProcessImage(inputBuffer: Buffer, filepath: string, options: { width?: number, height?: number, format: string, fit: keyof sharp.FitEnum }) {
  const pipeline = sharp(inputBuffer);
  if (options.width || options.height) {
    pipeline.resize({
      width: options.width,
      height: options.height,
      withoutEnlargement: true,
      fit: options.fit
    });
  }
  // Speed-tuned format options — avif defaults to effort:4 which is extremely slow
  const formatOpts: Record<string, any> = {
    avif: { effort: 2 },
    webp: { effort: 4 },
  };
  pipeline.toFormat(options.format as keyof sharp.FormatEnum, formatOpts[options.format] || {});
  const processedBuffer = await pipeline.toBuffer();
  // FIX: atomic write (temp + rename) so concurrent requests targeting the same
  // cache file never observe a partial write — consistent with performRenderImage,
  // which adopted this after the corrupt-AVIF incident noted elsewhere in this file.
  const tmpPath = `${filepath}.${process.pid}.${Date.now()}.tmp`;
  await fs.writeFile(tmpPath, processedBuffer);
  await fs.rename(tmpPath, filepath);
}
/**
 * Render an image variant (square crop / contain-letterbox / plain fit) and
 * write it atomically to the cache path. Returns the encoded bytes.
 */
private async performRenderImage(inputBuffer: Buffer, filepath: string, options: { width?: number, height?: number, format: string, square: boolean, contain: boolean }): Promise<Buffer> {
  const { width, height, format, square, contain } = options;
  const bgColor = { r: 255, g: 255, b: 255, alpha: 1 };
  let pipeline = sharp(inputBuffer);
  if (square && width) {
    // Square crop: cover a width×width box.
    pipeline = pipeline.resize({
      width,
      height: width,
      fit: 'cover',
      withoutEnlargement: true,
    });
  } else if (contain && (width || height)) {
    // Letterbox onto a white background.
    pipeline = pipeline.resize({
      width,
      height,
      fit: 'contain',
      background: bgColor,
      withoutEnlargement: true,
    });
  } else if (width || height) {
    // Plain proportional fit inside the requested box.
    pipeline = pipeline.resize({
      width,
      height,
      withoutEnlargement: true,
      fit: 'inside',
    });
  }
  // Speed-tuned format options — avif defaults to effort:4 which is extremely slow
  const encoderOpts: Record<string, any> = {
    avif: { effort: 2 },
    webp: { effort: 4 },
  };
  pipeline = pipeline.toFormat(format as keyof sharp.FormatEnum, encoderOpts[format] || {});
  const processedBuffer = await pipeline.toBuffer();
  // Atomic write: temp file then rename to prevent corrupt reads from concurrent requests
  const tmpPath = `${filepath}.${process.pid}.${Date.now()}.tmp`;
  await fs.writeFile(tmpPath, processedBuffer);
  await fs.rename(tmpPath, filepath);
  return processedBuffer;
}
/**
 * POST /api/images — upload, resize/encode, cache, and optionally forward.
 * Query: preset, forward (''|'vfs'|'supabase'), cache ('false' skips cache read),
 * original ('true' keeps source dimensions). Multipart body: 'file' plus
 * optional width/height/format overrides (explicit > preset > defaults).
 * Returns JSON when forwarding, otherwise a 303 redirect to the cached variant.
 */
async handlePostImage(c: Context) {
  const start = performance.now();
  try {
    const body = await c.req.parseBody();
    const file = body['file'];
    const presets = getPresets();
    const presetName = c.req.query('preset');
    const forward = c.req.query('forward') || process.env.IMAGE_UPLOAD_TARGET || '';
    const useCache = c.req.query('cache') !== 'false';
    const isOriginal = c.req.query('original') === 'true';
    let preset: any = {};
    if (presetName && presets[presetName]) {
      preset = presets[presetName];
    }
    if (!(file instanceof File)) {
      return c.text('No file uploaded', 400);
    }
    const buffer = await file.arrayBuffer();
    const inputBuffer = Buffer.from(buffer);
    // Detect metadata for defaults
    const meta = await sharp(inputBuffer).metadata();
    // Extract EXIF from original input buffer before Sharp processing strips metadata.
    const { extractImageMetadata } = await import('./metadata.js');
    const inputExifMeta = await extractImageMetadata(inputBuffer);
    // Precedence: Explicit > Preset > Default (Original Format / 2048px)
    const width = body['width'] ? parseInt(body['width'] as string) : (preset['width'] || (isOriginal ? undefined : 2048));
    const height = body['height'] ? parseInt(body['height'] as string) : (preset['height'] || undefined);
    const format = (body['format'] as string) || (preset['format'] || meta.format || 'jpeg');
    const fit = (preset['fit'] || 'inside') as keyof sharp.FitEnum;
    // Cache key: content + params. Must mirror the worker's hash in handleJob exactly.
    const hash = createHash('sha256')
      .update(inputBuffer)
      .update(`w${width}h${height}f${format}`)
      .digest('hex');
    const filename = `${hash}.${format}`;
    const filepath = path.join(CACHE_DIR, filename);
    let processedBuffer: Buffer | null = null;
    // 1. Try Cache
    let cacheHit = false;
    if (useCache) {
      try {
        processedBuffer = await fs.readFile(filepath);
        cacheHit = true;
        logger.debug({ filename }, 'Image cache hit - read from disk');
      } catch {
        // Not found in cache
      }
    }
    let workerOffload = false;
    if (!processedBuffer) {
      // 2. Process if no cache
      logger.debug({ filename }, 'Image cache miss - processing');
      if (await hasWorker('images')) {
        workerOffload = true;
        logger.debug({ filename }, 'Offloading image processing to worker thread');
        // Zero-copy transfer: `buffer` is DETACHED after this call, and
        // inputBuffer (a view over it) must not be touched again below.
        await dispatchToWorker('images', 'process_image', {
          buffer, width, height, format, fit
        }, [buffer]);
        // The worker wrote to cache; load it for the forwarding branches below.
        processedBuffer = await fs.readFile(filepath);
      } else {
        // Inline fallback if worker failed or workers=0
        await this.performProcessImage(inputBuffer, filepath, { width, height, format, fit });
        processedBuffer = await fs.readFile(filepath);
      }
    }
    if (!processedBuffer) {
      throw new Error('Image processing failed to produce buffer');
    }
    // --- 1. VFS FORWARDING ---
    if (forward === 'vfs') {
      try {
        const user = await requireAuth(c);
        if (!user?.id) {
          return c.json({ error: 'Unauthorized' }, 401);
        }
        const mountName = process.env.IMAGE_VFS_STORE || 'images';
        const resolved = resolveMountByName(mountName, user.id);
        if (!resolved) {
          return c.json({ error: `VFS mount '${mountName}' not found` }, 404);
        }
        const subdir = user.id;
        const storageFilename = filename;
        // Ensure least-privilege per-user grant exists on shared mount before writing as the user.
        const mountPath = path.resolve((resolved.mount as IExtendedMount).path);
        const settingsOwner = resolved.ownerId || SYSTEM_MOUNT_OWNER_ID;
        const settings = await ensureVfsSettings(mountPath, settingsOwner);
        const grantPath = `/${subdir}`;
        const needsGrant = !settings.acl.some((entry: any) => {
          const entryPath = ((entry?.path || '/').startsWith('/') ? entry.path : `/${entry.path}`);
          return entry.userId === user.id && entryPath === grantPath;
        });
        if (needsGrant) {
          settings.acl.push({
            path: grantPath,
            permissions: ['read', 'mkdir', 'write'],
            userId: user.id,
          } as any);
          await saveVfsSettings(mountPath, settings);
        }
        const ownerId = resolved.ownerId || SYSTEM_MOUNT_OWNER_ID;
        const vfs = await createVFS(resolved.mount as IExtendedMount, ownerId, user.id);
        await vfs.mkdir(subdir, { recursive: true });
        await vfs.writefile(`${subdir}/${storageFilename}`, processedBuffer);
        const serverBase = (process.env.IMAGE_VFS_URL || process.env.SERVER_IMAGE_API_URL_R || process.env.SERVER_URL_R || 'https://service.polymech.info').replace(/\/$/, '');
        const publicUrl = `${serverBase}/api/vfs/get/${encodeURIComponent(mountName)}/${encodeURIComponent(subdir)}/${encodeURIComponent(storageFilename)}`;
        const sharpMeta = await sharp(processedBuffer).metadata();
        logger.info({
          mountName,
          subdir,
          storageFilename,
          bufferSize: processedBuffer.length,
          format,
        }, 'Uploaded image to VFS storage');
        return c.json({
          url: publicUrl,
          width: sharpMeta.width,
          height: sharpMeta.height,
          format: sharpMeta.format,
          size: sharpMeta.size,
          filename: `${subdir}/${storageFilename}`,
          meta: inputExifMeta,
        });
      } catch (err: any) {
        logger.error({ err }, 'VFS forwarding error');
        return c.json({ error: err?.message || 'Failed to upload to VFS storage' }, 502);
      }
    }
    // --- 2. SUPABASE FORWARDING ---
    if (forward === 'supabase') {
      try {
        // Check env vars before import
        if (!process.env.SUPABASE_URL || !process.env.SUPABASE_SERVICE_KEY) {
          throw new Error('Missing Supabase credentials in server environment');
        }
        const bucket = process.env.SUPABASE_BUCKET || 'pictures';
        // FIX: was the literal string "cache/$(unknown)" — every upload collided
        // on one object key instead of using the content-hash filename.
        const storagePath = `cache/${filename}`;
        const supabaseUrl = process.env.SUPABASE_URL;
        const supabaseKey = process.env.SUPABASE_SERVICE_KEY;
        logger.info({
          bucket,
          storagePath,
          bufferSize: processedBuffer.length,
          format,
        }, 'Uploading to Supabase storage (direct fetch)');
        // Direct fetch to Supabase Storage REST API
        // Bypasses @supabase/storage-js SDK which has opaque error handling with Node.js Buffers
        const uploadUrl = `${supabaseUrl}/storage/v1/object/${bucket}/${storagePath}`;
        const uploadRes = await fetch(uploadUrl, {
          method: 'POST',
          headers: {
            'Authorization': `Bearer ${supabaseKey}`,
            'apikey': supabaseKey,
            'Content-Type': `image/${format}`,
            'x-upsert': 'true',
            'cache-control': 'max-age=3600',
          },
          body: new Uint8Array(processedBuffer),
        });
        if (!uploadRes.ok) {
          const errorBody = await uploadRes.text();
          logger.error({
            status: uploadRes.status,
            statusText: uploadRes.statusText,
            body: errorBody.substring(0, 500),
            bucket,
            storagePath,
          }, 'Supabase storage upload failed');
          return c.json({
            error: 'Failed to upload to external storage',
            details: errorBody.substring(0, 500),
            status: uploadRes.status,
          }, 502);
        }
        // Construct public URL (same logic as SDK's getPublicUrl)
        const publicUrl = `${supabaseUrl}/storage/v1/object/public/${bucket}/${storagePath}`;
        const sharpMeta = await sharp(processedBuffer).metadata();
        return c.json({
          url: publicUrl,
          width: sharpMeta.width,
          height: sharpMeta.height,
          format: sharpMeta.format,
          size: sharpMeta.size,
          filename: storagePath,
          meta: inputExifMeta
        });
      } catch (err: any) {
        logger.error({ err }, 'Supabase forwarding error');
        const status = err.message.includes('Missing Supabase') ? 500 : 502;
        return c.json({ error: err.message }, status as any);
      }
    }
    // --- 3. LOCAL CACHING (Handled above) ---
    // processedBuffer is already written to cache if it was processed, or read from cache if hit.
    const elapsed = performance.now() - start;
    const ss = Math.floor(elapsed / 1000);
    const ms = Math.floor(elapsed % 1000);
    const duration = `${ss}:${ms.toString().padStart(3, '0')}`;
    // FIX: was the literal "/api/images/cache/$(unknown)" — the redirect never
    // pointed at the processed variant.
    const url = `/api/images/cache/${filename}`;
    logger.info({ url, duration, cacheHit, workerOffload, format, width, height }, 'handlePostImage complete');
    return c.redirect(url, 303);
  } catch (err: any) {
    logger.error({ err }, 'Image processing failed');
    return c.text(err.message, 500);
  }
}
/**
 * GET /api/images/cache/:filename — serve a previously cached variant.
 * Responds with an immutable long-lived cache header; 404 if not on disk.
 */
async handleGetImage(c: Context) {
  const filename = c.req.param('filename');
  if (!filename) return c.text('Filename required', 400);
  // basename() strips any directory components, blocking path traversal.
  const safeFilename = path.basename(filename);
  const filepath = path.join(CACHE_DIR, safeFilename);
  try {
    const content = await fs.readFile(filepath);
    // Infer the MIME type from the extension; unknown extensions fall back
    // to a generic binary type.
    const mimeByExt: Record<string, string> = {
      jpg: 'image/jpeg',
      jpeg: 'image/jpeg',
      png: 'image/png',
      webp: 'image/webp',
      gif: 'image/gif',
      avif: 'image/avif',
    };
    const ext = path.extname(safeFilename).slice(1);
    c.header('Content-Type', mimeByExt[ext] ?? 'application/octet-stream');
    c.header('Cache-Control', `public, max-age=${CACHE_TTL}, immutable`);
    return c.body(content);
  } catch (err) {
    return c.text('Not found', 404);
  }
}
/**
 * POST /api/images/responsive — build a <picture>-style source set.
 * Body: multipart 'file' or a 'url' field, plus optional JSON 'sizes' and
 * 'formats'. Widths <= 600 are encoded eagerly into the cache; larger widths
 * (URL sources only) return lazy /api/images/render URLs so heavy encodes
 * happen on first demand instead of here.
 */
async handlePostResponsive(c: Context) {
  const start = performance.now();
  const variantStats: any[] = [];
  try {
    const body = await c.req.parseBody();
    const file = body['file'];
    const url = body['url'] as string;
    const sizesJson = body['sizes'] as string;
    const formatsJson = body['formats'] as string;
    let inputBuffer: Buffer;
    if (file instanceof File) {
      const buffer = await file.arrayBuffer();
      inputBuffer = Buffer.from(buffer);
    } else if (url) {
      const resolvedUrl = await resolveLegacyPictureThumbUrl(url);
      const urlHash = createHash('sha256').update(resolvedUrl).digest('hex');
      const sourceFilename = `source_${urlHash}`;
      const sourcePath = path.join(CACHE_DIR, sourceFilename);
      try {
        inputBuffer = await fs.readFile(sourcePath);
      } catch {
        inputBuffer = await fetchImageCoalesced(resolvedUrl);
        // Cache the source image (best-effort; failure is logged, not fatal)
        await fs.writeFile(sourcePath, inputBuffer).catch(e => {
          logger.error({ err: e }, 'Failed to write source cache');
        });
      }
    } else {
      return c.text('No file or URL provided', 400);
    }
    // Defaults
    const sizes: number[] = sizesJson ? JSON.parse(sizesJson) : [180, 640, 1024, 2048];
    const formats: string[] = formatsJson ? JSON.parse(formatsJson) : ['avif', 'webp'];
    const meta = await sharp(inputBuffer).metadata();
    const originalFormat = meta.format || 'jpeg';
    // 'original'/'jpg' map onto the source's detected format (normalized to 'jpeg')
    const targetFormats = formats.map(f => f === 'original' || f === 'jpg' ? (originalFormat === 'jpeg' ? 'jpeg' : originalFormat) : f);
    // Deduplicate
    const uniqueFormats = [...new Set(targetFormats)];
    const uniqueSizes = [...new Set(sizes)].sort((a, b) => a - b);
    const sources: { srcset: string; type: string }[] = [];
    let fallbackSrc = '';
    let fallbackWidth = 0;
    let fallbackHeight = 0;
    let fallbackFormat = '';
    // Generate all variants
    for (const format of uniqueFormats) {
      const srcSetParts: string[] = [];
      for (const width of uniqueSizes) {
        const variantStart = performance.now();
        let filename;
        const baseUrl = process.env.SERVER_IMAGE_API_URL || 'https://service.polymech.info';
        const LAZY_THRESHOLD = 600;
        const isLazy = url && width > LAZY_THRESHOLD;
        if (isLazy) {
          // LAZY GENERATION: Return Dynamic Render URL
          // Do NOT process large images eagerly if we have a URL source
          const renderUrl = `${baseUrl}/api/images/render?url=${encodeURIComponent(url)}&width=${width}&format=${format}`;
          srcSetParts.push(`${renderUrl} ${width}w`);
          variantStats.push({ width, format, lazy: true, duration: performance.now() - variantStart });
          // Fallback bookkeeping: assume the requested width; height is unknown.
          if (!fallbackSrc || (format === 'jpeg' && fallbackFormat !== 'jpeg') || (width > fallbackWidth && format === fallbackFormat)) {
            fallbackSrc = renderUrl;
            fallbackWidth = width;
            fallbackHeight = 0; // Unknown height without checking, but it's lazy
            fallbackFormat = format;
          }
          continue;
        }
        logger.debug(`Ensure image cached : ${url}`)
        // EAGER GENERATION
        try {
          const res = await _ensureCachedImage(inputBuffer, width, undefined, format);
          filename = res.filename;
          variantStats.push({ width, format, lazy: false, hit: res.hit, duration: performance.now() - variantStart });
        } catch (e: any) {
          logger.error({ err: e }, 'Failed to cache image variant');
          variantStats.push({ width, format, error: e.message, duration: performance.now() - variantStart });
          continue;
        }
        // FIX: was the literal ".../cache/$(unknown)" — every srcset entry
        // pointed at a nonexistent file instead of the cached variant.
        const cachedUrl = `${baseUrl}/api/images/cache/${filename}`;
        srcSetParts.push(`${cachedUrl} ${width}w`);
        // Update fallback to the largest version of the first format (jpeg preferred)
        if (!fallbackSrc || (format === 'jpeg' && fallbackFormat !== 'jpeg') || (width > fallbackWidth && format === fallbackFormat)) {
          fallbackSrc = cachedUrl;
          fallbackWidth = width; // Use requested width as nominal fallback width
          fallbackFormat = format;
        }
      }
      sources.push({
        srcset: srcSetParts.join(', '),
        type: `image/${format}`
      });
    }
    const totalDuration = performance.now() - start;
    const seconds = Math.floor(totalDuration / 1000);
    const ms = Math.floor(totalDuration % 1000);
    const durationFormatted = `${seconds}:${ms.toString().padStart(3, '0')}`;
    const performanceStats = {
      totalDuration: durationFormatted,
      variants: variantStats.map(v => {
        const vSeconds = Math.floor(v.duration / 1000);
        const vMs = Math.floor(v.duration % 1000);
        return { ...v, duration: `${vSeconds}:${vMs.toString().padStart(3, '0')}` };
      }),
      url: url ? url : 'file-upload'
    };
    logger.debug({
      msg: 'Responsive image generation complete',
      performance: performanceStats
    });
    return c.json({
      img: {
        src: fallbackSrc,
        width: fallbackWidth,
        height: fallbackHeight,
        format: fallbackFormat
      },
      sources,
      stats: performanceStats
    });
  } catch (err: any) {
    logger.error({ err }, 'Responsive image generation failed');
    return c.text(err.message, 500);
  }
}
async handleGetResponsive(c: Context) {
const start = performance.now();
const variantStats: any[] = [];
try {
const rawUrl = c.req.query('url');
if (!rawUrl) return c.text('URL required', 400);
const url = sanitizeImageUrl(rawUrl);
if (url.startsWith('ERR:')) return c.text(url.slice(4), 400);
const sourceUrl = await resolveLegacyPictureThumbUrl(url);
const sizesJson = c.req.query('sizes');
const formatsJson = c.req.query('formats');
// Fetch source image (with coalescing + source-cache)
const urlHash = createHash('sha256').update(sourceUrl).digest('hex');
const sourceFilename = `source_${urlHash}`;
const sourcePath = path.join(CACHE_DIR, sourceFilename);
let inputBuffer: Buffer;
try {
inputBuffer = await fs.readFile(sourcePath);
} catch {
inputBuffer = await fetchImageCoalesced(sourceUrl);
await fs.writeFile(sourcePath, inputBuffer).catch(e => {
logger.error({ err: e }, 'Failed to write source cache');
});
}
// Defaults
const sizes: number[] = sizesJson ? JSON.parse(sizesJson) : [180, 640, 1024, 2048];
const formats: string[] = formatsJson ? JSON.parse(formatsJson) : ['avif', 'webp'];
const meta = await sharp(inputBuffer).metadata();
const originalFormat = meta.format || 'jpeg';
const targetFormats = formats.map(f => f === 'original' || f === 'jpg' ? (originalFormat === 'jpeg' ? 'jpeg' : originalFormat) : f);
const uniqueFormats = [...new Set(targetFormats)];
const uniqueSizes = [...new Set(sizes)].sort((a: number, b: number) => a - b);
const sources: { srcset: string; type: string }[] = [];
let fallbackSrc = '';
let fallbackWidth = 0;
let fallbackHeight = 0;
let fallbackFormat = '';
const baseUrl = process.env.SERVER_IMAGE_API_URL || 'https://service.polymech.info';
const LAZY_THRESHOLD = 600;
for (const format of uniqueFormats) {
const srcSetParts: string[] = [];
for (const width of uniqueSizes) {
const variantStart = performance.now();
const isLazy = width > LAZY_THRESHOLD;
if (isLazy) {
const renderUrl = `${baseUrl}/api/images/render?url=${encodeURIComponent(sourceUrl)}&width=${width}&format=${format}`;
srcSetParts.push(`${renderUrl} ${width}w`);
variantStats.push({ width, format, lazy: true, duration: performance.now() - variantStart });
if (!fallbackSrc || (format === 'jpeg' && fallbackFormat !== 'jpeg') || (width > fallbackWidth && format === fallbackFormat)) {
fallbackSrc = renderUrl;
fallbackWidth = width;
fallbackHeight = 0;
fallbackFormat = format;
}
// NOTE: background pre-warm removed — concurrent writes to the same
// cache file (pre-warm + real render request) produced corrupt AVIF.
// Revisit with write-to-temp-then-rename (atomic) if needed.
continue;
}
// EAGER GENERATION
try {
const res = await _ensureCachedImage(inputBuffer, width, undefined, format);
const filename = res.filename;
variantStats.push({ width, format, lazy: false, hit: res.hit, duration: performance.now() - variantStart });
const cachedUrl = `${baseUrl}/api/images/cache/${filename}`;
srcSetParts.push(`${cachedUrl} ${width}w`);
if (!fallbackSrc || (format === 'jpeg' && fallbackFormat !== 'jpeg') || (width > fallbackWidth && format === fallbackFormat)) {
fallbackSrc = cachedUrl;
fallbackWidth = width;
fallbackFormat = format;
}
} catch (e: any) {
logger.error({ err: e }, 'Failed to cache image variant');
variantStats.push({ width, format, error: e.message, duration: performance.now() - variantStart });
continue;
}
}
sources.push({
srcset: srcSetParts.join(', '),
type: `image/${format}`
});
}
const totalDuration = performance.now() - start;
const seconds = Math.floor(totalDuration / 1000);
const ms = Math.floor(totalDuration % 1000);
const durationFormatted = `${seconds}:${ms.toString().padStart(3, '0')}`;
const performanceStats = {
totalDuration: durationFormatted,
variants: variantStats.map(v => {
const vSeconds = Math.floor(v.duration / 1000);
const vMs = Math.floor(v.duration % 1000);
return { ...v, duration: `${vSeconds}:${vMs.toString().padStart(3, '0')}` };
}),
url: sourceUrl
};
logger.debug({
msg: 'Responsive image generation complete (GET)',
performance: performanceStats
});
// Cache the JSON response — deterministic for the same url+sizes+formats
c.header('Cache-Control', `public, max-age=${CACHE_TTL}, immutable`);
return c.json({
img: {
src: fallbackSrc,
width: fallbackWidth,
height: fallbackHeight,
format: fallbackFormat
},
sources,
stats: performanceStats
});
} catch (err: any) {
logger.error({ err }, 'Responsive image generation (GET) failed');
return c.text(err.message, 500);
}
}
/**
 * GET /api/images/render — resize/convert a remote image on demand.
 *
 * Query params: `url` (required), `width`, `height`, `format` (default
 * 'jpeg'), `square=true`, `contain=true`. The rendered variant is cached on
 * disk under a sha256 of (url + options); concurrent requests for the same
 * variant are coalesced via `ongoingRenders` so only one encode runs, and the
 * encode itself is throttled through the global `_acquireEncode` slot limit.
 *
 * Responses: 400 for missing/invalid params, 500 on fetch or encode failure.
 */
async handleRenderImage(c: Context) {
  const start = performance.now();
  const reqId = Math.random().toString(36).slice(2, 8);
  const fmtMs = (v: number) => `${Math.floor(v / 1000)}:${Math.floor(v % 1000).toString().padStart(3, '0')}`;
  const url = c.req.query('url');
  if (!url) return c.text('URL required', 400);
  try {
    const widthStr = c.req.query('width');
    const heightStr = c.req.query('height');
    const formatStr = c.req.query('format');
    const square = c.req.query('square') === 'true';
    const contain = c.req.query('contain') === 'true';
    const width = widthStr ? parseInt(widthStr, 10) : undefined;
    const height = heightStr ? parseInt(heightStr, 10) : undefined;
    // Reject non-numeric / non-positive dimensions early — otherwise NaN ends
    // up in the variant hash ("wNaN") and garbage reaches the encoder.
    if ((width !== undefined && (!Number.isFinite(width) || width <= 0)) ||
        (height !== undefined && (!Number.isFinite(height) || height <= 0))) {
      return c.text('Invalid width/height', 400);
    }
    const format = formatStr || 'jpeg';
    // SECURITY: `format` is interpolated into the cache filename below.
    // Restrict it to a plain alphanumeric token so a crafted query string
    // (e.g. "png/../../x") cannot traverse outside CACHE_DIR on read or write.
    if (!/^[a-z0-9]+$/i.test(format)) {
      return c.text('Invalid format', 400);
    }
    const hashKey = createHash('sha256')
      .update(url)
      .update(`w${width}h${height}f${format}${square ? 'sq' : ''}${contain ? 'ct' : ''}`)
      .digest('hex');
    const filename = `${hashKey}.${format}`;
    const filepath = path.join(CACHE_DIR, filename);
    logger.info({ reqId, url, width, height, format, hash: hashKey.slice(0, 12) }, 'render: incoming');
    // 1. Check disk cache. A zero-length file means a previous write was
    // interrupted — treat it as a miss and remove it.
    try {
      const content = await fs.readFile(filepath);
      if (content.length === 0) {
        logger.warn({ reqId, filepath }, 'render: cached file is empty (corrupt), treating as miss');
        await fs.unlink(filepath).catch(() => {});
      } else {
        logger.info({ reqId, sizeKB: Math.round(content.length / 1024), elapsed: fmtMs(performance.now() - start) }, 'render: cache HIT');
        c.header('Content-Type', `image/${format}`);
        c.header('Cache-Control', `public, max-age=${CACHE_TTL}, immutable`);
        return c.body(content);
      }
    } catch {
      // cache miss
    }
    // 2. Coalesce concurrent renders for the same variant so identical
    // requests share one fetch+encode (and one cache-file write).
    let processedBuffer: Buffer;
    if (ongoingRenders.has(hashKey)) {
      logger.info({ reqId, hash: hashKey.slice(0, 12) }, 'render: coalescing (joining in-flight render)');
      processedBuffer = await ongoingRenders.get(hashKey)!;
    } else {
      const renderPromise = (async () => {
        // 3. Fetch the source image (itself coalesced per-URL).
        const fetchStart = performance.now();
        logger.info({ reqId, url }, 'render: fetching source');
        const inputBuffer = await fetchImageCoalesced(url);
        const fetchMs = performance.now() - fetchStart;
        logger.info({ reqId, fetchMs: fmtMs(fetchMs), srcKB: Math.round(inputBuffer.length / 1024) }, 'render: source fetched');
        // 4. Encode — global slot limit (IMAGE_ENCODE_MAX_CONCURRENT) plus an
        // event-loop yield so other HTTP handlers still get turns.
        try {
          await _acquireEncode(reqId);
          await yieldBeforeEncode();
          const encodeStart = performance.now();
          const buf = await this.performRenderImage(inputBuffer, filepath, { width, height, format, square, contain });
          const encodeMs = performance.now() - encodeStart;
          logger.info(
            { reqId, encodeMs: fmtMs(encodeMs), outKB: Math.round(buf.length / 1024), ..._encodeQueueStats() },
            'render: encoded',
          );
          return buf;
        } finally {
          _releaseEncode();
        }
      })();
      ongoingRenders.set(hashKey, renderPromise);
      try {
        processedBuffer = await renderPromise;
      } finally {
        // Always clear the in-flight entry, even on failure, so a later
        // request can retry instead of joining a permanently-rejected promise.
        ongoingRenders.delete(hashKey);
      }
    }
    const elapsed = performance.now() - start;
    logger.info({ reqId, elapsed: fmtMs(elapsed), sizeKB: Math.round(processedBuffer.length / 1024), width, format }, 'render: complete (miss)');
    c.header('Content-Type', `image/${format}`);
    c.header('Cache-Control', `public, max-age=${CACHE_TTL}, immutable`);
    return c.body(processedBuffer as any);
  } catch (err: any) {
    const elapsed = performance.now() - start;
    logger.error({ err, url, elapsed: fmtMs(elapsed), reqId }, 'render: FAILED');
    return c.text('Internal Server Error', 500);
  }
}
/**
 * POST /api/images/transform — apply a list of editing operations to an
 * uploaded image and return the processed result.
 *
 * multipart/form-data body:
 *   file       — the image to transform (required)
 *   operations — JSON array of ops, applied in order:
 *                rotate { angle }, resize { width, height, fit? },
 *                crop { x, y, width, height },
 *                flip { direction: 'horizontal' | 'vertical' },
 *                adjust { brightness?, contrast? }
 *
 * Responses: 400 for a missing file or malformed operations JSON,
 * 500 if Sharp fails to process the image.
 */
async handleTransformImage(c: Context) {
  try {
    const body = await c.req.parseBody();
    const file = body['file'];
    const operationsJson = body['operations'] as string;
    if (!(file instanceof File)) {
      return c.text('No file uploaded', 400);
    }
    // Malformed JSON is a client error — report 400 instead of letting the
    // parse error fall through to the generic 500 handler below.
    let operations: any[] = [];
    if (operationsJson) {
      try {
        operations = JSON.parse(operationsJson);
      } catch {
        return c.text('Invalid operations JSON', 400);
      }
    }
    const inputBuffer = Buffer.from(await file.arrayBuffer());
    let pipeline = sharp(inputBuffer);
    for (const op of operations) {
      switch (op.type) {
        case 'rotate':
          pipeline = pipeline.rotate(op.angle);
          break;
        case 'resize':
          // Transparent background so 'contain' letterboxing stays invisible.
          pipeline = pipeline.resize({
            width: op.width,
            height: op.height,
            fit: op.fit || 'contain',
            background: { r: 0, g: 0, b: 0, alpha: 0 }
          });
          break;
        case 'crop':
          pipeline = pipeline.extract({
            left: Math.round(op.x),
            top: Math.round(op.y),
            width: Math.round(op.width),
            height: Math.round(op.height)
          });
          break;
        case 'flip':
          if (op.direction === 'horizontal') pipeline = pipeline.flop();
          if (op.direction === 'vertical') pipeline = pipeline.flip();
          break;
        case 'adjust':
          // sharp modulate(): brightness is a multiplier, 1.0 = unchanged.
          if (op.brightness) {
            pipeline = pipeline.modulate({ brightness: op.brightness });
          }
          if (op.contrast) {
            // linear(a, b): out = a * in + b. Choosing b = 128 * (1 - a)
            // keeps mid-grey (128) fixed while scaling contrast by `a`.
            const a = op.contrast;
            const b = 128 * (1 - a);
            pipeline = pipeline.linear(a, b);
          }
          break;
      }
    }
    const processedBuffer = await pipeline.toBuffer();
    const meta = await sharp(processedBuffer).metadata();
    c.header('Content-Type', `image/${meta.format}`);
    return c.body(processedBuffer as any);
  } catch (err: any) {
    logger.error({ err }, 'Image transformation failed');
    return c.text(err.message, 500);
  }
}
}