babayaga 2025-08-16 11:49:45 +02:00
parent 7ff02e2ea9
commit 54133b9374
5 changed files with 114 additions and 25 deletions

View File

@@ -5,6 +5,7 @@ import getImageSources from "./getImageSources.js";
import getProcessedImage from "./getProcessedImage.js";
import getArtDirectedImages from "./getArtDirectedImages.js";
import pMap from "p-map";
import { get_cached_object, set_cached_object } from "@polymech/cache";
const imagesData = new Map();
@@ -26,16 +27,27 @@ export default async function ({
try {
const args = Array.from(arguments);
const hash = objectHash(args);
// Check in-memory cache first
if (imagesData.has(hash)) {
return imagesData.get(hash);
}
// Check persistent cache
const cacheKey = { src, type, imagesizes, format, breakpoints, placeholder, fallbackFormat, includeSourceFormat, formatOptions, artDirectives, transformConfigs };
const cachedResult = await get_cached_object(cacheKey, 'astro-imagetools');
if (cachedResult) {
console.log(`Cache hit for ${type} at ${src}`);
imagesData.set(hash, cachedResult);
return cachedResult;
}
const start = performance.now();
const { path, base, rest, image, imageWidth, imageHeight, imageFormat } =
await getProcessedImage(src, transformConfigs);
await delay(250);
src = path;
rest.aspect = `${imageWidth / imageHeight}`;
@@ -63,7 +75,6 @@ export default async function ({
rest
),
async () => {
await delay(250);
return await getArtDirectedImages(
artDirectives,
placeholder,
@@ -91,7 +102,13 @@ export default async function ({
images,
};
// Cache both in memory and persistently
imagesData.set(hash, returnObject);
await set_cached_object(cacheKey, 'astro-imagetools', returnObject, {
src: args[0].src,
type,
timestamp: Date.now()
});
const end = performance.now();
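The change above introduces a two-level cache: an in-memory Map keyed by a hash of the call arguments, backed by the persistent @polymech/cache store. As a minimal standalone sketch of that lookup order (the getOrCompute helper and its compute callback are hypothetical; the objectHash import is assumed to come from the object-hash package used by this module, and the get_cached_object/set_cached_object calls mirror the signatures shown in the diff):

import objectHash from "object-hash";
import { get_cached_object, set_cached_object } from "@polymech/cache";

const memoryCache = new Map();

// Hypothetical helper mirroring the lookup order used above:
// in-memory Map first, then the persistent store, then recompute.
async function getOrCompute(cacheKey, compute) {
  const hash = objectHash(cacheKey);
  if (memoryCache.has(hash)) return memoryCache.get(hash); // fastest path
  const cached = await get_cached_object(cacheKey, "astro-imagetools");
  if (cached) {
    memoryCache.set(hash, cached); // promote to memory for this process
    return cached;
  }
  const result = await compute();
  memoryCache.set(hash, result);
  await set_cached_object(cacheKey, "astro-imagetools", result, { timestamp: Date.now() });
  return result;
}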

View File

@@ -11,22 +11,49 @@ import {
const { fileTypeFromBuffer } = await import("file-type");
// Retry mechanism with exponential backoff
async function retryWithBackoff(fn, retries = 3, baseDelay = 100) {
for (let i = 0; i < retries; i++) {
try {
return await fn();
} catch (error) {
if (i === retries - 1) {
throw error; // Last attempt failed
}
// Check if it's a file system error that we should retry
const isRetryableError = error.code === 'EBUSY' ||
error.code === 'ENOENT' ||
error.code === 'EPERM' ||
error.errno === -4094 || // UNKNOWN error on Windows
error.message.includes('UNKNOWN: unknown error');
if (!isRetryableError) {
throw error; // Don't retry non-transient errors
}
const delay = baseDelay * Math.pow(2, i); // Exponential backoff
console.warn(`Retry attempt ${i + 1}/${retries} for file operation after ${delay}ms delay:`, error.message);
await new Promise(resolve => setTimeout(resolve, delay));
}
}
}
export default async function getResolvedSrc(src) {
const token = crypto.createHash("md5").update(src).digest("hex");
let filepath = fsCachePath + token;
const fileExists = (() => {
const fileExists = await retryWithBackoff(() => {
for (const type of supportedImageTypes) {
const fileExists = fs.existsSync(filepath + `.${type}`);
if (fileExists) {
filepath += `.${type}`;
return true;
}
}
})();
return false;
});
if (!fileExists) {
const buffer = Buffer.from(await (await fetch(src)).arrayBuffer());
@@ -37,7 +64,17 @@ export default async function getResolvedSrc(src) {
filepath += `.${ext}`;
fs.writeFileSync(filepath, buffer);
// Use retry mechanism for file write operations
await retryWithBackoff(() => {
return new Promise((resolve, reject) => {
try {
fs.writeFileSync(filepath, buffer);
resolve(undefined);
} catch (error) {
reject(error);
}
});
});
}
const base = /^https?:/.test(src)
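With the defaults (retries = 3, baseDelay = 100), only the listed file-system error codes are retried, waiting 100 ms and then 200 ms between attempts; anything else is rethrown immediately. A hedged usage sketch against this module's own fs import (filepath stands in for any path that may be briefly locked):

// Hypothetical usage: retry a read that can fail transiently (EBUSY/EPERM)
// while another process still holds the file; other errors rethrow at once.
const cachedBuffer = await retryWithBackoff(() => fs.readFileSync(filepath));
// Waits grow as baseDelay * 2^i: 100 ms before attempt 2, 200 ms before attempt 3.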

View File

@@ -1,12 +1,12 @@
export default {
"environment": "dev",
"environment": "build",
"isSsrBuild": false,
"projectBase": "",
"publicDir": "C:\\Users\\zx\\Desktop\\polymech\\site2\\public\\",
"rootDir": "C:\\Users\\zx\\Desktop\\polymech\\site2\\",
"mode": "dev",
"outDir": "dist",
"assetsDir": "/_astro",
"mode": "production",
"outDir": "C:\\Users\\zx\\Desktop\\polymech\\site2\\dist\\",
"assetsDir": "_astro",
"sourcemap": false,
"assetFileNames": "/_astro/[name]@[width].[hash][extname]"
}
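This file is a generated snapshot of the build context, switching from the dev-server values to the production-build values. A hedged sketch of how a consumer might branch on it (the import path and the consuming code below are purely illustrative, not part of this package):

import path from "node:path";
import astroViteConfigs from "./astroViteConfigs.js";

// Illustrative only: pick the directory processed assets are written to.
const { environment, outDir, assetsDir } = astroViteConfigs;
const assetOutputDir =
  environment === "dev"
    ? assetsDir                     // dev: virtual "/_astro" path served by Vite
    : path.join(outDir, assetsDir); // build: "<dist>/_astro" on disk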

View File

@@ -9,6 +9,8 @@ import pMap from "p-map"
const filename = fileURLToPath(import.meta.url);
const astroViteConfigsPath = resolve(filename, "../../astroViteConfigs.js");
import { get_cached, set_cached, get_path_cached } from "@polymech/cache";
export default {
name: "imagetools",
hooks: {
@@ -59,22 +61,52 @@ export default {
await pMap(
assetPaths,
async ([assetPath, { hash, image, buffer }]) => {
// delay, otherwise unknown errors occur (sharp/vips)
await new Promise((resolve) => setTimeout(resolve, 250));
// Retry mechanism with exponential backoff for image processing
const retryWithBackoff = async (fn, retries = 3, baseDelay = 10) => {
for (let i = 0; i < retries; i++) {
try {
return await fn();
} catch (error) {
if (i === retries - 1) {
throw error; // Last attempt failed
}
// Check if it's a vips/sharp-related error that we should retry
const isRetryableError = error.message.includes('vips') ||
error.message.includes('sharp') ||
error.message.includes('UNKNOWN: unknown error') ||
error.code === 'EBUSY' ||
error.code === 'ENOENT' ||
error.errno === -4094;
if (!isRetryableError) {
throw error; // Don't retry non-transient errors
}
const delay = baseDelay * Math.pow(2, i); // Exponential backoff
console.warn(`Retry attempt ${i + 1}/${retries} for image ${assetPath} after ${delay}ms delay:`, error.message);
await new Promise(resolve => setTimeout(resolve, delay));
}
}
};
try {
await saveAndCopyAsset(
hash,
image,
buffer,
outDir,
assetsDir,
assetPath,
isSsrBuild
);
await retryWithBackoff(async () => {
await saveAndCopyAsset(
hash,
image,
buffer,
outDir,
assetsDir,
assetPath,
isSsrBuild
);
});
console.log(`Image processed: ${assetPath}`);
} catch (error) {
console.error(error)
console.error(`Failed to process image ${assetPath} after retries:`, error);
// Continue processing other images even if one fails
}
console.log(`Image processed: ${assetPath}`)
},
// higher concurrency causes sharp/vips errors as well
{ concurrency: 1 }
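For reference, pMap with { concurrency: 1 } walks the asset list strictly one at a time, so only a single sharp/vips pipeline is ever active, and here the backoff starts at 10 ms (10 ms, then 20 ms); combined with the per-asset try/catch, one bad asset no longer aborts the whole build. A minimal sketch of that shape (the writeAsset worker is hypothetical):

import pMap from "p-map";

// Illustrative only: sequential processing with per-item error isolation.
await pMap(
  assetPaths,
  async ([assetPath, asset]) => {
    try {
      await retryWithBackoff(() => writeAsset(assetPath, asset)); // hypothetical worker
    } catch (error) {
      console.error(`Skipping ${assetPath}:`, error); // keep going on failure
    }
  },
  { concurrency: 1 } // never run two sharp/vips pipelines at once
);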

View File

@@ -41,6 +41,9 @@
"homepage": "https://github.com/RafidMuhymin/astro-imagetools#readme",
"dependencies": {
"@astropub/codecs": "0.4.4",
"@polymech/cache": "file:../../../polymech-mono/packages/cache",
"@polymech/commons": "file:../../../polymech-mono/packages/commons",
"@polymech/fs": "file:../../../polymech-mono/packages/fs",
"file-type": "17.1.1",
"find-cache-dir": "3.3.2",
"find-up": "^6.3.0",