#!/usr/bin/env node

/**
 * Reprocess uploads for existing Photo documents.
 *
 * - For each Photo in Mongo, find a matching source image in uploads/.
 * - Apply the same watermark + resize pipeline used by the upload endpoint.
 * - Write main/medium/thumb variants (WEBP) and update the Photo doc paths.
 * - Photos without a matching source file are skipped.
 *
 * Usage:
 *   APPLY=1 node scripts/reprocess_uploads.js   # actually write files + update docs
 *   node scripts/reprocess_uploads.js           # dry run (default)
 *
 * Env:
 *   MONGO_URI (optional) - defaults to mongodb://localhost:27017/photogallery
 */

// Core Node dependencies.
const fs = require('fs');
const fsPromises = fs.promises;
const path = require('path');
const crypto = require('crypto');

// Image processing + persistence.
const sharp = require('sharp');
const mongoose = require('mongoose');
const Photo = require('../models/photo');

// Mongo connection string; override with the MONGO_URI env var.
const MONGO_URI = process.env.MONGO_URI || 'mongodb://localhost:27017/photogallery';
// Dry run by default; set APPLY=1 to actually write files and update docs.
const APPLY = process.env.APPLY === '1';
// Source images and generated variants both live in this directory.
const UPLOAD_DIR = path.join(__dirname, '..', 'uploads');

// Output variants: longest-edge size in px, WEBP quality, filename suffix.
// `main` has no suffix — it replaces the primary image file.
const VARIANTS = {
  main: { size: 2000, quality: 82, suffix: '' },
  medium: { size: 1200, quality: 80, suffix: '-md' },
  thumb: { size: 640, quality: 76, suffix: '-sm' },
};
// Watermark overlay: a large diagonal "BEACH PARTY BALLOONS" caption as an
// SVG. It is rasterized by sharp (see stampAndVariants) and composited over
// the main image, so the smaller variants inherit the stamp when downscaled.
const diagonalOverlay = Buffer.from(`
<svg width="2400" height="2400" viewBox="0 0 2400 2400" xmlns="http://www.w3.org/2000/svg">
  <defs>
    <linearGradient id="diagGrad" x1="0%" y1="0%" x2="100%" y2="100%">
      <stop offset="0%" stop-color="rgba(255,255,255,0.22)" />
      <stop offset="50%" stop-color="rgba(255,255,255,0.33)" />
      <stop offset="100%" stop-color="rgba(255,255,255,0.22)" />
    </linearGradient>
  </defs>
  <g transform="translate(1200 1200) rotate(-32)">
    <text x="0" y="-80" text-anchor="middle" dominant-baseline="middle"
      fill="url(#diagGrad)" stroke="rgba(0,0,0,0.16)" stroke-width="8"
      font-family="Arial Black, Arial, sans-serif" font-size="260" letter-spacing="6" textLength="1800" lengthAdjust="spacingAndGlyphs">
      BEACH PARTY
      <tspan x="0" dy="280">BALLOONS</tspan>
    </text>
  </g>
</svg>
`);
// Known HEIF/AVIF brand codes found in the ISO-BMFF `ftyp` box.
const HEIF_BRANDS = new Set(['heic', 'heix', 'hevc', 'heim', 'heis', 'hevm', 'hevs', 'mif1', 'msf1', 'avif', 'avis']);

/**
 * Detect whether a buffer looks like a HEIF/AVIF image.
 *
 * An ISO-BMFF file starts with a box whose type at bytes 4-8 is the ASCII
 * marker "ftyp", followed by the major brand at bytes 8-12. The previous
 * implementation only checked the brand, so any file whose bytes 8-12
 * happened to spell a brand string was misdetected; we now require the
 * "ftyp" marker as well.
 *
 * @param {Buffer|null|undefined} buffer - leading bytes of a file.
 * @returns {boolean} true when the buffer carries a HEIF/AVIF signature.
 */
const isHeifBuffer = (buffer) =>
  Boolean(
    buffer &&
    buffer.length >= 12 &&
    buffer.slice(4, 8).toString('ascii') === 'ftyp' &&
    HEIF_BRANDS.has(buffer.slice(8, 12).toString('ascii').toLowerCase())
  );
/**
 * Derive the filename stem for a Photo doc: directory and extension are
 * stripped, and a single trailing "-md"/"-sm" variant suffix is removed so
 * all variants of one photo share the same base name.
 *
 * @param {Object} doc - Photo document (uses `filename`, falling back to `path`).
 * @returns {string} base name without extension or variant suffix.
 */
function parseBaseName(doc) {
  const nameSource = doc.filename || doc.path || '';
  const stem = path.basename(nameSource, path.extname(nameSource));
  return stem.replace(/(?:-md|-sm)$/, '');
}
/**
 * Build the ordered list of filesystem paths that might hold the source
 * image for a Photo doc. Paths recorded on the doc come first, then the
 * conventional `<base>.webp` / `<base>-md.webp` / `<base>-sm.webp` variants.
 *
 * @param {Object} doc - Photo document.
 * @returns {string[]} absolute candidate paths under UPLOAD_DIR.
 */
function sourceCandidates(doc) {
  const base = parseBaseName(doc);

  // Relative paths recorded on the doc itself (may be empty).
  const recorded = [];
  if (doc.path) recorded.push(doc.path.replace(/^\/+/, ''));
  if (doc.filename) recorded.push(path.join('uploads', doc.filename));

  // Strip any leading "uploads/" so we don't double the directory.
  const candidates = recorded.map((rel) =>
    path.join(UPLOAD_DIR, rel.replace(/^uploads[\\/]/, ''))
  );

  // Fall back to the conventional variant filenames.
  for (const suffix of ['', '-md', '-sm']) {
    candidates.push(path.join(UPLOAD_DIR, `${base}${suffix}.webp`));
  }
  return candidates;
}
/**
 * Return the first candidate path that exists and is a regular file.
 *
 * Stat failures (missing file, permission, etc.) are treated as "not found"
 * on purpose — this is a best-effort search.
 *
 * @param {string[]} candidates - absolute paths to probe, in priority order.
 * @returns {Promise<string|null>} first existing file, or null if none exist.
 */
async function findExistingFile(candidates) {
  for (const candidate of candidates) {
    const stat = await fsPromises.stat(candidate).catch(() => null);
    if (stat && stat.isFile()) return candidate;
  }
  return null;
}
/**
 * Run the watermark + resize pipeline on a raw image buffer.
 *
 * The main image is resized, stamped with the diagonal SVG overlay, and
 * encoded as WEBP; the medium/thumb variants are then downscaled FROM the
 * stamped main image so the watermark scales consistently across sizes.
 *
 * @param {Buffer} inputBuffer - source image bytes (any format sharp can read).
 * @param {string} baseName - filename stem used for all variant filenames.
 * @returns {Promise<Object>} map of variant key -> { filename, buffer }.
 */
async function stampAndVariants(inputBuffer, baseName) {
  // Build main stamped image. rotate() with no args auto-orients using EXIF;
  // fit:'inside' + withoutEnlargement caps the longest edge without upscaling.
  const base = sharp(inputBuffer)
    .rotate()
    .resize({ width: VARIANTS.main.size, height: VARIANTS.main.size, fit: 'inside', withoutEnlargement: true })
    .toColorspace('srgb');

  // Materialize the resized image so `info` carries the actual output dimensions.
  const { data: baseBuffer, info } = await base.toBuffer({ resolveWithObject: true });
  // Overlay covers 98% of the image (Math.max guards against a 0px target).
  const targetWidth = Math.max(Math.floor((info.width || VARIANTS.main.size) * 0.98), 1);
  const targetHeight = Math.max(Math.floor((info.height || VARIANTS.main.size) * 0.98), 1);

  // Rasterize the SVG watermark at high density, sized to this image.
  const overlayBuffer = await sharp(diagonalOverlay, { density: 300 })
    .resize({ width: targetWidth, height: targetHeight, fit: 'cover' })
    .png()
    .toBuffer();

  // Composite the watermark onto the resized image and encode the main WEBP.
  const stamped = await sharp(baseBuffer)
    .composite([{ input: overlayBuffer, gravity: 'center' }])
    .toFormat('webp', { quality: VARIANTS.main.quality, effort: 5 })
    .toBuffer();

  const outputs = {
    main: { filename: `${baseName}${VARIANTS.main.suffix}.webp`, buffer: stamped },
  };

  // Each smaller variant is a re-encode of the already-stamped main image.
  const createVariant = async (key, opts) => {
    const resized = await sharp(stamped)
      .resize({ width: opts.size, height: opts.size, fit: 'inside', withoutEnlargement: true })
      .toFormat('webp', { quality: opts.quality, effort: 5 })
      .toBuffer();
    outputs[key] = { filename: `${baseName}${opts.suffix}.webp`, buffer: resized };
  };
  await createVariant('medium', VARIANTS.medium);
  await createVariant('thumb', VARIANTS.thumb);

  return outputs;
}
/**
 * Reprocess one Photo doc: locate its source file, rebuild the watermarked
 * variants, and (when APPLY=1) write the files and update the doc in place.
 *
 * @param {Object} doc - Mongoose Photo document.
 * @returns {Promise<Object>} { status: 'processed'|'missing-source', docId, base }.
 */
async function processDoc(doc) {
  const base = parseBaseName(doc);

  const sourceFile = await findExistingFile(sourceCandidates(doc));
  if (!sourceFile) {
    return { status: 'missing-source', docId: doc._id, base };
  }

  const inputBuffer = await fsPromises.readFile(sourceFile);
  const hash = crypto.createHash('sha256').update(inputBuffer).digest('hex');

  const outputs = await stampAndVariants(inputBuffer, base);

  // Dry run stops here; APPLY=1 persists files and doc updates.
  if (APPLY) {
    for (const { filename, buffer } of Object.values(outputs)) {
      await fsPromises.writeFile(path.join(UPLOAD_DIR, filename), buffer);
    }
    // posix.join keeps the stored paths forward-slashed on every platform.
    doc.path = path.posix.join('uploads', outputs.main.filename);
    doc.variants = {
      medium: path.posix.join('uploads', outputs.medium.filename),
      thumb: path.posix.join('uploads', outputs.thumb.filename),
    };
    // Preserve an existing hash; only fill it in when absent.
    doc.hash = doc.hash || hash;
    await doc.save();
  }

  return { status: 'processed', docId: doc._id, base };
}
/**
 * Entry point: connect to Mongo, reprocess every Photo doc, report counts.
 *
 * Fix: the original only disconnected on the success path, so an error from
 * `Photo.find` (or anything else after connect) left the Mongo connection
 * open while the error propagated. The work is now wrapped in try/finally
 * so `mongoose.disconnect()` always runs once connected.
 */
async function main() {
  await mongoose.connect(MONGO_URI);
  console.log(`Connected to Mongo: ${MONGO_URI}`);

  try {
    const docs = await Photo.find({});
    console.log(`Found ${docs.length} photo docs. APPLY=${APPLY ? 'yes' : 'no (dry run)'}`);

    const results = { processed: 0, missing: 0 };
    for (const doc of docs) {
      try {
        const res = await processDoc(doc);
        if (res.status === 'processed') results.processed++;
        else results.missing++;
      } catch (err) {
        // Per-doc failures are logged and counted; the run continues.
        console.error(`Error processing doc ${doc._id}:`, err.message || err);
        results.missing++;
      }
    }

    console.log(`Done. Processed: ${results.processed}. Skipped (no source/errors): ${results.missing}.`);
  } finally {
    // Always close the connection, even if the run aborted mid-way.
    await mongoose.disconnect();
  }
}
// Kick off the script; any unhandled failure is logged and exits non-zero.
(async () => {
  try {
    await main();
  } catch (err) {
    console.error(err);
    process.exit(1);
  }
})();