401 lines
16 KiB
JavaScript
401 lines
16 KiB
JavaScript
const router = require('express').Router();
|
|
const multer = require('multer');
|
|
const Photo = require('../models/photo.js');
|
|
const fs = require('fs');
|
|
const path = require('path');
|
|
const crypto = require('crypto');
|
|
const fsPromises = require('fs').promises;
|
|
const { Blob } = require('buffer');
|
|
const FormData = global.FormData;
|
|
const sharp = require('sharp');
|
|
const heicConvert = require('heic-convert');
|
|
const {
|
|
MAIN_TAGS,
|
|
OTHER_TAGS,
|
|
TAG_DEFINITIONS,
|
|
TAG_PRESETS,
|
|
MAX_TAGS,
|
|
normalizeTags,
|
|
aliasMap,
|
|
labelLookup,
|
|
} = require('../lib/tagConfig');
|
|
|
|
// Endpoint of the external watermark service (unused while invisible
// watermarking is disabled — see DISABLE_WM below).
const WATERMARK_URL = process.env.WATERMARK_URL || 'http://watermarker:8000/watermark';
// We now use a visible diagonal watermark only. Invisible watermarking is disabled by default.
const DISABLE_WM = true;

// Output renditions written for every upload. `size` is the maximum dimension
// (fit: 'inside'), `quality` the WebP quality, `suffix` the filename tag
// appended before the `.webp` extension ('' = the main stored image).
const VARIANTS = {
  main: { size: 2000, quality: 82, suffix: '' },
  medium: { size: 1200, quality: 80, suffix: '-md' },
  thumb: { size: 640, quality: 76, suffix: '-sm' },
};

// ISO BMFF major brands that identify HEIF-family containers (HEIC/AVIF etc.),
// matched against bytes 8-11 of an uploaded buffer by isHeifBuffer().
const HEIF_BRANDS = new Set([
  'heic', 'heix', 'hevc', 'heim', 'heis', 'hevm', 'hevs', 'mif1', 'msf1', 'avif', 'avis'
]);
|
|
|
|
/**
 * Decide whether an uploaded file looks like a HEIC/HEIF/AVIF image, using the
 * MIME type reported by the client or the original filename's extension.
 *
 * @param {{ mimetype?: string, originalname?: string }} file - Multer file record.
 * @returns {boolean} true when the file appears to be HEIF-family.
 */
const isHeic = (file) => {
  const mimeType = (file.mimetype || '').toLowerCase();
  if (mimeType.includes('heic') || mimeType.includes('heif')) {
    return true;
  }
  const extension = path.extname(file.originalname || '').toLowerCase();
  return ['.heic', '.heif', '.avif'].includes(extension);
};
|
|
|
|
/**
 * Sniff raw file bytes for an ISO BMFF (HEIF-family) major brand. In that
 * container format the major brand occupies bytes 8-11, e.g. "heic" or "avif".
 *
 * @param {Buffer|null|undefined} buffer - Raw uploaded bytes.
 * @returns {boolean} true when the major brand is a known HEIF brand.
 */
const isHeifBuffer = (buffer) => {
  if (!buffer || buffer.length < 12) {
    return false;
  }
  const brand = buffer.toString('ascii', 8, 12).toLowerCase();
  return HEIF_BRANDS.has(brand);
};
|
|
|
|
/**
 * Invisible watermarking is intentionally disabled; the upload pipeline applies
 * a visible diagonal overlay instead. Kept as an async pass-through so any
 * caller that still awaits it continues to work unchanged.
 *
 * @param {Buffer} buffer - Image bytes to (not) watermark.
 * @param {*} payload - Ignored watermark payload.
 * @param {string} filename - Ignored target filename.
 * @returns {Promise<Buffer>} The input buffer, untouched.
 */
async function applyInvisibleWatermark(buffer, payload, filename) {
  return buffer;
}
|
|
|
|
// Multer setup for file uploads in memory
// NOTE(review): no `limits` are configured, so each upload is buffered entirely
// in RAM with no size cap — consider `limits: { fileSize }` if this endpoint is
// publicly reachable.
const storage = multer.memoryStorage();
const upload = multer({ storage: storage });
|
|
|
|
// GET all photos, newest first.
router.route('/').get(async (req, res) => {
  try {
    const photos = await Photo.find().sort({ createdAt: -1 });
    res.json(photos);
  } catch (err) {
    res.status(400).json('Error: ' + err);
  }
});
|
|
|
|
// Build the /tags response body. The static tag configuration is always
// included; `existing` and `tagCounts` vary per request (empty on DB failure).
const buildTagMetadataPayload = (existing, tagCounts) => ({
  tags: TAG_DEFINITIONS,
  main: MAIN_TAGS,
  other: OTHER_TAGS,
  aliases: aliasMap,
  presets: TAG_PRESETS,
  maxTags: MAX_TAGS,
  labels: labelLookup,
  existing,
  tagCounts,
});

// GET tag metadata: static tag config plus live per-tag usage counts.
router.route('/tags').get(async (_req, res) => {
  try {
    // Count how many photos carry each tag.
    const tagCountsArray = await Photo.aggregate([
      { $unwind: '$tags' },
      { $group: { _id: '$tags', count: { $sum: 1 } } }
    ]);
    const tagCounts = tagCountsArray.reduce((acc, item) => {
      acc[item._id] = item.count;
      return acc;
    }, {});
    const existing = tagCountsArray.map(item => item._id);
    res.json(buildTagMetadataPayload(existing, tagCounts));
  } catch (err) {
    console.error('Error fetching tag metadata:', err);
    // Degrade gracefully: the static config is still useful without live counts.
    res.json(buildTagMetadataPayload([], {}));
  }
});
|
|
|
|
/**
 * Normalize tag input that may arrive either as an array or as a single
 * comma-separated string, then delegate validation to normalizeTags.
 *
 * @param {string[]|string|undefined} tagsInput - Raw tags from the request body.
 * @returns {*} Whatever normalizeTags returns (includes a `normalized` list).
 */
const parseIncomingTags = (tagsInput) => {
  let rawList;
  if (Array.isArray(tagsInput)) {
    rawList = tagsInput;
  } else {
    rawList = String(tagsInput || '')
      .split(',')
      .map((piece) => piece.trim())
      .filter(Boolean);
  }
  return normalizeTags(rawList);
};
|
|
|
|
// POST new photo(s) with WebP conversion + duplicate hash checks.
// Per-file pipeline: optional HEIC/HEIF→JPEG conversion, SHA-256 duplicate
// check, visible diagonal watermark, WebP encode, responsive variants, DB save.
router.route('/upload').post(upload.array('photos'), async (req, res) => {
  // Accept either the `photos` array field or a legacy single-file upload.
  const files = (req.files && req.files.length) ? req.files : (req.file ? [req.file] : []);
  if (!files.length) {
    return res.status(400).json({ success: false, error: 'No file uploaded. Please select at least one file.' });
  }

  const { caption, tags } = req.body;
  const captionText = typeof caption === 'string' ? caption.trim() : '';
  if (!captionText) {
    return res.status(400).json({ success: false, error: 'Caption is required.' });
  }
  const { normalized: tagList } = parseIncomingTags(tags);
  if (!tagList.length) {
    return res.status(400).json({
      success: false,
      error: 'Please add at least one tag.',
    });
  }
  if (tagList.length > MAX_TAGS) {
    return res.status(400).json({
      success: false,
      error: `Please use at most ${MAX_TAGS} tags.`
    });
  }

  // Process one uploaded file end-to-end. Resolves to either
  // { duplicate: true, hash, existingId, filename } or { duplicate: false, photo }.
  const processFile = async (file) => {
    let inputBuffer = file.buffer;
    let convertedFromHeif = false;

    // Convert HEIC/HEIF/AVIF input to JPEG so sharp can decode it even when it
    // was built without a HEIF plugin. `force` bypasses the MIME/extension/brand
    // sniffing and is used when sharp itself reported a missing decoder.
    const convertHeifIfNeeded = async (force) => {
      if (convertedFromHeif) return;
      if (!force && !(isHeic(file) || isHeifBuffer(inputBuffer))) return;
      try {
        inputBuffer = await heicConvert({
          buffer: inputBuffer,
          format: 'JPEG',
          quality: 1,
        });
        convertedFromHeif = true;
      } catch (err) {
        console.error('HEIC/HEIF conversion failed:', err);
        throw new Error('Unable to process HEIC/HEIF image. Please try a different file.');
      }
    };

    await convertHeifIfNeeded(false);

    // Content hash is the duplicate key. It is computed on the (possibly
    // converted) input so the same HEIC re-uploaded still dedupes.
    const hash = crypto.createHash('sha256').update(inputBuffer).digest('hex');

    let existing = null;
    try {
      existing = await Photo.findOne({ hash });
    } catch (err) {
      console.error('Error checking duplicate hash:', err);
      throw new Error('Server error checking duplicates.');
    }
    if (existing) {
      return { duplicate: true, hash, existingId: existing._id, filename: existing.filename };
    }

    const originalName = path.parse(file.originalname).name;
    // FIX: files in one batch are processed in parallel and share the same
    // Date.now() value, so a timestamp alone collides when two files have the
    // same original name (silent overwrite). Add a random component.
    const uniqueSuffix = crypto.randomBytes(4).toString('hex');
    const baseName = `${Date.now()}-${uniqueSuffix}-${originalName}`;
    const makeFilename = (suffix) => `${baseName}${suffix}.webp`;
    const filename = makeFilename(VARIANTS.main.suffix);
    const filepath = path.join('uploads', filename);

    // Visible diagonal watermark, rendered from SVG at upload time.
    const diagonalOverlay = Buffer.from(`
      <svg width="2400" height="2400" viewBox="0 0 2400 2400" xmlns="http://www.w3.org/2000/svg">
        <defs>
          <linearGradient id="diagGrad" x1="0%" y1="0%" x2="100%" y2="100%">
            <stop offset="0%" stop-color="rgba(255,255,255,0.22)" />
            <stop offset="50%" stop-color="rgba(255,255,255,0.33)" />
            <stop offset="100%" stop-color="rgba(255,255,255,0.22)" />
          </linearGradient>
        </defs>
        <g transform="translate(1200 1200) rotate(-32)">
          <text x="0" y="-80" text-anchor="middle" dominant-baseline="middle"
            fill="url(#diagGrad)" stroke="rgba(0,0,0,0.16)" stroke-width="8"
            font-family="Arial Black, Arial, sans-serif" font-size="260" letter-spacing="6" textLength="1800" lengthAdjust="spacingAndGlyphs">
            BEACH PARTY
            <tspan x="0" dy="280">BALLOONS</tspan>
          </text>
        </g>
      </svg>
    `);

    // Resize the base image, then composite the watermark scaled to ~98% of the
    // post-resize dimensions so sharp never rejects an overlay larger than the
    // base. Shared by the first attempt and the HEIF-fallback retry (the two
    // code paths were previously duplicated verbatim).
    const renderStamped = async (source) => {
      const base = sharp(source)
        .rotate() // honor EXIF orientation before resizing
        .resize({ width: VARIANTS.main.size, height: VARIANTS.main.size, fit: 'inside', withoutEnlargement: true })
        .toColorspace('srgb');

      const { data: baseBuffer, info } = await base.toBuffer({ resolveWithObject: true });
      const overlayBuffer = await sharp(diagonalOverlay, { density: 300 })
        .resize({
          width: Math.max(Math.floor((info.width || VARIANTS.main.size) * 0.98), 1),
          height: Math.max(Math.floor((info.height || VARIANTS.main.size) * 0.98), 1),
          fit: 'cover',
        })
        .png()
        .toBuffer();

      return sharp(baseBuffer)
        .composite([
          { input: overlayBuffer, gravity: 'center' },
        ])
        .toFormat('webp', { quality: VARIANTS.main.quality, effort: 5 })
        .toBuffer();
    };

    let buffer;
    try {
      buffer = await renderStamped(inputBuffer);
    } catch (err) {
      console.error('Error processing image with sharp:', err);
      // Some HEIC files slip past the sniffing checks; if sharp reports a
      // missing decoder, force the JPEG conversion and retry once.
      const needsHeifFallback = err.message && err.message.toLowerCase().includes('no decoding plugin');
      if (!convertedFromHeif && needsHeifFallback) {
        await convertHeifIfNeeded(true);
        try {
          buffer = await renderStamped(inputBuffer);
        } catch (secondErr) {
          console.error('Retry after HEIF conversion failed:', secondErr);
          throw new Error('Server error during image processing.');
        }
      } else {
        throw new Error('Server error during image processing.');
      }
    }

    try {
      const stampedBuffer = buffer;
      await fsPromises.writeFile(filepath, stampedBuffer);
      // Create responsive variants from the stamped image to keep overlays consistent
      const variants = {};
      const createVariant = async (key, opts) => {
        const variantPath = path.join('uploads', makeFilename(opts.suffix));
        const resized = await sharp(stampedBuffer)
          .resize({ width: opts.size, height: opts.size, fit: 'inside', withoutEnlargement: true })
          .toFormat('webp', { quality: opts.quality, effort: 5 })
          .toBuffer();
        await fsPromises.writeFile(variantPath, resized);
        variants[key] = variantPath;
      };
      await createVariant('medium', VARIANTS.medium);
      await createVariant('thumb', VARIANTS.thumb);
      const newPhoto = new Photo({
        filename: makeFilename(VARIANTS.main.suffix),
        path: filepath,
        variants,
        caption: captionText,
        tags: tagList,
        hash
      });

      try {
        await newPhoto.save();
        return { duplicate: false, photo: newPhoto };
      } catch (saveErr) {
        // Handle race where another upload wrote the same hash between findOne and save
        if (saveErr.code === 11000) {
          const dup = await Photo.findOne({ hash });
          return { duplicate: true, hash, existingId: dup?._id, filename: dup?.filename };
        }
        console.error('Error saving photo to database:', saveErr);
        throw new Error('Server error saving photo to database.');
      }
    } catch (err) {
      console.error('Error finalizing photo:', err);
      throw err;
    }
  };

  try {
    // Process the whole batch in parallel; any per-file error fails the request.
    const results = await Promise.all(files.map(processFile));
    const uploadedPhotos = results.filter(r => !r.duplicate).map(r => r.photo);
    const skipped = results.filter(r => r.duplicate).map(r => ({
      hash: r.hash,
      existingId: r.existingId,
      filename: r.filename
    }));

    const uploadedCount = uploadedPhotos.length;
    const skippedCount = skipped.length;
    let message = 'Upload complete.';
    if (uploadedCount && !skippedCount) {
      message = uploadedCount > 1 ? 'Photos uploaded and converted successfully!' : 'Photo uploaded and converted successfully!';
    } else if (!uploadedCount && skippedCount) {
      message = 'Skipped upload: files already exist in the gallery.';
    } else if (uploadedCount && skippedCount) {
      message = `Uploaded ${uploadedCount} file${uploadedCount === 1 ? '' : 's'}; skipped ${skippedCount} duplicate${skippedCount === 1 ? '' : 's'}.`;
    }

    res.json({
      success: true,
      message,
      uploaded: uploadedPhotos,
      skipped
    });
  } catch (error) {
    res.status(500).json({ success: false, error: error.message || 'Server error during upload.' });
  }
});
|
|
|
|
// GET a single photo by ID.
// FIX: findById resolves to null for an unknown-but-valid ObjectId; previously
// that returned HTTP 200 with a `null` body. Return 404 instead, matching the
// DELETE handler's not-found response.
router.route('/:id').get(async (req, res) => {
  try {
    const photo = await Photo.findById(req.params.id);
    if (!photo) {
      return res.status(404).json('Error: Photo not found.');
    }
    res.json(photo);
  } catch (err) {
    // Malformed ids (CastError) and DB failures surface as 400, as before.
    res.status(400).json('Error: ' + err);
  }
});
|
|
|
|
// DELETE a photo by ID: remove the DB record, then best-effort delete the main
// file and any responsive variants from disk.
// FIX: no longer logs the full request headers (they can contain cookies or
// auth tokens); flattened the nested promise chains to async/await.
router.route('/:id').delete(async (req, res) => {
  console.log('DELETE request received for photo ID:', req.params.id);
  try {
    const photo = await Photo.findByIdAndDelete(req.params.id);
    if (!photo) {
      return res.status(404).json('Error: Photo not found.');
    }
    const pathsToDelete = [photo.path];
    if (photo.variants) {
      if (photo.variants.medium) pathsToDelete.push(photo.variants.medium);
      if (photo.variants.thumb) pathsToDelete.push(photo.variants.thumb);
    }
    try {
      // Each unlink swallows its own error so one missing file doesn't block
      // the rest; the DB record is already gone at this point.
      await Promise.all(pathsToDelete.map(p => fsPromises.unlink(p).catch(() => null)));
      res.json('Photo deleted.');
    } catch (err) {
      console.error('Error deleting photo files:', err);
      res.json('Photo deleted (files cleanup may be incomplete).');
    }
  } catch (err) {
    res.status(400).json('Error: ' + err);
  }
});
|
|
|
|
// UPDATE a photo's caption and tags by ID.
// FIX: previously a missing photo left `photo` null and the subsequent property
// assignment threw a TypeError, surfacing as a misleading 400 — return 404
// explicitly. Also awaits save() instead of a dangling inner promise chain.
router.route('/update/:id').post(async (req, res) => {
  try {
    const photo = await Photo.findById(req.params.id);
    if (!photo) {
      return res.status(404).json('Error: Photo not found.');
    }

    const captionText = typeof req.body.caption === 'string' ? req.body.caption.trim() : '';
    const { normalized: tagList } = parseIncomingTags(req.body.tags);

    if (!captionText) {
      return res.status(400).json('Caption is required.');
    }
    if (!tagList.length) {
      return res.status(400).json(`Please add at least one tag (${MAX_TAGS} max).`);
    }
    if (tagList.length > MAX_TAGS) {
      return res.status(400).json(`Please keep tags under ${MAX_TAGS}.`);
    }

    photo.caption = captionText;
    photo.tags = tagList;
    await photo.save();
    res.json('Photo updated!');
  } catch (err) {
    res.status(400).json('Error: ' + err);
  }
});
|
|
|
|
// Expose the photo routes for the main app to mount (e.g. app.use('/photos', ...)).
module.exports = router;
|