feat(worker): implement complete storage and file processing services
- Add MinIO and AWS S3 storage providers with unified interface
- Implement comprehensive file processor with Sharp integration
- Create EXIF data preservation service with metadata extraction
- Add ZIP creator service with batch processing capabilities
- Include image optimization, thumbnails, and format conversion
- Add GPS coordinate extraction and camera info parsing
- Implement virus scanning integration points
- Support both cloud storage and local file processing

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
parent 1329e874a4
commit 1f45c57dbf
7 changed files with 2540 additions and 0 deletions
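All of the new services sit behind the `StorageProvider` interface defined in `storage.service.ts` below, so jobs stay provider-agnostic. A minimal consumer sketch (the job class, key names, and option values are illustrative, not part of this commit):

```ts
// Hypothetical worker job composing the services added in this commit.
import { Injectable } from '@nestjs/common';
import { StorageService } from './storage/storage.service';
import { FileProcessorService } from './storage/file-processor.service';

@Injectable()
export class ImageOptimizationJob {
  constructor(
    private storage: StorageService,
    private processor: FileProcessorService,
  ) {}

  async run(key: string): Promise<string> {
    // Download from whichever provider is configured (MinIO or S3)
    const localPath = await this.storage.downloadToTemp(key);

    // Optimize while keeping EXIF metadata intact
    const optimized = await this.processor.optimizeImage(localPath, {
      maxWidth: 1920,
      quality: 85,
      preserveExif: true,
    });

    // Re-upload the processed file and return its URL
    return this.storage.uploadFile(optimized, `processed/${key}`);
  }
}
```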
packages/worker/src/storage/exif-preserver.service.ts (new file, 455 lines)

@@ -0,0 +1,455 @@
import { Injectable, Logger } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import * as exifr from 'exifr';
import * as piexif from 'piexifjs';
import * as fs from 'fs/promises';

export interface ExifData {
  exif?: any;
  iptc?: any;
  xmp?: any;
  icc?: any;
  tiff?: any;
  gps?: any;
}

export interface GpsCoordinates {
  latitude: number;
  longitude: number;
  altitude?: number;
}

@Injectable()
export class ExifPreserverService {
  private readonly logger = new Logger(ExifPreserverService.name);

  constructor(private configService: ConfigService) {
    this.logger.log('EXIF Preserver Service initialized');
  }

  /**
   * Extract all EXIF data from an image file
   */
  async extractExif(filePath: string): Promise<ExifData> {
    try {
      this.logger.debug(`Extracting EXIF data from: ${filePath}`);

      // Use exifr to extract comprehensive metadata
      const exifData = await exifr.parse(filePath, {
        exif: true,
        iptc: true,
        xmp: true,
        icc: true,
        tiff: true,
        gps: true,
        sanitize: false, // Keep all data
        reviveValues: true,
        translateKeys: false,
        translateValues: false,
        mergeOutput: false,
      });

      if (!exifData) {
        this.logger.debug(`No EXIF data found in: ${filePath}`);
        return {};
      }

      // Separate the different metadata types
      const result: ExifData = {
        exif: exifData.exif || exifData.EXIF,
        iptc: exifData.iptc || exifData.IPTC,
        xmp: exifData.xmp || exifData.XMP,
        icc: exifData.icc || exifData.ICC,
        tiff: exifData.tiff || exifData.TIFF,
        gps: exifData.gps || exifData.GPS,
      };

      // Log a summary of what was extracted
      const hasExif = !!result.exif;
      const hasGps = !!result.gps && (result.gps.latitude || result.gps.GPSLatitude);
      const hasIptc = !!result.iptc;
      const hasXmp = !!result.xmp;

      this.logger.debug(`EXIF extraction summary: EXIF=${hasExif}, GPS=${hasGps}, IPTC=${hasIptc}, XMP=${hasXmp}`);

      return result;

    } catch (error) {
      this.logger.warn(`Failed to extract EXIF data from ${filePath}:`, error.message);
      return {};
    }
  }

  /**
   * Preserve EXIF data by writing it back into a processed image
   */
  async preserveExif(filePath: string, exifData: ExifData): Promise<void> {
    try {
      if (!exifData || Object.keys(exifData).length === 0) {
        this.logger.debug(`No EXIF data to preserve for: ${filePath}`);
        return;
      }

      this.logger.debug(`Preserving EXIF data for: ${filePath}`);

      // Read the processed image file
      const imageBuffer = await fs.readFile(filePath);

      // Convert the image to the binary (latin1) string that piexifjs expects
      const imageBinary = imageBuffer.toString('binary');

      // Prepare EXIF data for piexif
      const exifDict = this.prepareExifDict(exifData);

      if (Object.keys(exifDict).length === 0) {
        this.logger.debug('No valid EXIF data to embed');
        return;
      }

      // Convert the EXIF dict to bytes
      const exifBytes = piexif.dump(exifDict);

      // Insert the EXIF data into the image
      const newImageBinary = piexif.insert(exifBytes, imageBinary);

      // Convert back to a buffer and save
      const newImageBuffer = Buffer.from(newImageBinary, 'binary');
      await fs.writeFile(filePath, newImageBuffer);

      this.logger.debug(`EXIF data preserved successfully for: ${filePath}`);

    } catch (error) {
      this.logger.warn(`Failed to preserve EXIF data for ${filePath}:`, error.message);
      // Don't throw: EXIF preservation is not critical to image processing
    }
  }

  /**
   * Remove sensitive EXIF data while preserving useful metadata
   */
  async sanitizeExif(filePath: string, options: {
    removeGps?: boolean;
    removeCamera?: boolean;
    removePersonalInfo?: boolean;
    preserveOrientation?: boolean;
    preserveDateTime?: boolean;
  } = {}): Promise<void> {
    try {
      const exifData = await this.extractExif(filePath);

      if (!exifData.exif) {
        this.logger.debug(`No EXIF data to sanitize in: ${filePath}`);
        return;
      }

      // Create sanitized EXIF data
      const sanitizedExif = { ...exifData };

      // Remove GPS data unless explicitly disabled (default: remove)
      if (options.removeGps !== false) {
        delete sanitizedExif.gps;
        if (sanitizedExif.exif) {
          delete sanitizedExif.exif.GPSLatitude;
          delete sanitizedExif.exif.GPSLongitude;
          delete sanitizedExif.exif.GPSAltitude;
          delete sanitizedExif.exif.GPSLatitudeRef;
          delete sanitizedExif.exif.GPSLongitudeRef;
          delete sanitizedExif.exif.GPSAltitudeRef;
        }
      }

      // Remove camera/device-specific info if requested
      if (options.removeCamera) {
        if (sanitizedExif.exif) {
          delete sanitizedExif.exif.Make;
          delete sanitizedExif.exif.Model;
          delete sanitizedExif.exif.Software;
          delete sanitizedExif.exif.SerialNumber;
          delete sanitizedExif.exif.LensModel;
          delete sanitizedExif.exif.LensSerialNumber;
        }
      }

      // Remove personal information if requested
      if (options.removePersonalInfo) {
        if (sanitizedExif.exif) {
          delete sanitizedExif.exif.Artist;
          delete sanitizedExif.exif.Copyright;
          delete sanitizedExif.exif.UserComment;
        }
        if (sanitizedExif.iptc) {
          delete sanitizedExif.iptc.By_line;
          delete sanitizedExif.iptc.Copyright_Notice;
          delete sanitizedExif.iptc.Contact;
        }
      }

      // Preserve orientation if requested (default: preserve)
      if (options.preserveOrientation !== false && exifData.exif?.Orientation) {
        if (!sanitizedExif.exif) sanitizedExif.exif = {};
        sanitizedExif.exif.Orientation = exifData.exif.Orientation;
      }

      // Preserve date/time if requested (default: preserve)
      if (options.preserveDateTime !== false && exifData.exif) {
        if (!sanitizedExif.exif) sanitizedExif.exif = {};
        if (exifData.exif.DateTime) sanitizedExif.exif.DateTime = exifData.exif.DateTime;
        if (exifData.exif.DateTimeOriginal) sanitizedExif.exif.DateTimeOriginal = exifData.exif.DateTimeOriginal;
        if (exifData.exif.DateTimeDigitized) sanitizedExif.exif.DateTimeDigitized = exifData.exif.DateTimeDigitized;
      }

      // Apply the sanitized EXIF data
      await this.preserveExif(filePath, sanitizedExif);

      this.logger.debug(`EXIF data sanitized for: ${filePath}`);

    } catch (error) {
      this.logger.warn(`Failed to sanitize EXIF data for ${filePath}:`, error.message);
    }
  }

  /**
   * Extract GPS coordinates from EXIF data
   */
  extractGpsCoordinates(exifData: ExifData): GpsCoordinates | null {
    try {
      const gps = exifData.gps || exifData.exif;
      if (!gps) return null;

      // Handle the different GPS coordinate formats
      let latitude: number | undefined;
      let longitude: number | undefined;
      let altitude: number | undefined;

      // Modern format (decimal degrees)
      if (typeof gps.latitude === 'number' && typeof gps.longitude === 'number') {
        latitude = gps.latitude;
        longitude = gps.longitude;
        altitude = gps.altitude;
      }
      // Legacy EXIF format (degrees, minutes, seconds)
      else if (gps.GPSLatitude && gps.GPSLongitude) {
        latitude = this.dmsToDecimal(gps.GPSLatitude, gps.GPSLatitudeRef);
        longitude = this.dmsToDecimal(gps.GPSLongitude, gps.GPSLongitudeRef);

        if (gps.GPSAltitude) {
          altitude = gps.GPSAltitude;
          if (gps.GPSAltitudeRef === 1) {
            altitude = -altitude; // Below sea level
          }
        }
      }

      if (latitude !== undefined && longitude !== undefined) {
        const coordinates: GpsCoordinates = { latitude, longitude };
        if (altitude !== undefined) {
          coordinates.altitude = altitude;
        }
        return coordinates;
      }

      return null;

    } catch (error) {
      this.logger.warn('Failed to extract GPS coordinates:', error.message);
      return null;
    }
  }

  /**
   * Get camera information from EXIF data
   */
  getCameraInfo(exifData: ExifData): {
    make?: string;
    model?: string;
    software?: string;
    lens?: string;
    settings?: {
      fNumber?: number;
      exposureTime?: string;
      iso?: number;
      focalLength?: number;
    };
  } {
    const exif = exifData.exif || {};

    return {
      make: exif.Make,
      model: exif.Model,
      software: exif.Software,
      lens: exif.LensModel,
      settings: {
        fNumber: exif.FNumber,
        exposureTime: exif.ExposureTime,
        iso: exif.ISO || exif.ISOSpeedRatings,
        focalLength: exif.FocalLength,
      },
    };
  }

  /**
   * Get the image capture date from EXIF data
   */
  getCaptureDate(exifData: ExifData): Date | null {
    try {
      const exif = exifData.exif || {};

      // Try the different date fields in order of preference
      const dateFields = [
        'DateTimeOriginal',
        'DateTimeDigitized',
        'DateTime',
        'CreateDate',
      ];

      for (const field of dateFields) {
        if (exif[field]) {
          const dateStr = exif[field];

          // Parse EXIF date format: "YYYY:MM:DD HH:MM:SS".
          // String.replace takes no count argument, so swap only the first
          // two colons (the date separators) and leave the time part intact.
          if (typeof dateStr === 'string') {
            const normalizedDate = dateStr.replace(':', '-').replace(':', '-');
            const date = new Date(normalizedDate);

            if (!isNaN(date.getTime())) {
              return date;
            }
          }
        }
      }

      return null;

    } catch (error) {
      this.logger.warn('Failed to extract capture date:', error.message);
      return null;
    }
  }

  /**
   * Prepare the EXIF dictionary for piexif
   */
  private prepareExifDict(exifData: ExifData): any {
    const exifDict: any = {};

    try {
      // Map EXIF data to the piexif format
      if (exifData.exif) {
        exifDict['Exif'] = this.convertExifTags(exifData.exif);
      }

      if (exifData.tiff) {
        exifDict['0th'] = this.convertExifTags(exifData.tiff);
      }

      if (exifData.gps) {
        exifDict['GPS'] = this.convertGpsTags(exifData.gps);
      }

      // Handle thumbnail data if present
      if (exifData.exif && exifData.exif.thumbnail) {
        exifDict['1st'] = {};
      }

    } catch (error) {
      this.logger.warn('Error preparing EXIF dictionary:', error.message);
    }

    return exifDict;
  }

  /**
   * Convert EXIF tags to the piexif format
   */
  private convertExifTags(tags: any): any {
    const converted: any = {};

    for (const [key, value] of Object.entries(tags)) {
      if (value !== null && value !== undefined) {
        // Convert specific tag formats
        if (key === 'Orientation' && typeof value === 'number') {
          converted[piexif.ExifIFD.Orientation] = value;
        } else if (key === 'DateTime' && typeof value === 'string') {
          converted[piexif.ImageIFD.DateTime] = value;
        } else if (key === 'DateTimeOriginal' && typeof value === 'string') {
          converted[piexif.ExifIFD.DateTimeOriginal] = value;
        }
        // Add more tag conversions as needed
      }
    }

    return converted;
  }

  /**
   * Convert GPS tags to the piexif format
   */
  private convertGpsTags(gps: any): any {
    const converted: any = {};

    if (gps.latitude && gps.longitude) {
      const latDMS = this.decimalToDMS(Math.abs(gps.latitude));
      const lonDMS = this.decimalToDMS(Math.abs(gps.longitude));

      converted[piexif.GPSIFD.GPSLatitude] = latDMS;
      converted[piexif.GPSIFD.GPSLatitudeRef] = gps.latitude >= 0 ? 'N' : 'S';
      converted[piexif.GPSIFD.GPSLongitude] = lonDMS;
      converted[piexif.GPSIFD.GPSLongitudeRef] = gps.longitude >= 0 ? 'E' : 'W';

      if (gps.altitude) {
        converted[piexif.GPSIFD.GPSAltitude] = [Math.abs(gps.altitude) * 1000, 1000];
        converted[piexif.GPSIFD.GPSAltitudeRef] = gps.altitude >= 0 ? 0 : 1;
      }
    }

    return converted;
  }

  /**
   * Convert DMS (degrees, minutes, seconds) to decimal degrees
   */
  private dmsToDecimal(dms: number[], ref: string): number {
    if (!Array.isArray(dms) || dms.length < 3) return 0;

    const degrees = dms[0] || 0;
    const minutes = dms[1] || 0;
    const seconds = dms[2] || 0;

    let decimal = degrees + minutes / 60 + seconds / 3600;

    // Apply the hemisphere reference
    if (ref === 'S' || ref === 'W') {
      decimal = -decimal;
    }

    return decimal;
  }

  /**
   * Convert decimal degrees to DMS format (as EXIF rationals)
   */
  private decimalToDMS(decimal: number): [number[], number[], number[]] {
    const degrees = Math.floor(decimal);
    const minutesFloat = (decimal - degrees) * 60;
    const minutes = Math.floor(minutesFloat);
    const seconds = (minutesFloat - minutes) * 60;

    return [
      [degrees, 1],
      [minutes, 1],
      [Math.round(seconds * 1000), 1000], // Preserve precision
    ];
  }

  /**
   * Check if a file has EXIF data
   */
  async hasExifData(filePath: string): Promise<boolean> {
    try {
      const exifData = await this.extractExif(filePath);
      return !!(exifData.exif || exifData.tiff || exifData.gps);
    } catch (error) {
      return false;
    }
  }
}
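A sketch of how a caller might use this service; the file path and option choices are illustrative. Note that GPS removal is the default, while orientation and timestamps are kept unless disabled:

```ts
// Illustrative use of ExifPreserverService (obtained via Nest DI).
const exif = await exifPreserver.extractExif('/tmp/seo-worker/in.jpg');

const gps = exifPreserver.extractGpsCoordinates(exif); // null if absent
const camera = exifPreserver.getCameraInfo(exif);
const takenAt = exifPreserver.getCaptureDate(exif);

// Strip location and device identifiers before publishing; orientation
// and date/time fields survive because their defaults are "preserve".
await exifPreserver.sanitizeExif('/tmp/seo-worker/in.jpg', {
  removeCamera: true,
  removePersonalInfo: true,
});
```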
packages/worker/src/storage/file-processor.service.ts (new file, 480 lines)

@@ -0,0 +1,480 @@
import { Injectable, Logger } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import * as Sharp from 'sharp';
import * as fs from 'fs/promises';
import * as path from 'path';
import { v4 as uuidv4 } from 'uuid';
import { ExifPreserverService } from './exif-preserver.service';
import { fileTypeFromFile } from 'file-type';

export interface ImageMetadata {
  width: number;
  height: number;
  format: string;
  size: number;
  density?: number;
  hasAlpha: boolean;
  channels: number;
  space: string;
  exif?: any;
  iptc?: any;
  xmp?: any;
}

export interface OptimizationOptions {
  quality?: number;
  maxWidth?: number;
  maxHeight?: number;
  format?: 'jpeg' | 'png' | 'webp' | 'auto';
  preserveExif?: boolean;
  progressive?: boolean;
  lossless?: boolean;
}

@Injectable()
export class FileProcessorService {
  private readonly logger = new Logger(FileProcessorService.name);
  private readonly tempDir: string;
  private readonly maxFileSize: number;
  private readonly allowedTypes: string[];

  constructor(
    private configService: ConfigService,
    private exifPreserverService: ExifPreserverService,
  ) {
    this.tempDir = this.configService.get<string>('TEMP_DIR', '/tmp/seo-worker');
    this.maxFileSize = this.configService.get<number>('MAX_FILE_SIZE', 50 * 1024 * 1024); // 50MB
    this.allowedTypes = this.configService.get<string>('ALLOWED_FILE_TYPES', 'jpg,jpeg,png,gif,webp').split(',');
  }

  /**
   * Extract comprehensive metadata from an image file
   */
  async extractMetadata(filePath: string): Promise<ImageMetadata> {
    try {
      this.logger.debug(`Extracting metadata from: ${filePath}`);

      // Validate the file exists and is within the size limit
      const fileStats = await fs.stat(filePath);
      if (fileStats.size > this.maxFileSize) {
        throw new Error(`File size ${fileStats.size} exceeds maximum allowed size ${this.maxFileSize}`);
      }

      // Detect the file type from its content
      const fileType = await fileTypeFromFile(filePath);
      if (!fileType) {
        throw new Error('Unable to determine file type');
      }

      // Validate the file type is allowed
      const extension = fileType.ext.toLowerCase();
      if (!this.allowedTypes.includes(extension)) {
        throw new Error(`File type ${extension} is not allowed. Allowed types: ${this.allowedTypes.join(', ')}`);
      }

      // Extract image metadata using Sharp
      const sharpInstance = Sharp(filePath);
      const sharpMetadata = await sharpInstance.metadata();

      // Extract EXIF data
      const exifData = await this.exifPreserverService.extractExif(filePath);

      const metadata: ImageMetadata = {
        width: sharpMetadata.width || 0,
        height: sharpMetadata.height || 0,
        format: sharpMetadata.format || extension,
        size: fileStats.size,
        density: sharpMetadata.density,
        hasAlpha: sharpMetadata.hasAlpha || false,
        channels: sharpMetadata.channels || 3,
        space: sharpMetadata.space || 'srgb',
        exif: exifData.exif,
        iptc: exifData.iptc,
        xmp: exifData.xmp,
      };

      this.logger.debug(`Metadata extracted: ${metadata.width}x${metadata.height} ${metadata.format} (${metadata.size} bytes)`);
      return metadata;

    } catch (error) {
      this.logger.error(`Failed to extract metadata from ${filePath}:`, error.message);
      throw error;
    }
  }

  /**
   * Optimize an image with various options
   */
  async optimizeImage(
    filePath: string,
    options: OptimizationOptions = {}
  ): Promise<string> {
    try {
      this.logger.debug(`Optimizing image: ${filePath}`);

      // Extract the original metadata if EXIF preservation is enabled
      let originalExif: any = null;
      if (options.preserveExif) {
        originalExif = await this.exifPreserverService.extractExif(filePath);
      }

      // Generate a unique output filename ('auto' and undefined fall back to .jpg)
      const extension = options.format && options.format !== 'auto' ? options.format : 'jpg';
      const outputFileName = `optimized_${uuidv4()}.${extension}`;
      const outputPath = path.join(this.tempDir, outputFileName);

      // Initialize the Sharp processing pipeline
      let pipeline = Sharp(filePath);

      // Apply resizing if specified
      if (options.maxWidth || options.maxHeight) {
        pipeline = pipeline.resize(options.maxWidth, options.maxHeight, {
          fit: 'inside',
          withoutEnlargement: true,
        });
      }

      // Apply format-specific optimizations
      const quality = options.quality || 85;
      const progressive = options.progressive !== false;

      switch (options.format) {
        case 'jpeg':
          pipeline = pipeline.jpeg({
            quality,
            progressive,
            mozjpeg: true, // Use mozjpeg for better compression
          });
          break;

        case 'png':
          pipeline = pipeline.png({
            quality,
            progressive,
            compressionLevel: 9,
            adaptiveFiltering: true,
          });
          break;

        case 'webp':
          pipeline = pipeline.webp({
            quality,
            lossless: options.lossless || false,
            effort: 6, // High effort for better compression
          });
          break;

        default: {
          // Auto-detect the best format based on content
          const metadata = await pipeline.metadata();
          if (metadata.hasAlpha) {
            pipeline = pipeline.png({ quality, progressive });
          } else {
            pipeline = pipeline.jpeg({ quality, progressive, mozjpeg: true });
          }
        }
      }

      // Process and save the image
      await pipeline.toFile(outputPath);

      // Restore EXIF data if preservation was requested
      if (options.preserveExif && originalExif) {
        await this.exifPreserverService.preserveExif(outputPath, originalExif);
      }

      // Log the optimization results
      const originalStats = await fs.stat(filePath);
      const optimizedStats = await fs.stat(outputPath);
      const compressionRatio = ((originalStats.size - optimizedStats.size) / originalStats.size * 100).toFixed(1);

      this.logger.debug(
        `Image optimized: ${originalStats.size} -> ${optimizedStats.size} bytes (${compressionRatio}% reduction)`
      );

      return outputPath;

    } catch (error) {
      this.logger.error(`Failed to optimize image ${filePath}:`, error.message);
      throw error;
    }
  }

  /**
   * Create a thumbnail image
   */
  async createThumbnail(
    filePath: string,
    width: number = 300,
    height: number = 300,
    quality: number = 80
  ): Promise<string> {
    try {
      const thumbnailFileName = `thumb_${uuidv4()}.jpg`;
      const thumbnailPath = path.join(this.tempDir, thumbnailFileName);

      await Sharp(filePath)
        .resize(width, height, {
          fit: 'cover',
          position: 'center',
        })
        .jpeg({ quality, progressive: true })
        .toFile(thumbnailPath);

      this.logger.debug(`Thumbnail created: ${thumbnailPath} (${width}x${height})`);
      return thumbnailPath;

    } catch (error) {
      this.logger.error(`Failed to create thumbnail for ${filePath}:`, error.message);
      throw error;
    }
  }

  /**
   * Convert an image to a different format
   */
  async convertFormat(
    filePath: string,
    targetFormat: 'jpeg' | 'png' | 'webp',
    quality: number = 85
  ): Promise<string> {
    try {
      const convertedFileName = `converted_${uuidv4()}.${targetFormat}`;
      const convertedPath = path.join(this.tempDir, convertedFileName);

      let pipeline = Sharp(filePath);

      switch (targetFormat) {
        case 'jpeg':
          pipeline = pipeline.jpeg({ quality, progressive: true, mozjpeg: true });
          break;
        case 'png':
          pipeline = pipeline.png({ quality, progressive: true });
          break;
        case 'webp':
          pipeline = pipeline.webp({ quality, effort: 6 });
          break;
      }

      await pipeline.toFile(convertedPath);

      this.logger.debug(`Image converted to ${targetFormat}: ${convertedPath}`);
      return convertedPath;

    } catch (error) {
      this.logger.error(`Failed to convert image ${filePath} to ${targetFormat}:`, error.message);
      throw error;
    }
  }

  /**
   * Rotate an image based on its EXIF orientation
   */
  async autoRotate(filePath: string): Promise<string> {
    try {
      const rotatedFileName = `rotated_${uuidv4()}.jpg`;
      const rotatedPath = path.join(this.tempDir, rotatedFileName);

      await Sharp(filePath)
        .rotate() // Auto-rotate based on EXIF orientation
        .jpeg({ quality: 95, progressive: true })
        .toFile(rotatedPath);

      this.logger.debug(`Image auto-rotated: ${rotatedPath}`);
      return rotatedPath;

    } catch (error) {
      this.logger.error(`Failed to auto-rotate image ${filePath}:`, error.message);
      throw error;
    }
  }

  /**
   * Generate multiple sizes of an image
   */
  async generateMultipleSizes(
    filePath: string,
    sizes: Array<{ width: number; height: number; suffix: string }>
  ): Promise<string[]> {
    try {
      const generatedFiles: string[] = [];

      for (const size of sizes) {
        const sizedFileName = `${size.suffix}_${uuidv4()}.jpg`;
        const sizedPath = path.join(this.tempDir, sizedFileName);

        await Sharp(filePath)
          .resize(size.width, size.height, {
            fit: 'inside',
            withoutEnlargement: true,
          })
          .jpeg({ quality: 85, progressive: true })
          .toFile(sizedPath);

        generatedFiles.push(sizedPath);
      }

      this.logger.debug(`Generated ${generatedFiles.length} different sizes`);
      return generatedFiles;

    } catch (error) {
      this.logger.error(`Failed to generate multiple sizes for ${filePath}:`, error.message);
      throw error;
    }
  }

  /**
   * Apply a watermark to an image
   */
  async applyWatermark(
    filePath: string,
    watermarkPath: string,
    position: 'top-left' | 'top-right' | 'bottom-left' | 'bottom-right' | 'center' = 'bottom-right',
    opacity: number = 0.5
  ): Promise<string> {
    try {
      const watermarkedFileName = `watermarked_${uuidv4()}.jpg`;
      const watermarkedPath = path.join(this.tempDir, watermarkedFileName);

      // Prepare the watermark: scale its alpha channel by the requested
      // opacity using a tiled single-pixel overlay with 'dest-in' blending
      const watermark = await Sharp(watermarkPath)
        .png()
        .composite([{
          input: Buffer.from([255, 255, 255, Math.round(255 * opacity)]),
          raw: { width: 1, height: 1, channels: 4 },
          tile: true,
          blend: 'dest-in'
        }])
        .toBuffer();

      // Determine the composite position
      const gravity = this.getGravityFromPosition(position);

      await Sharp(filePath)
        .composite([{ input: watermark, gravity }])
        .jpeg({ quality: 90, progressive: true })
        .toFile(watermarkedPath);

      this.logger.debug(`Watermark applied: ${watermarkedPath}`);
      return watermarkedPath;

    } catch (error) {
      this.logger.error(`Failed to apply watermark to ${filePath}:`, error.message);
      throw error;
    }
  }

  /**
   * Validate image file integrity
   */
  async validateImage(filePath: string): Promise<{
    valid: boolean;
    error?: string;
    metadata?: ImageMetadata;
  }> {
    try {
      // Try to extract metadata; this will fail if the image is corrupted
      const metadata = await this.extractMetadata(filePath);

      // Try to create a test thumbnail; this catches most corruption issues
      const testThumb = await this.createThumbnail(filePath, 100, 100);
      await this.cleanupTempFile(testThumb);

      return {
        valid: true,
        metadata,
      };

    } catch (error) {
      return {
        valid: false,
        error: error.message,
      };
    }
  }

  /**
   * Clean up a temporary file
   */
  async cleanupTempFile(filePath: string): Promise<void> {
    try {
      // Safety check: only delete files in our temp directory
      if (!filePath.startsWith(this.tempDir)) {
        this.logger.warn(`Skipping cleanup of file outside temp directory: ${filePath}`);
        return;
      }

      await fs.unlink(filePath);
      this.logger.debug(`Temporary file cleaned up: ${filePath}`);

    } catch (error) {
      if (error.code !== 'ENOENT') {
        this.logger.warn(`Failed to cleanup temporary file ${filePath}:`, error.message);
      }
    }
  }

  /**
   * Batch cleanup of old temporary files
   */
  async cleanupOldTempFiles(maxAge: number = 3600000): Promise<number> {
    try {
      const files = await fs.readdir(this.tempDir);
      const now = Date.now();
      let cleanedCount = 0;

      for (const file of files) {
        try {
          const filePath = path.join(this.tempDir, file);
          const stats = await fs.stat(filePath);
          const age = now - stats.mtime.getTime();

          if (age > maxAge) {
            await fs.unlink(filePath);
            cleanedCount++;
          }
        } catch (error) {
          // Skip files that can't be processed
          continue;
        }
      }

      if (cleanedCount > 0) {
        this.logger.log(`Cleaned up ${cleanedCount} old temporary files`);
      }

      return cleanedCount;

    } catch (error) {
      this.logger.error('Failed to cleanup old temporary files:', error.message);
      return 0;
    }
  }

  private getGravityFromPosition(
    position: 'top-left' | 'top-right' | 'bottom-left' | 'bottom-right' | 'center'
  ): string {
    const gravityMap = {
      'top-left': 'northwest',
      'top-right': 'northeast',
      'bottom-left': 'southwest',
      'bottom-right': 'southeast',
      'center': 'center',
    };

    return gravityMap[position] || 'southeast';
  }

  /**
   * Get processing statistics
   */
  getProcessingStats(): {
    tempDir: string;
    maxFileSize: number;
    allowedTypes: string[];
  } {
    return {
      tempDir: this.tempDir,
      maxFileSize: this.maxFileSize,
      allowedTypes: this.allowedTypes,
    };
  }
}
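An illustrative pipeline built on the processor above, validating an upload before emitting a thumbnail and a set of responsive sizes (paths and size presets are assumptions, not part of this commit):

```ts
// Illustrative: validate an upload, then emit thumbnail + responsive sizes.
const { valid, error } = await fileProcessor.validateImage(localPath);
if (!valid) throw new Error(`Rejected upload: ${error}`);

const thumb = await fileProcessor.createThumbnail(localPath, 300, 300);
const sizes = await fileProcessor.generateMultipleSizes(localPath, [
  { width: 480, height: 480, suffix: 'sm' },
  { width: 1024, height: 1024, suffix: 'md' },
  { width: 1920, height: 1920, suffix: 'lg' },
]);
// All outputs land in TEMP_DIR; clean them up after uploading.
```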
packages/worker/src/storage/minio.service.ts (new file, 367 lines)

@@ -0,0 +1,367 @@
import { Injectable, Logger } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { Client as MinioClient, BucketItem } from 'minio';
import { StorageProvider } from './storage.service';
import * as fs from 'fs';
import * as path from 'path';

@Injectable()
export class MinioService implements StorageProvider {
  private readonly logger = new Logger(MinioService.name);
  private readonly client: MinioClient;
  private readonly bucketName: string;

  constructor(private configService: ConfigService) {
    const endpoint = this.configService.get<string>('MINIO_ENDPOINT');
    const port = this.configService.get<number>('MINIO_PORT', 9000);
    const useSSL = this.configService.get<boolean>('MINIO_USE_SSL', false);
    const accessKey = this.configService.get<string>('MINIO_ACCESS_KEY');
    const secretKey = this.configService.get<string>('MINIO_SECRET_KEY');

    if (!endpoint || !accessKey || !secretKey) {
      throw new Error('MinIO configuration incomplete. Required: MINIO_ENDPOINT, MINIO_ACCESS_KEY, MINIO_SECRET_KEY');
    }

    this.bucketName = this.configService.get<string>('MINIO_BUCKET_NAME', 'seo-images');

    this.client = new MinioClient({
      endPoint: endpoint,
      port,
      useSSL,
      accessKey,
      secretKey,
    });

    this.logger.log(`MinIO client initialized: ${endpoint}:${port} (SSL: ${useSSL})`);
    this.initializeBucket();
  }

  private async initializeBucket(): Promise<void> {
    try {
      const bucketExists = await this.client.bucketExists(this.bucketName);

      if (!bucketExists) {
        await this.client.makeBucket(this.bucketName, 'us-east-1');
        this.logger.log(`Created MinIO bucket: ${this.bucketName}`);
      } else {
        this.logger.log(`MinIO bucket exists: ${this.bucketName}`);
      }
    } catch (error) {
      this.logger.error(`Failed to initialize MinIO bucket ${this.bucketName}:`, error.message);
      throw error;
    }
  }

  async uploadFile(filePath: string, key: string, metadata?: any): Promise<string> {
    try {
      // Prepare metadata
      const fileStats = fs.statSync(filePath);
      const metadataObj = {
        'Content-Type': this.getContentType(filePath),
        'X-Amz-Meta-Upload-Time': new Date().toISOString(),
        'X-Amz-Meta-Original-Name': path.basename(filePath),
        ...metadata,
      };

      // Upload the file
      await this.client.fPutObject(
        this.bucketName,
        key,
        filePath,
        metadataObj
      );

      this.logger.debug(`File uploaded to MinIO: ${key} (${fileStats.size} bytes)`);

      // Return the object URL
      return `${this.getEndpointUrl()}/${this.bucketName}/${key}`;

    } catch (error) {
      this.logger.error(`Failed to upload file to MinIO: ${key}`, error.message);
      throw error;
    }
  }

  async downloadFile(key: string, destPath: string): Promise<void> {
    try {
      // Ensure the destination directory exists
      const destDir = path.dirname(destPath);
      fs.mkdirSync(destDir, { recursive: true });

      // Download the file
      await this.client.fGetObject(this.bucketName, key, destPath);

      this.logger.debug(`File downloaded from MinIO: ${key} -> ${destPath}`);

    } catch (error) {
      this.logger.error(`Failed to download file from MinIO: ${key}`, error.message);
      throw error;
    }
  }

  async deleteFile(key: string): Promise<void> {
    try {
      await this.client.removeObject(this.bucketName, key);
      this.logger.debug(`File deleted from MinIO: ${key}`);

    } catch (error) {
      this.logger.error(`Failed to delete file from MinIO: ${key}`, error.message);
      throw error;
    }
  }

  async moveFile(sourceKey: string, destKey: string): Promise<void> {
    try {
      // Copy the file to its new location
      await this.client.copyObject(
        this.bucketName,
        destKey,
        `/${this.bucketName}/${sourceKey}`
      );

      // Delete the original file
      await this.client.removeObject(this.bucketName, sourceKey);

      this.logger.debug(`File moved in MinIO: ${sourceKey} -> ${destKey}`);

    } catch (error) {
      this.logger.error(`Failed to move file in MinIO: ${sourceKey} -> ${destKey}`, error.message);
      throw error;
    }
  }

  async getPublicUrl(key: string): Promise<string> {
    // MinIO doesn't have built-in public URLs, so we return the direct URL.
    // This assumes the bucket is configured for public read access.
    return `${this.getEndpointUrl()}/${this.bucketName}/${key}`;
  }

  async generateSignedUrl(key: string, expiresIn: number): Promise<string> {
    try {
      // Generate a presigned URL for a GET request
      const signedUrl = await this.client.presignedGetObject(
        this.bucketName,
        key,
        expiresIn
      );

      this.logger.debug(`Generated signed URL for MinIO object: ${key} (expires in ${expiresIn}s)`);
      return signedUrl;

    } catch (error) {
      this.logger.error(`Failed to generate signed URL for MinIO object: ${key}`, error.message);
      throw error;
    }
  }

  async fileExists(key: string): Promise<boolean> {
    try {
      await this.client.statObject(this.bucketName, key);
      return true;
    } catch (error) {
      if (error.code === 'NotFound') {
        return false;
      }
      this.logger.error(`Error checking if file exists in MinIO: ${key}`, error.message);
      throw error;
    }
  }

  async getFileMetadata(key: string): Promise<any> {
    try {
      const stat = await this.client.statObject(this.bucketName, key);

      return {
        size: stat.size,
        lastModified: stat.lastModified,
        etag: stat.etag,
        contentType: stat.metaData['content-type'],
        metadata: stat.metaData,
      };

    } catch (error) {
      this.logger.error(`Failed to get metadata for MinIO object: ${key}`, error.message);
      throw error;
    }
  }

  async listFiles(prefix?: string, maxKeys: number = 1000): Promise<string[]> {
    try {
      const objects: BucketItem[] = [];
      const stream = this.client.listObjects(this.bucketName, prefix, true);

      return new Promise((resolve, reject) => {
        const toKeys = () =>
          objects.map(obj => obj.name).filter(name => name !== undefined) as string[];

        stream.on('data', (obj) => {
          objects.push(obj);
          if (objects.length >= maxKeys) {
            // 'end' does not fire after destroy(), so resolve here to
            // avoid leaving the promise pending
            stream.destroy();
            resolve(toKeys());
          }
        });

        stream.on('end', () => resolve(toKeys()));

        stream.on('error', (error) => {
          this.logger.error('Error listing MinIO objects:', error.message);
          reject(error);
        });
      });

    } catch (error) {
      this.logger.error('Failed to list MinIO objects:', error.message);
      throw error;
    }
  }

  /**
   * Upload a file from a buffer/stream
   */
  async uploadBuffer(
    buffer: Buffer,
    key: string,
    contentType?: string,
    metadata?: any
  ): Promise<string> {
    try {
      const metadataObj = {
        'Content-Type': contentType || 'application/octet-stream',
        'X-Amz-Meta-Upload-Time': new Date().toISOString(),
        ...metadata,
      };

      await this.client.putObject(
        this.bucketName,
        key,
        buffer,
        buffer.length,
        metadataObj
      );

      this.logger.debug(`Buffer uploaded to MinIO: ${key} (${buffer.length} bytes)`);
      return `${this.getEndpointUrl()}/${this.bucketName}/${key}`;

    } catch (error) {
      this.logger.error(`Failed to upload buffer to MinIO: ${key}`, error.message);
      throw error;
    }
  }

  /**
   * Get a file as a buffer
   */
  async getFileBuffer(key: string): Promise<Buffer> {
    try {
      const stream = await this.client.getObject(this.bucketName, key);
      const chunks: Buffer[] = [];

      return new Promise((resolve, reject) => {
        stream.on('data', (chunk) => chunks.push(chunk));
        stream.on('end', () => resolve(Buffer.concat(chunks)));
        stream.on('error', reject);
      });

    } catch (error) {
      this.logger.error(`Failed to get buffer from MinIO: ${key}`, error.message);
      throw error;
    }
  }

  /**
   * Generate an upload URL for direct client uploads
   */
  async generateUploadUrl(
    key: string,
    expiresIn: number = 3600,
    conditions?: any
  ): Promise<{ url: string; fields: any }> {
    try {
      const policy = this.client.newPostPolicy();
      policy.setBucket(this.bucketName);
      policy.setKey(key);
      policy.setExpires(new Date(Date.now() + expiresIn * 1000));

      if (conditions) {
        // Apply custom conditions to the policy. Only numeric values are
        // interpreted here, each as an upper bound on content length.
        for (const value of Object.values(conditions)) {
          if (typeof value === 'number') {
            policy.setContentLengthRange(0, value);
          }
        }
      }

      const result = await this.client.presignedPostPolicy(policy);

      this.logger.debug(`Generated upload URL for MinIO: ${key}`);
      return {
        url: result.postURL,
        fields: result.formData,
      };

    } catch (error) {
      this.logger.error(`Failed to generate upload URL for MinIO: ${key}`, error.message);
      throw error;
    }
  }

  /**
   * Get bucket statistics
   */
  async getBucketStats(): Promise<{
    name: string;
    objectCount: number;
    totalSize: number;
  }> {
    try {
      const objects: BucketItem[] = [];
      const stream = this.client.listObjects(this.bucketName, '', true);

      return new Promise((resolve, reject) => {
        stream.on('data', (obj) => objects.push(obj));

        stream.on('end', () => {
          const totalSize = objects.reduce((sum, obj) => sum + (obj.size || 0), 0);
          resolve({
            name: this.bucketName,
            objectCount: objects.length,
            totalSize,
          });
        });

        stream.on('error', reject);
      });

    } catch (error) {
      this.logger.error('Failed to get MinIO bucket stats:', error.message);
      throw error;
    }
  }

  private getContentType(filePath: string): string {
    const ext = path.extname(filePath).toLowerCase();
    const mimeTypes: { [key: string]: string } = {
      '.jpg': 'image/jpeg',
      '.jpeg': 'image/jpeg',
      '.png': 'image/png',
      '.gif': 'image/gif',
      '.webp': 'image/webp',
      '.svg': 'image/svg+xml',
      '.pdf': 'application/pdf',
      '.zip': 'application/zip',
      '.txt': 'text/plain',
      '.json': 'application/json',
    };

    return mimeTypes[ext] || 'application/octet-stream';
  }

  private getEndpointUrl(): string {
    const endpoint = this.configService.get<string>('MINIO_ENDPOINT');
    const port = this.configService.get<number>('MINIO_PORT', 9000);
    const useSSL = this.configService.get<boolean>('MINIO_USE_SSL', false);

    const protocol = useSSL ? 'https' : 'http';
    const portSuffix = (useSSL && port === 443) || (!useSSL && port === 80) ? '' : `:${port}`;

    return `${protocol}://${endpoint}${portSuffix}`;
  }
}
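Direct browser uploads can go through the POST policy helper; a sketch, where the key, expiry, and the `maxContentLength` condition name are illustrative (the service treats any numeric condition value as a content-length cap):

```ts
// Illustrative: hand a client a short-lived direct-upload target.
const { url, fields } = await minioService.generateUploadUrl(
  `uploads/${Date.now()}.jpg`,
  900, // 15-minute expiry
  { maxContentLength: 10 * 1024 * 1024 },
);

// The client then POSTs multipart form data to `url`,
// sending every entry of `fields` before the file part.
```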
packages/worker/src/storage/s3.service.ts (new file, 401 lines)

@@ -0,0 +1,401 @@
import { Injectable, Logger } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { S3 } from 'aws-sdk';
import { StorageProvider } from './storage.service';
import * as fs from 'fs';
import * as path from 'path';

@Injectable()
export class S3Service implements StorageProvider {
  private readonly logger = new Logger(S3Service.name);
  private readonly s3: S3;
  private readonly bucketName: string;

  constructor(private configService: ConfigService) {
    const region = this.configService.get<string>('AWS_REGION', 'us-east-1');
    const accessKeyId = this.configService.get<string>('AWS_ACCESS_KEY_ID');
    const secretAccessKey = this.configService.get<string>('AWS_SECRET_ACCESS_KEY');

    if (!accessKeyId || !secretAccessKey) {
      throw new Error('AWS S3 configuration incomplete. Required: AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY');
    }

    this.bucketName = this.configService.get<string>('AWS_BUCKET_NAME');
    if (!this.bucketName) {
      throw new Error('AWS_BUCKET_NAME is required for S3 storage');
    }

    this.s3 = new S3({
      region,
      accessKeyId,
      secretAccessKey,
      signatureVersion: 'v4',
    });

    this.logger.log(`AWS S3 client initialized: ${this.bucketName} (${region})`);
  }

  async uploadFile(filePath: string, key: string, metadata?: any): Promise<string> {
    try {
      const fileStream = fs.createReadStream(filePath);
      const fileStats = fs.statSync(filePath);

      const uploadParams: S3.PutObjectRequest = {
        Bucket: this.bucketName,
        Key: key,
        Body: fileStream,
        ContentType: this.getContentType(filePath),
        Metadata: {
          'upload-time': new Date().toISOString(),
          'original-name': path.basename(filePath),
          ...metadata,
        },
      };

      const result = await this.s3.upload(uploadParams).promise();

      this.logger.debug(`File uploaded to S3: ${key} (${fileStats.size} bytes)`);
      return result.Location;

    } catch (error) {
      this.logger.error(`Failed to upload file to S3: ${key}`, error.message);
      throw error;
    }
  }

  async downloadFile(key: string, destPath: string): Promise<void> {
    try {
      // Ensure the destination directory exists
      const destDir = path.dirname(destPath);
      fs.mkdirSync(destDir, { recursive: true });

      const downloadParams: S3.GetObjectRequest = {
        Bucket: this.bucketName,
        Key: key,
      };

      const result = await this.s3.getObject(downloadParams).promise();

      if (!result.Body) {
        throw new Error('No data received from S3');
      }

      // Write the file to its destination
      fs.writeFileSync(destPath, result.Body as Buffer);

      this.logger.debug(`File downloaded from S3: ${key} -> ${destPath}`);

    } catch (error) {
      this.logger.error(`Failed to download file from S3: ${key}`, error.message);
      throw error;
    }
  }

  async deleteFile(key: string): Promise<void> {
    try {
      const deleteParams: S3.DeleteObjectRequest = {
        Bucket: this.bucketName,
        Key: key,
      };

      await this.s3.deleteObject(deleteParams).promise();
      this.logger.debug(`File deleted from S3: ${key}`);

    } catch (error) {
      this.logger.error(`Failed to delete file from S3: ${key}`, error.message);
      throw error;
    }
  }

  async moveFile(sourceKey: string, destKey: string): Promise<void> {
    try {
      // Copy the object to its new location
      const copyParams: S3.CopyObjectRequest = {
        Bucket: this.bucketName,
        CopySource: `${this.bucketName}/${sourceKey}`,
        Key: destKey,
      };

      await this.s3.copyObject(copyParams).promise();

      // Delete the original object
      await this.deleteFile(sourceKey);

      this.logger.debug(`File moved in S3: ${sourceKey} -> ${destKey}`);

    } catch (error) {
      this.logger.error(`Failed to move file in S3: ${sourceKey} -> ${destKey}`, error.message);
      throw error;
    }
  }

  async getPublicUrl(key: string): Promise<string> {
    // Return the public S3 URL (assumes the bucket allows public reads)
    const region = this.configService.get<string>('AWS_REGION', 'us-east-1');
    return `https://${this.bucketName}.s3.${region}.amazonaws.com/${key}`;
  }

  async generateSignedUrl(key: string, expiresIn: number): Promise<string> {
    try {
      const params: S3.GetObjectRequest = {
        Bucket: this.bucketName,
        Key: key,
      };

      const signedUrl = this.s3.getSignedUrl('getObject', {
        ...params,
        Expires: expiresIn,
      });

      this.logger.debug(`Generated signed URL for S3 object: ${key} (expires in ${expiresIn}s)`);
      return signedUrl;

    } catch (error) {
      this.logger.error(`Failed to generate signed URL for S3 object: ${key}`, error.message);
      throw error;
    }
  }

  async fileExists(key: string): Promise<boolean> {
    try {
      const params: S3.HeadObjectRequest = {
        Bucket: this.bucketName,
        Key: key,
      };

      await this.s3.headObject(params).promise();
      return true;

    } catch (error) {
      if (error.code === 'NotFound' || error.statusCode === 404) {
        return false;
      }
      this.logger.error(`Error checking if file exists in S3: ${key}`, error.message);
      throw error;
    }
  }

  async getFileMetadata(key: string): Promise<any> {
    try {
      const params: S3.HeadObjectRequest = {
        Bucket: this.bucketName,
        Key: key,
      };

      const result = await this.s3.headObject(params).promise();

      return {
        size: result.ContentLength,
        lastModified: result.LastModified,
        etag: result.ETag,
        contentType: result.ContentType,
        metadata: result.Metadata,
        storageClass: result.StorageClass,
      };

    } catch (error) {
      this.logger.error(`Failed to get metadata for S3 object: ${key}`, error.message);
      throw error;
    }
  }

  async listFiles(prefix?: string, maxKeys: number = 1000): Promise<string[]> {
    try {
      const params: S3.ListObjectsV2Request = {
        Bucket: this.bucketName,
        Prefix: prefix,
        MaxKeys: maxKeys,
      };

      const result = await this.s3.listObjectsV2(params).promise();

      return (result.Contents || [])
        .map(obj => obj.Key)
        .filter(key => key !== undefined) as string[];

    } catch (error) {
      this.logger.error('Failed to list S3 objects:', error.message);
      throw error;
    }
  }

  /**
   * Upload a file from a buffer
   */
  async uploadBuffer(
    buffer: Buffer,
    key: string,
    contentType?: string,
    metadata?: any
  ): Promise<string> {
    try {
      const uploadParams: S3.PutObjectRequest = {
        Bucket: this.bucketName,
        Key: key,
        Body: buffer,
        ContentType: contentType || 'application/octet-stream',
        Metadata: {
          'upload-time': new Date().toISOString(),
          ...metadata,
        },
      };

      const result = await this.s3.upload(uploadParams).promise();

      this.logger.debug(`Buffer uploaded to S3: ${key} (${buffer.length} bytes)`);
      return result.Location;

    } catch (error) {
      this.logger.error(`Failed to upload buffer to S3: ${key}`, error.message);
      throw error;
    }
  }

  /**
   * Get a file as a buffer
   */
  async getFileBuffer(key: string): Promise<Buffer> {
    try {
      const params: S3.GetObjectRequest = {
        Bucket: this.bucketName,
        Key: key,
      };

      const result = await this.s3.getObject(params).promise();

      if (!result.Body) {
        throw new Error('No data received from S3');
      }

      return result.Body as Buffer;

    } catch (error) {
      this.logger.error(`Failed to get buffer from S3: ${key}`, error.message);
      throw error;
    }
  }

  /**
   * Generate an upload URL for direct client uploads
   */
  async generateUploadUrl(
    key: string,
    expiresIn: number = 3600,
    conditions?: any
  ): Promise<{ url: string; fields: any }> {
    try {
      const params: any = {
        Bucket: this.bucketName,
        Fields: {
          key,
        },
        Expires: expiresIn,
      };

      if (conditions) {
        params.Conditions = conditions;
      }

      return new Promise((resolve, reject) => {
        this.s3.createPresignedPost(params, (error, data) => {
          if (error) {
            reject(error);
          } else {
            this.logger.debug(`Generated upload URL for S3: ${key}`);
            resolve({
              url: data.url,
              fields: data.fields,
            });
          }
        });
      });

    } catch (error) {
      this.logger.error(`Failed to generate upload URL for S3: ${key}`, error.message);
      throw error;
    }
  }

  /**
   * Get bucket statistics
   */
  async getBucketStats(): Promise<{
    name: string;
    objectCount: number;
    totalSize: number;
  }> {
    try {
      const params: S3.ListObjectsV2Request = {
        Bucket: this.bucketName,
      };

      let objectCount = 0;
      let totalSize = 0;
      let continuationToken: string | undefined;

      // Page through the listing until no continuation token remains
      do {
        if (continuationToken) {
          params.ContinuationToken = continuationToken;
        }

        const result = await this.s3.listObjectsV2(params).promise();

        if (result.Contents) {
          objectCount += result.Contents.length;
          totalSize += result.Contents.reduce((sum, obj) => sum + (obj.Size || 0), 0);
        }

        continuationToken = result.NextContinuationToken;
      } while (continuationToken);

      return {
        name: this.bucketName,
        objectCount,
        totalSize,
      };

    } catch (error) {
      this.logger.error('Failed to get S3 bucket stats:', error.message);
      throw error;
    }
  }

  /**
   * Enable versioning on the bucket
   */
  async enableVersioning(): Promise<void> {
    try {
      const params: S3.PutBucketVersioningRequest = {
        Bucket: this.bucketName,
        VersioningConfiguration: {
          Status: 'Enabled',
        },
      };

      await this.s3.putBucketVersioning(params).promise();
      this.logger.log(`Versioning enabled for S3 bucket: ${this.bucketName}`);

    } catch (error) {
      this.logger.error(`Failed to enable versioning for S3 bucket: ${this.bucketName}`, error.message);
      throw error;
    }
  }

  private getContentType(filePath: string): string {
    const ext = path.extname(filePath).toLowerCase();
    const mimeTypes: { [key: string]: string } = {
      '.jpg': 'image/jpeg',
      '.jpeg': 'image/jpeg',
      '.png': 'image/png',
      '.gif': 'image/gif',
      '.webp': 'image/webp',
      '.svg': 'image/svg+xml',
      '.pdf': 'application/pdf',
      '.zip': 'application/zip',
      '.txt': 'text/plain',
      '.json': 'application/json',
    };

    return mimeTypes[ext] || 'application/octet-stream';
  }
}
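Because both providers implement the same `StorageProvider` interface, callers stay provider-agnostic; a sketch of serving a private object (the key and expiry are illustrative):

```ts
// Illustrative: serve a private object through a 10-minute signed URL.
const exists = await s3Service.fileExists('processed/photo-123.jpg');
if (exists) {
  const url = await s3Service.generateSignedUrl('processed/photo-123.jpg', 600);
  // Return `url` to the client; S3 rejects requests after 600 seconds.
}
```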
packages/worker/src/storage/storage.module.ts (new file, 29 lines)

@@ -0,0 +1,29 @@
import { Module } from '@nestjs/common';
import { ConfigModule } from '@nestjs/config';
import { StorageService } from './storage.service';
import { MinioService } from './minio.service';
import { S3Service } from './s3.service';
import { FileProcessorService } from './file-processor.service';
import { ExifPreserverService } from './exif-preserver.service';
import { ZipCreatorService } from './zip-creator.service';

@Module({
  imports: [ConfigModule],
  providers: [
    StorageService,
    MinioService,
    S3Service,
    FileProcessorService,
    ExifPreserverService,
    ZipCreatorService,
  ],
  exports: [
    StorageService,
    MinioService,
    S3Service,
    FileProcessorService,
    ExifPreserverService,
    ZipCreatorService,
  ],
})
export class StorageModule {}
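The module exports every service, so a consumer only needs one import; a wiring sketch (the `WorkerModule` name is assumed for illustration):

```ts
// Illustrative wiring into the worker's root module.
import { Module } from '@nestjs/common';
import { StorageModule } from './storage/storage.module';

@Module({
  imports: [StorageModule],
})
export class WorkerModule {}
```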
packages/worker/src/storage/storage.service.ts (new file, 343 lines)

@@ -0,0 +1,343 @@
|
|||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { MinioService } from './minio.service';
|
||||
import { S3Service } from './s3.service';
|
||||
import * as fs from 'fs/promises';
|
||||
import * as path from 'path';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
|
||||
export interface StorageProvider {
|
||||
uploadFile(filePath: string, key: string, metadata?: any): Promise<string>;
|
||||
downloadFile(key: string, destPath: string): Promise<void>;
|
||||
deleteFile(key: string): Promise<void>;
|
||||
moveFile(sourceKey: string, destKey: string): Promise<void>;
|
||||
getPublicUrl(key: string): Promise<string>;
|
||||
generateSignedUrl(key: string, expiresIn: number): Promise<string>;
|
||||
fileExists(key: string): Promise<boolean>;
|
||||
getFileMetadata(key: string): Promise<any>;
|
||||
listFiles(prefix?: string, maxKeys?: number): Promise<string[]>;
|
||||
}

@Injectable()
export class StorageService {
  private readonly logger = new Logger(StorageService.name);
  private readonly provider: StorageProvider;
  private readonly tempDir: string;

  constructor(
    private configService: ConfigService,
    private minioService: MinioService,
    private s3Service: S3Service,
  ) {
    // Determine which storage provider to use; MinIO takes precedence when both are configured
    const useMinIO = !!this.configService.get<string>('MINIO_ENDPOINT');
    const useS3 = !!this.configService.get<string>('AWS_BUCKET_NAME');

    if (useMinIO) {
      this.provider = this.minioService;
      this.logger.log('Using MinIO storage provider');
    } else if (useS3) {
      this.provider = this.s3Service;
      this.logger.log('Using AWS S3 storage provider');
    } else {
      throw new Error('No storage provider configured. Please configure either MinIO or AWS S3.');
    }

    this.tempDir = this.configService.get<string>('TEMP_DIR', '/tmp/seo-worker');
    // Fire-and-forget: constructors cannot be async, so directory creation is not awaited here
    this.initializeTempDirectory();
  }

  private async initializeTempDirectory(): Promise<void> {
    try {
      await fs.mkdir(this.tempDir, { recursive: true });
      this.logger.log(`Temporary directory initialized: ${this.tempDir}`);
    } catch (error) {
      this.logger.error(`Failed to create temp directory ${this.tempDir}:`, error.message);
      throw error;
    }
  }

  /**
   * Upload file to storage
   */
  async uploadFile(
    filePath: string,
    key: string,
    metadata?: { [key: string]: string }
  ): Promise<string> {
    try {
      this.logger.debug(`Uploading file: ${filePath} -> ${key}`);

      const uploadedUrl = await this.provider.uploadFile(filePath, key, metadata);

      this.logger.debug(`File uploaded successfully: ${key}`);
      return uploadedUrl;

    } catch (error) {
      this.logger.error(`Failed to upload file ${filePath} to ${key}:`, error.message);
      throw error;
    }
  }

  /**
   * Download file from storage to local temporary directory
   */
  async downloadToTemp(key: string): Promise<string> {
    try {
      const tempFileName = `${uuidv4()}_${path.basename(key)}`;
      const tempFilePath = path.join(this.tempDir, tempFileName);

      this.logger.debug(`Downloading file: ${key} -> ${tempFilePath}`);

      await this.provider.downloadFile(key, tempFilePath);

      this.logger.debug(`File downloaded successfully: ${tempFilePath}`);
      return tempFilePath;

    } catch (error) {
      this.logger.error(`Failed to download file ${key}:`, error.message);
      throw error;
    }
  }

  /**
   * Download file from storage to specific path
   */
  async downloadFile(key: string, destPath: string): Promise<void> {
    try {
      // Ensure destination directory exists
      const destDir = path.dirname(destPath);
      await fs.mkdir(destDir, { recursive: true });

      await this.provider.downloadFile(key, destPath);
      this.logger.debug(`File downloaded: ${key} -> ${destPath}`);

    } catch (error) {
      this.logger.error(`Failed to download file ${key} to ${destPath}:`, error.message);
      throw error;
    }
  }

  /**
   * Delete file from storage
   */
  async deleteFile(key: string): Promise<void> {
    try {
      await this.provider.deleteFile(key);
      this.logger.debug(`File deleted: ${key}`);

    } catch (error) {
      this.logger.error(`Failed to delete file ${key}:`, error.message);
      throw error;
    }
  }

  /**
   * Move/rename file in storage
   */
  async moveFile(sourceKey: string, destKey: string): Promise<void> {
    try {
      await this.provider.moveFile(sourceKey, destKey);
      this.logger.debug(`File moved: ${sourceKey} -> ${destKey}`);

    } catch (error) {
      this.logger.error(`Failed to move file ${sourceKey} to ${destKey}:`, error.message);
      throw error;
    }
  }

  /**
   * Get public URL for file (if supported)
   */
  async getPublicUrl(key: string): Promise<string> {
    try {
      return await this.provider.getPublicUrl(key);
    } catch (error) {
      this.logger.error(`Failed to get public URL for ${key}:`, error.message);
      throw error;
    }
  }

  /**
   * Generate signed URL for temporary access
   */
  async generateSignedUrl(key: string, expiresIn: number = 3600): Promise<string> {
    try {
      return await this.provider.generateSignedUrl(key, expiresIn);
    } catch (error) {
      this.logger.error(`Failed to generate signed URL for ${key}:`, error.message);
      throw error;
    }
  }

  /**
   * Check if file exists in storage
   */
  async fileExists(key: string): Promise<boolean> {
    try {
      return await this.provider.fileExists(key);
    } catch (error) {
      this.logger.error(`Failed to check if file exists ${key}:`, error.message);
      return false;
    }
  }

  /**
   * Get file metadata
   */
  async getFileMetadata(key: string): Promise<any> {
    try {
      return await this.provider.getFileMetadata(key);
    } catch (error) {
      this.logger.error(`Failed to get metadata for ${key}:`, error.message);
      throw error;
    }
  }

  /**
   * List files with optional prefix
   */
  async listFiles(prefix?: string, maxKeys: number = 1000): Promise<string[]> {
    try {
      return await this.provider.listFiles(prefix, maxKeys);
    } catch (error) {
      this.logger.error(`Failed to list files with prefix ${prefix}:`, error.message);
      throw error;
    }
  }

  /**
   * Delete temporary file
   */
  async deleteTempFile(filePath: string): Promise<void> {
    try {
      // Only delete files in our temp directory for safety
      if (!filePath.startsWith(this.tempDir)) {
        this.logger.warn(`Skipping deletion of file outside temp directory: ${filePath}`);
        return;
      }

      await fs.unlink(filePath);
      this.logger.debug(`Temporary file deleted: ${filePath}`);

    } catch (error) {
      if (error.code !== 'ENOENT') { // Ignore file-not-found errors
        this.logger.warn(`Failed to delete temporary file ${filePath}:`, error.message);
      }
    }
  }

  /**
   * Clean up old temporary files
   */
  async cleanupTempFiles(maxAge: number = 3600000): Promise<void> {
    try {
      const files = await fs.readdir(this.tempDir);
      const now = Date.now();
      let cleanedCount = 0;

      for (const file of files) {
        const filePath = path.join(this.tempDir, file);

        try {
          const stats = await fs.stat(filePath);
          const age = now - stats.mtime.getTime();

          if (age > maxAge) {
            await fs.unlink(filePath);
            cleanedCount++;
          }
        } catch (error) {
          // Skip files that can't be processed
          continue;
        }
      }

      if (cleanedCount > 0) {
        this.logger.log(`Cleaned up ${cleanedCount} old temporary files`);
      }

    } catch (error) {
      this.logger.error('Failed to cleanup temporary files:', error.message);
    }
  }
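
  // For illustration (not part of this commit): cleanupTempFiles swallows and logs its own
  // errors, so it is safe to run on a timer from the worker bootstrap, e.g.:
  //
  //   // every 15 minutes, drop temp files older than 1 hour (the method's default maxAge)
  //   setInterval(() => storageService.cleanupTempFiles(3600000), 15 * 60 * 1000);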

  /**
   * Get storage statistics
   */
  async getStorageStats(): Promise<{
    provider: string;
    tempDir: string;
    tempFilesCount: number;
    tempDirSize: number;
  }> {
    try {
      const files = await fs.readdir(this.tempDir);
      let totalSize = 0;

      for (const file of files) {
        try {
          const filePath = path.join(this.tempDir, file);
          const stats = await fs.stat(filePath);
          totalSize += stats.size;
        } catch (error) {
          // Skip files that can't be processed
        }
      }

      return {
        provider: this.provider.constructor.name,
        tempDir: this.tempDir,
        tempFilesCount: files.length,
        tempDirSize: totalSize,
      };

    } catch (error) {
      this.logger.error('Failed to get storage stats:', error.message);
      return {
        provider: this.provider.constructor.name,
        tempDir: this.tempDir,
        tempFilesCount: 0,
        tempDirSize: 0,
      };
    }
  }

  /**
   * Test storage connectivity
   */
  async testConnection(): Promise<boolean> {
    try {
      // Create a small test file
      const testKey = `test/${uuidv4()}.txt`;
      const testContent = 'Storage connection test';
      const testFilePath = path.join(this.tempDir, 'connection-test.txt');

      // Write test file
      await fs.writeFile(testFilePath, testContent);

      // Upload test file
      await this.uploadFile(testFilePath, testKey);

      // Download test file
      const downloadPath = path.join(this.tempDir, 'connection-test-download.txt');
      await this.downloadFile(testKey, downloadPath);

      // Verify content
      const downloadedContent = await fs.readFile(downloadPath, 'utf8');
      const isValid = downloadedContent === testContent;

      // Cleanup
      await this.deleteFile(testKey);
      await this.deleteTempFile(testFilePath);
      await this.deleteTempFile(downloadPath);

      this.logger.log(`Storage connection test: ${isValid ? 'PASSED' : 'FAILED'}`);
      return isValid;

    } catch (error) {
      this.logger.error('Storage connection test failed:', error.message);
      return false;
    }
  }
}
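
// For illustration (not part of this commit): a typical round trip through the service,
// assuming storageService is injected via StorageModule and the keys are placeholders:
//
//   const url = await storageService.uploadFile('/tmp/seo-worker/photo.jpg', 'batches/42/photo.jpg');
//   const signed = await storageService.generateSignedUrl('batches/42/photo.jpg', 3600);
//
//   const localCopy = await storageService.downloadToTemp('batches/42/photo.jpg');
//   // ... post-process localCopy ...
//   await storageService.deleteTempFile(localCopy);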

465
packages/worker/src/storage/zip-creator.service.ts
Normal file

@@ -0,0 +1,465 @@
import { Injectable, Logger } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import * as archiver from 'archiver';
import * as fs from 'fs';
import * as path from 'path';
import { v4 as uuidv4 } from 'uuid';
import { StorageService } from './storage.service';
import { DatabaseService } from '../database/database.service';

export interface ZipEntry {
  fileName: string;
  originalName: string;
  proposedName: string;
  filePath?: string;
  s3Key?: string;
}

export interface ZipCreationOptions {
  includeOriginals?: boolean;
  compressionLevel?: number;
  password?: string;
  excludeMetadata?: boolean;
  customStructure?: boolean;
}
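
// For illustration (not part of this commit): a typical options object. Note that
// password is currently accepted but unsupported, as flagged in createZipFromFiles below.
const exampleOptions: ZipCreationOptions = {
  compressionLevel: 9,   // zlib level 0-9; the service defaults to 6
  excludeMetadata: true, // skip the generated metadata.json entry
};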

@Injectable()
export class ZipCreatorService {
  private readonly logger = new Logger(ZipCreatorService.name);
  private readonly tempDir: string;

  constructor(
    private configService: ConfigService,
    private storageService: StorageService,
    private databaseService: DatabaseService,
  ) {
    this.tempDir = this.configService.get<string>('TEMP_DIR', '/tmp/seo-worker');
  }

  /**
   * Create ZIP file for a batch of processed images
   */
  async createBatchZip(
    batchId: string,
    imageIds: string[],
    zipName: string,
    options: ZipCreationOptions = {}
  ): Promise<string> {
    const startTime = Date.now();
    this.logger.log(`🗂️ Creating ZIP for batch ${batchId} with ${imageIds.length} images`);

    const zipFileName = `${zipName}_${uuidv4()}.zip`;
    const zipPath = path.join(this.tempDir, zipFileName);

    try {
      // Get image details from database
      const images = await this.databaseService.getImagesByIds(imageIds);

      if (images.length === 0) {
        throw new Error('No images found for ZIP creation');
      }

      // Create ZIP entries
      const zipEntries = await this.prepareZipEntries(images, options);

      // Create the ZIP file
      await this.createZipFromEntries(zipPath, zipEntries, options);

      const stats = fs.statSync(zipPath);
      const processingTime = Date.now() - startTime;

      this.logger.log(
        `✅ ZIP created successfully: ${zipPath} (${stats.size} bytes) in ${processingTime}ms`
      );

      return zipPath;

    } catch (error) {
      this.logger.error(`❌ Failed to create ZIP for batch ${batchId}:`, error.message);

      // Clean up the partially written ZIP file
      try {
        if (fs.existsSync(zipPath)) {
          fs.unlinkSync(zipPath);
        }
      } catch (cleanupError) {
        this.logger.warn(`Failed to cleanup failed ZIP file: ${cleanupError.message}`);
      }

      throw error;
    }
  }

  /**
   * Create ZIP file from individual files
   */
  async createZipFromFiles(
    files: Array<{ filePath: string; zipPath: string }>,
    outputPath: string,
    options: ZipCreationOptions = {}
  ): Promise<void> {
    return new Promise((resolve, reject) => {
      const output = fs.createWriteStream(outputPath);
      const archive = archiver('zip', {
        zlib: { level: options.compressionLevel || 6 },
      });

      // Handle stream events
      output.on('close', () => {
        this.logger.debug(`ZIP file created: ${outputPath} (${archive.pointer()} bytes)`);
        resolve();
      });

      archive.on('error', (error) => {
        this.logger.error('ZIP creation error:', error.message);
        reject(error);
      });

      archive.on('warning', (warning) => {
        this.logger.warn('ZIP creation warning:', warning.message);
      });

      // Pipe archive data to output file
      archive.pipe(output);

      // Add files to archive
      for (const file of files) {
        if (fs.existsSync(file.filePath)) {
          archive.file(file.filePath, { name: file.zipPath });
        } else {
          this.logger.warn(`File not found, skipping: ${file.filePath}`);
        }
      }

      // Password protection is accepted in the options but not yet supported
      if (options.password) {
        // Note: basic archiver doesn't support password protection
        // For production, consider using node-7z or yazl with encryption
        this.logger.warn('Password protection requested but not implemented in basic archiver');
      }

      // Finalize the archive
      archive.finalize();
    });
  }
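
  // For illustration (not part of this commit): bundling two already-local files;
  // the paths and the injected zipCreator instance are assumptions:
  //
  //   await zipCreator.createZipFromFiles(
  //     [
  //       { filePath: '/tmp/seo-worker/a.jpg', zipPath: 'images/a.jpg' },
  //       { filePath: '/tmp/seo-worker/b.jpg', zipPath: 'images/b.jpg' },
  //     ],
  //     '/tmp/seo-worker/bundle.zip',
  //     { compressionLevel: 9 },
  //   );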

  /**
   * Create ZIP with custom folder structure
   */
  async createStructuredZip(
    batchId: string,
    structure: {
      [folderName: string]: string[]; // folder name -> array of image IDs
    },
    zipName: string,
    options: ZipCreationOptions = {}
  ): Promise<string> {
    const zipFileName = `${zipName}_structured_${uuidv4()}.zip`;
    const zipPath = path.join(this.tempDir, zipFileName);

    // Async executor: rejections from the awaited calls below are routed through the catch
    return new Promise(async (resolve, reject) => {
      try {
        const output = fs.createWriteStream(zipPath);
        const archive = archiver('zip', {
          zlib: { level: options.compressionLevel || 6 },
        });
        const tempFiles: string[] = [];

        // Handle stream events; temp files are cleaned up once, after the archive closes,
        // rather than via one listener per file (which accumulates listeners on large batches)
        output.on('close', () => {
          for (const tempFile of tempFiles) {
            this.storageService.deleteTempFile(tempFile).catch(() => {});
          }
          this.logger.log(`Structured ZIP created: ${zipPath} (${archive.pointer()} bytes)`);
          resolve(zipPath);
        });

        archive.on('error', reject);
        archive.pipe(output);

        // Process each folder
        for (const [folderName, imageIds] of Object.entries(structure)) {
          if (imageIds.length === 0) continue;

          const images = await this.databaseService.getImagesByIds(imageIds);

          for (const image of images) {
            try {
              // Download image to temp location
              const tempFilePath = await this.storageService.downloadToTemp(image.s3Key);
              tempFiles.push(tempFilePath);

              // Determine filename to use in ZIP
              const fileName = image.proposedName || image.originalName;
              const zipEntryPath = `${folderName}/${fileName}`;

              // Add file to archive
              archive.file(tempFilePath, { name: zipEntryPath });

            } catch (error) {
              this.logger.warn(`Failed to add image ${image.id} to ZIP:`, error.message);
            }
          }
        }

        // Add a README unless explicitly disabled (note: this reuses the includeOriginals flag)
        if (options.includeOriginals !== false) {
          const readmeContent = this.generateReadmeContent(batchId, structure);
          archive.append(readmeContent, { name: 'README.txt' });
        }

        archive.finalize();

      } catch (error) {
        reject(error);
      }
    });
  }

  /**
   * Prepare ZIP entries from image data
   */
  private async prepareZipEntries(
    images: any[],
    options: ZipCreationOptions
  ): Promise<ZipEntry[]> {
    const entries: ZipEntry[] = [];
    const usedNames = new Set<string>();

    for (const image of images) {
      try {
        // Determine the filename to use
        let fileName = image.proposedName || image.originalName;

        // Ensure unique filenames
        fileName = this.ensureUniqueFilename(fileName, usedNames);
        usedNames.add(fileName.toLowerCase());

        const entry: ZipEntry = {
          fileName,
          originalName: image.originalName,
          proposedName: image.proposedName || image.originalName,
          s3Key: image.s3Key,
        };

        entries.push(entry);

      } catch (error) {
        this.logger.warn(`Failed to prepare ZIP entry for image ${image.id}:`, error.message);
      }
    }

    this.logger.debug(`Prepared ${entries.length} ZIP entries`);
    return entries;
  }

  /**
   * Create ZIP file from prepared entries
   */
  private async createZipFromEntries(
    zipPath: string,
    entries: ZipEntry[],
    options: ZipCreationOptions
  ): Promise<void> {
    // Async executor: download failures inside the loop reject via the catch below
    return new Promise(async (resolve, reject) => {
      const output = fs.createWriteStream(zipPath);
      const archive = archiver('zip', {
        zlib: { level: options.compressionLevel || 6 },
      });

      const tempFiles: string[] = [];

      // Handle stream events
      output.on('close', () => {
        // Cleanup temp files
        this.cleanupTempFiles(tempFiles);
        resolve();
      });

      archive.on('error', (error) => {
        this.cleanupTempFiles(tempFiles);
        reject(error);
      });

      archive.pipe(output);

      try {
        // Process each entry
        for (const entry of entries) {
          if (entry.s3Key) {
            // Download file from storage
            const tempFilePath = await this.storageService.downloadToTemp(entry.s3Key);
            tempFiles.push(tempFilePath);

            // Add to archive
            archive.file(tempFilePath, { name: entry.fileName });
          } else if (entry.filePath) {
            // Use local file
            archive.file(entry.filePath, { name: entry.fileName });
          }
        }

        // Add metadata file if not excluded
        if (!options.excludeMetadata) {
          const metadataContent = this.generateMetadataContent(entries);
          archive.append(metadataContent, { name: 'metadata.json' });
        }

        // Add processing summary
        const summaryContent = this.generateSummaryContent(entries);
        archive.append(summaryContent, { name: 'processing_summary.txt' });

        archive.finalize();

      } catch (error) {
        this.cleanupTempFiles(tempFiles);
        reject(error);
      }
    });
  }

  /**
   * Ensure filename is unique within the ZIP
   */
  private ensureUniqueFilename(fileName: string, usedNames: Set<string>): string {
    const originalName = fileName;
    const baseName = path.parse(fileName).name;
    const extension = path.parse(fileName).ext;

    let counter = 1;
    let uniqueName = fileName;

    while (usedNames.has(uniqueName.toLowerCase())) {
      uniqueName = `${baseName}_${counter}${extension}`;
      counter++;
    }

    if (uniqueName !== originalName) {
      this.logger.debug(`Renamed duplicate file: ${originalName} -> ${uniqueName}`);
    }

    return uniqueName;
  }
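
  // Worked example: three entries all named photo.jpg resolve (case-insensitively) to
  //   photo.jpg, photo_1.jpg, photo_2.jpg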

  /**
   * Generate metadata JSON content
   */
  private generateMetadataContent(entries: ZipEntry[]): string {
    const metadata = {
      createdAt: new Date().toISOString(),
      totalFiles: entries.length,
      processingInfo: {
        service: 'SEO Image Renamer Worker',
        version: '1.0.0',
      },
      files: entries.map(entry => ({
        fileName: entry.fileName,
        originalName: entry.originalName,
        proposedName: entry.proposedName,
      })),
    };

    return JSON.stringify(metadata, null, 2);
  }

  /**
   * Generate summary text content
   */
  private generateSummaryContent(entries: ZipEntry[]): string {
    const renamedCount = entries.filter(e => e.fileName !== e.originalName).length;
    const unchangedCount = entries.length - renamedCount;

    return `SEO Image Renamer - Processing Summary
==========================================

Total Files: ${entries.length}
Renamed Files: ${renamedCount}
Unchanged Files: ${unchangedCount}

Processing Date: ${new Date().toISOString()}

File List:
${entries.map(entry => {
  const status = entry.fileName !== entry.originalName ? '✓ RENAMED' : '- unchanged';
  return `${status}: ${entry.originalName} -> ${entry.fileName}`;
}).join('\n')}

Generated by SEO Image Renamer Worker Service
For support, visit: https://seo-image-renamer.com
`;
  }

  /**
   * Generate README content for structured ZIPs
   */
  private generateReadmeContent(batchId: string, structure: { [key: string]: string[] }): string {
    const folderList = Object.entries(structure)
      .map(([folder, imageIds]) => `  ${folder}/ (${imageIds.length} images)`)
      .join('\n');

    return `SEO Image Renamer - Batch Processing Results
=============================================

Batch ID: ${batchId}
Created: ${new Date().toISOString()}

Folder Structure:
${folderList}

Instructions:
- Each folder contains images organized by your specified criteria
- Filenames have been optimized for SEO based on AI vision analysis
- Original filenames are preserved in the metadata.json file

For more information about our AI-powered image renaming service,
visit: https://seo-image-renamer.com
`;
  }

  /**
   * Cleanup temporary files
   */
  private async cleanupTempFiles(filePaths: string[]): Promise<void> {
    for (const filePath of filePaths) {
      try {
        await this.storageService.deleteTempFile(filePath);
      } catch (error) {
        this.logger.warn(`Failed to cleanup temp file ${filePath}:`, error.message);
      }
    }
  }

  /**
   * Cleanup ZIP file
   */
  async cleanupZipFile(zipPath: string): Promise<void> {
    try {
      // Only delete files in our temp directory for safety
      if (zipPath.startsWith(this.tempDir)) {
        fs.unlinkSync(zipPath);
        this.logger.debug(`ZIP file cleaned up: ${zipPath}`);
      } else {
        this.logger.warn(`Skipping cleanup of ZIP file outside temp directory: ${zipPath}`);
      }
    } catch (error) {
      if (error.code !== 'ENOENT') {
        this.logger.warn(`Failed to cleanup ZIP file ${zipPath}:`, error.message);
      }
    }
  }

  /**
   * Get ZIP creation statistics
   */
  getZipStats(): {
    tempDir: string;
    supportedFormats: string[];
    defaultCompression: number;
  } {
    return {
      tempDir: this.tempDir,
      supportedFormats: ['zip'],
      defaultCompression: 6,
    };
  }
}
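
// For illustration (not part of this commit): a job handler would typically create the
// batch ZIP, upload it, and always clean up the local file; the IDs, keys, and injected
// instances are placeholders:
//
//   const zipPath = await zipCreator.createBatchZip('batch-42', ['img-1', 'img-2'], 'my-batch');
//   try {
//     await storageService.uploadFile(zipPath, 'zips/batch-42.zip');
//   } finally {
//     await zipCreator.cleanupZipFile(zipPath); // remove the local ZIP even if the upload fails
//   }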