import { Injectable, Logger, BadRequestException, PayloadTooLargeException } from '@nestjs/common';
import * as sharp from 'sharp';
import { StorageService, StorageFile, UploadResult } from '../storage/storage.service';

export interface ImageMetadata {
  width: number;
  height: number;
  format: string;
  size: number;
  hasAlpha: boolean;
  density?: number;
}

export interface ProcessedUpload {
  uploadResult: UploadResult;
  metadata: ImageMetadata;
  originalName: string;
  mimeType: string;
}

export interface UploadQuotaCheck {
  allowed: boolean;
  remainingQuota: number;
  requestedCount: number;
  maxFileSize: number;
}

@Injectable()
export class UploadService {
  private readonly logger = new Logger(UploadService.name);

  // File size limits (in bytes)
  private readonly MAX_FILE_SIZE = 50 * 1024 * 1024; // 50MB
  private readonly MAX_TOTAL_SIZE = 500 * 1024 * 1024; // 500MB per batch

  // Quota limits by plan
  private readonly QUOTA_LIMITS = {
    BASIC: 50,
    PRO: 500,
    MAX: 1000,
  };

  constructor(private readonly storageService: StorageService) {}

  /**
   * Process and upload multiple files
   * @param files Array of uploaded files
   * @param batchId Batch UUID for organization
   * @param keywords Optional keywords for processing
   * @returns Array of processed uploads
   */
  async processMultipleFiles(
    files: Express.Multer.File[],
    batchId: string,
    keywords?: string[]
  ): Promise<ProcessedUpload[]> {
    this.logger.log(`Processing ${files.length} files for batch: ${batchId}`);

    // Validate files
    this.validateFiles(files);

    const results: ProcessedUpload[] = [];
    const duplicateHashes = new Set<string>();

    for (const file of files) {
      try {
        // Check for duplicates by checksum
        const checksum = this.storageService.calculateChecksum(file.buffer);
        if (duplicateHashes.has(checksum)) {
          this.logger.warn(`Duplicate file detected: ${file.originalname}`);
          continue;
        }
        duplicateHashes.add(checksum);

        // Process individual file
        const processed = await this.processSingleFile(file, batchId, keywords);
        results.push(processed);
      } catch (error) {
        this.logger.error(`Failed to process file: ${file.originalname}`, error.stack);
        // Continue processing other files
      }
    }

    this.logger.log(`Successfully processed ${results.length}/${files.length} files`);
    return results;
  }

  /**
   * Process a single file upload
   * @param file Uploaded file
   * @param batchId Batch UUID
   * @param keywords Optional keywords
   * @returns Processed upload result
   */
  async processSingleFile(
    file: Express.Multer.File,
    batchId: string,
    keywords?: string[]
  ): Promise<ProcessedUpload> {
    try {
      // Validate file type
      if (!this.storageService.isValidImageMimeType(file.mimetype)) {
        throw new BadRequestException(`Unsupported file type: ${file.mimetype}`);
      }

      // Extract image metadata
      const metadata = await this.extractImageMetadata(file.buffer);

      // Create storage file object
      const storageFile: StorageFile = {
        buffer: file.buffer,
        originalName: file.originalname,
        mimeType: file.mimetype,
        size: file.size,
      };

      // Upload to storage
      const uploadResult = await this.storageService.uploadFile(storageFile, batchId);

      this.logger.log(`File processed successfully: ${file.originalname}`);

      return {
        uploadResult,
        metadata,
        originalName: file.originalname,
        mimeType: file.mimetype,
      };
    } catch (error) {
      this.logger.error(`Failed to process file: ${file.originalname}`, error.stack);
      throw error;
    }
  }

  /**
   * Extract image metadata using Sharp
   * @param buffer Image buffer
   * @returns Image metadata
   */
  async extractImageMetadata(buffer: Buffer): Promise<ImageMetadata> {
    try {
      const image = sharp(buffer);
      const metadata = await image.metadata();

      return {
        width: metadata.width || 0,
        height: metadata.height || 0,
        format: metadata.format || 'unknown',
        size: buffer.length,
        hasAlpha: metadata.hasAlpha || false,
        density: metadata.density,
      };
    } catch (error) {
      this.logger.error('Failed to extract image metadata', error.stack);
      throw new BadRequestException('Invalid image file');
    }
  }
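
  // Illustrative result of extractImageMetadata() for a plain 4000x3000 JPEG
  // (hypothetical values, shown only to document the ImageMetadata shape):
  //   { width: 4000, height: 3000, format: 'jpeg', size: 2348567, hasAlpha: false, density: 72 }
  // `density` may be undefined when the source image carries no density metadata.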
  /**
   * Validate uploaded files
   * @param files Array of files to validate
   */
  private validateFiles(files: Express.Multer.File[]): void {
    if (!files || files.length === 0) {
      throw new BadRequestException('No files provided');
    }

    let totalSize = 0;

    for (const file of files) {
      // Check individual file size
      if (file.size > this.MAX_FILE_SIZE) {
        throw new PayloadTooLargeException(
          `File ${file.originalname} exceeds maximum size of ${this.MAX_FILE_SIZE / (1024 * 1024)}MB`
        );
      }

      // Check file type
      if (!this.storageService.isValidImageMimeType(file.mimetype)) {
        throw new BadRequestException(
          `Unsupported file type: ${file.mimetype} for file ${file.originalname}`
        );
      }

      totalSize += file.size;
    }

    // Check total batch size
    if (totalSize > this.MAX_TOTAL_SIZE) {
      throw new PayloadTooLargeException(
        `Total batch size exceeds maximum of ${this.MAX_TOTAL_SIZE / (1024 * 1024)}MB`
      );
    }
  }

  /**
   * Check if user has sufficient quota for upload
   * @param fileCount Number of files to upload
   * @param userPlan User's subscription plan
   * @param remainingQuota User's remaining quota
   * @returns Quota check result
   */
  checkUploadQuota(
    fileCount: number,
    userPlan: 'BASIC' | 'PRO' | 'MAX',
    remainingQuota: number
  ): UploadQuotaCheck {
    const maxQuota = this.QUOTA_LIMITS[userPlan];
    const allowed = remainingQuota >= fileCount;

    return {
      allowed,
      remainingQuota,
      requestedCount: fileCount,
      maxFileSize: this.MAX_FILE_SIZE,
    };
  }

  /**
   * Generate thumbnail for image
   * @param buffer Original image buffer
   * @param width Thumbnail width (default: 200)
   * @param height Thumbnail height (default: 200)
   * @returns Thumbnail buffer
   */
  async generateThumbnail(
    buffer: Buffer,
    width: number = 200,
    height: number = 200
  ): Promise<Buffer> {
    try {
      return await sharp(buffer)
        .resize(width, height, {
          fit: 'cover',
          position: 'center',
        })
        .jpeg({
          quality: 80,
          progressive: true,
        })
        .toBuffer();
    } catch (error) {
      this.logger.error('Failed to generate thumbnail', error.stack);
      throw new Error('Thumbnail generation failed');
    }
  }

  /**
   * Optimize image for web display
   * @param buffer Original image buffer
   * @param quality JPEG quality (1-100)
   * @returns Optimized image buffer
   */
  async optimizeImage(buffer: Buffer, quality: number = 85): Promise<Buffer> {
    try {
      const metadata = await sharp(buffer).metadata();

      // Skip optimization for very small images
      if ((metadata.width || 0) * (metadata.height || 0) < 50000) {
        return buffer;
      }

      return await sharp(buffer)
        .jpeg({
          quality,
          progressive: true,
          mozjpeg: true,
        })
        .toBuffer();
    } catch (error) {
      this.logger.error('Failed to optimize image', error.stack);
      return buffer; // Return original on error
    }
  }
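
  // Sketch of how a caller might derive web assets from an upload
  // (hypothetical usage, not invoked anywhere in this service):
  //   const thumbnail = await uploadService.generateThumbnail(file.buffer, 320, 320);
  //   const optimized = await uploadService.optimizeImage(file.buffer, 80);
  // Note the differing failure modes: optimizeImage() returns the original buffer
  // on error, while generateThumbnail() throws so callers can decide whether a
  // missing preview is fatal.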
  /**
   * Validate file against virus/malware (placeholder for future implementation)
   * @param buffer File buffer
   * @returns Whether file is safe
   */
  async validateFileSafety(buffer: Buffer): Promise<boolean> {
    // TODO: Implement virus scanning if needed
    // For now, just check if it's a valid image
    try {
      await sharp(buffer).metadata();
      return true;
    } catch {
      return false;
    }
  }

  /**
   * Get supported file types
   * @returns Array of supported MIME types
   */
  getSupportedFileTypes(): string[] {
    return [
      'image/jpeg',
      'image/jpg',
      'image/png',
      'image/gif',
      'image/webp',
    ];
  }

  /**
   * Get file size limits
   * @returns File size limits configuration
   */
  getFileSizeLimits() {
    return {
      maxFileSize: this.MAX_FILE_SIZE,
      maxTotalSize: this.MAX_TOTAL_SIZE,
      maxFileSizeMB: this.MAX_FILE_SIZE / (1024 * 1024),
      maxTotalSizeMB: this.MAX_TOTAL_SIZE / (1024 * 1024),
    };
  }
}
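
// Minimal sketch of a controller wiring UploadService to a multipart endpoint.
// Illustrative only, not part of this service: the route, the 'files' field name,
// and the 100-file cap are assumptions, and the class would still need to be
// registered in a module (and would normally live in its own file).
import { Controller, Param, Post, UploadedFiles, UseInterceptors } from '@nestjs/common';
import { FilesInterceptor } from '@nestjs/platform-express';

@Controller('batches')
export class UploadControllerSketch {
  constructor(private readonly uploadService: UploadService) {}

  @Post(':batchId/files')
  @UseInterceptors(FilesInterceptor('files', 100)) // 'files' is the assumed multipart field name
  async upload(
    @Param('batchId') batchId: string,
    @UploadedFiles() files: Express.Multer.File[],
  ): Promise<ProcessedUpload[]> {
    // Delegates validation, deduplication, and storage to UploadService.
    return this.uploadService.processMultipleFiles(files, batchId);
  }
}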