import { env } from "@config/env.js";
import { GCS_PATHS } from "@constants/gcs.constants.js";
import { OBJECT_IMAGE_FORMATS } from "@constants/investigations.constants";
import { Storage } from "@google-cloud/storage";
import { assertNever } from "@utils/assert.utils";
import { getLogger } from "@utils/asyncLocalStorage.js";
import crypto from "crypto";
import sharp from "sharp";
/**
 * Google Cloud Storage Service.
 * Handles file uploads and management in Google Cloud Storage with automatic
 * image optimization (resize, compression, format conversion via Sharp).
 * @category Services
 */
class GcsService {
  storage;
  bucketName;
  bucket;

  /**
   * Instantiate the GCS client with configured credentials and bucket.
   * @category Services
   */
  constructor() {
    const logger = getLogger();
    // Initialize Google Cloud Storage client.
    // Credentials file is expected at this fixed path in the deployment.
    const keyFilename = "data/gcp-service.json";
    this.storage = new Storage({
      projectId: env.GCS_PROJECT_ID,
      keyFilename,
    });
    this.bucketName = env.GCS_BUCKET_NAME;
    this.bucket = this.storage.bucket(this.bucketName);
    logger.info({
      bucketName: this.bucketName,
      keyFile: keyFilename,
    }, "GCS service initialized");
  }

  /**
   * Validates a base64 encoded image string (data-URI form).
   * @private
   * @param {string} base64Image - Base64 encoded image string.
   * @returns {string} The extracted base64 payload (without the data-URI prefix).
   * @throws {Error} If the image format is invalid or the payload is missing.
   */
  validateBase64Image(base64Image) {
    const matches = base64Image.match(/^data:image\/(jpeg|jpg|png|gif|webp);base64,(.+)$/);
    if (!matches || matches.length !== 3) {
      throw new Error("Invalid base64 image format. Supported: jpeg, jpg, png, gif, webp");
    }
    const imageData = matches[2];
    if (!imageData) {
      throw new Error("Invalid image data");
    }
    return imageData;
  }

  /**
   * Processes an image buffer with Sharp (resize, compress, format conversion).
   * @private
   * @param {Buffer} buffer - Original image buffer.
   * @param {IImageProcessingConfig} config - Image processing configuration.
   * @returns {Promise<Buffer>} Processed image buffer.
   * @throws {Error} If the buffer is empty or exceeds the configured size limit.
   */
  async processImageBuffer(buffer, config) {
    const logger = getLogger();
    // Reject empty input explicitly. A payload like "=" passes the base64
    // regex but decodes to zero bytes; without this guard Sharp fails with a
    // less descriptive error and the caller's compression ratio becomes NaN.
    if (buffer.length === 0) {
      throw new Error("Invalid image data");
    }
    // Validate original file size before processing
    const maxSizeBytes = config.maxSizeMB * 1024 * 1024;
    if (buffer.length > maxSizeBytes) {
      throw new Error(`Original image size exceeds ${config.maxSizeMB}MB limit`);
    }
    logger.info({ originalSize: buffer.length }, "Processing image with Sharp");
    // Process image with Sharp:
    // - Resize to fit within the configured bounding box (never enlarging)
    // - Auto-orient based on EXIF data
    // - Metadata is stripped by Sharp's default output (privacy)
    let sharpPipeline = sharp(buffer)
      .resize(config.targetSize, config.targetSize, {
        fit: "inside",
        withoutEnlargement: true,
      })
      .rotate(); // Auto-orient based on EXIF
    // Apply format-specific compression
    switch (config.format) {
      case OBJECT_IMAGE_FORMATS.WEBP:
        sharpPipeline = sharpPipeline.webp({
          quality: config.quality,
          effort: config.compressionEffort,
        });
        break;
      case OBJECT_IMAGE_FORMATS.JPEG:
        sharpPipeline = sharpPipeline.jpeg({
          quality: config.quality,
          progressive: true,
          mozjpeg: true,
        });
        break;
      case OBJECT_IMAGE_FORMATS.PNG:
        sharpPipeline = sharpPipeline.png({
          quality: config.quality,
          compressionLevel: 9,
          effort: config.compressionEffort,
        });
        break;
      default:
        // Exhaustiveness check — unreachable unless a new format is added.
        assertNever(config.format);
    }
    return await sharpPipeline.toBuffer();
  }

  /**
   * Generates a unique file path for an image.
   * @private
   * @param {string} basePath - Base path (e.g., "object-images").
   * @param {string} format - File extension/format (e.g., "webp", "jpeg", "png").
   * @param {string[]} pathSegments - Additional path segments (e.g., investigationId, objectId).
   * @returns {string} Complete file path with timestamp and random string.
   */
  generateImagePath(basePath, format, ...pathSegments) {
    const timestamp = Date.now();
    // 8 random bytes avoid collisions between uploads within the same millisecond.
    const randomString = crypto.randomBytes(8).toString("hex");
    const envPrefix = env.ENV_PREFIX ? `${env.ENV_PREFIX}/` : "";
    const segments = [basePath, ...pathSegments].join("/");
    return `${envPrefix}${segments}/${timestamp}-${randomString}.${format}`;
  }

  /**
   * Uploads a buffer to GCS with metadata.
   * @private
   * @param {Buffer} buffer - File buffer to upload.
   * @param {string} path - File path in GCS.
   * @param {string} contentType - Content type (e.g., "image/webp", "image/jpeg").
   * @param {Record<string, string>} customMetadata - Custom metadata to attach.
   * @returns {Promise<string>} Public URL of the uploaded file.
   */
  async uploadBuffer(buffer, path, contentType, customMetadata) {
    const file = this.bucket.file(path);
    await file.save(buffer, {
      metadata: {
        contentType,
        metadata: {
          uploadedAt: new Date().toISOString(),
          ...customMetadata,
        },
      },
    });
    return `https://storage.googleapis.com/${this.bucketName}/${path}`;
  }

  /**
   * Extracts the object path (filename) from a GCS public URL.
   * Uses a literal prefix comparison instead of an interpolated RegExp so that
   * regex metacharacters in the bucket name (e.g. "." — legal in GCS bucket
   * names) cannot cause false matches.
   * @private
   * @param {string} url - GCS public URL.
   * @returns {string | null} Extracted filename, or null if the URL does not
   *   point into this service's bucket.
   */
  extractFilenameFromUrl(url) {
    const prefix = `https://storage.googleapis.com/${this.bucketName}/`;
    if (!url.startsWith(prefix)) {
      return null;
    }
    const filename = url.slice(prefix.length);
    return filename || null;
  }

  /**
   * Upload an object image to GCS with resizing and compression.
   * @param {string} investigationId - The investigation ID.
   * @param {string} objectId - The object ID.
   * @param {string} base64Image - Base64 encoded image string.
   * @returns {Promise<string>} The public URL of the uploaded image.
   * @throws {Error} If validation, processing, or the upload fails.
   */
  async uploadObjectImage(investigationId, objectId, base64Image) {
    const logger = getLogger();
    try {
      logger.info({ investigationId, objectId }, "Starting object image upload to GCS");
      // Configure image processing for object images
      const imageConfig = {
        maxSizeMB: env.OBJECT_IMAGE_MAX_SIZE_MB,
        targetSize: env.OBJECT_IMAGE_SIZE,
        format: env.OBJECT_IMAGE_FORMAT,
        quality: env.OBJECT_IMAGE_QUALITY,
        compressionEffort: env.OBJECT_IMAGE_COMPRESSION_EFFORT,
      };
      // Validate base64 image format and convert the payload to a buffer
      const imageData = this.validateBase64Image(base64Image);
      const originalBuffer = Buffer.from(imageData, "base64");
      logger.info({ investigationId, objectId, originalSize: originalBuffer.length }, "Image decoded, starting processing");
      // Process image with Sharp (logs its own processing details)
      const processedBuffer = await this.processImageBuffer(originalBuffer, imageConfig);
      const finalSize = processedBuffer.length;
      const compressionRatio = ((1 - finalSize / originalBuffer.length) * 100).toFixed(1);
      logger.info({
        investigationId,
        objectId,
        originalSize: originalBuffer.length,
        finalSize,
        compressionRatio: `${compressionRatio}%`,
      }, "Image processed successfully");
      // Generate unique filename
      const filename = this.generateImagePath(GCS_PATHS.OBJECTS, imageConfig.format, investigationId, objectId);
      // Upload the processed file
      const publicUrl = await this.uploadBuffer(processedBuffer, filename, `image/${imageConfig.format}`, {
        investigationId,
        objectId,
        originalSize: originalBuffer.length.toString(),
        processedSize: finalSize.toString(),
        compressionRatio: `${compressionRatio}%`,
        maxDimension: imageConfig.targetSize.toString(),
        format: imageConfig.format,
      });
      logger.info({
        investigationId,
        objectId,
        publicUrl,
        compressionRatio: `${compressionRatio}%`,
      }, "Object image uploaded successfully to GCS");
      return publicUrl;
    }
    catch (error) {
      logger.error({ error, investigationId, objectId }, "Failed to upload object image to GCS");
      throw error;
    }
  }

  /**
   * Delete an object image from GCS.
   * Never throws: failures are logged and reported as `false` so deletion
   * problems do not break the caller's flow.
   * @param {string} imageUrl - The public URL of the image to delete.
   * @returns {Promise<boolean>} True when deleted (or already absent).
   */
  async deleteObjectImage(imageUrl) {
    const logger = getLogger();
    try {
      logger.info({ imageUrl }, "Deleting object image from GCS");
      // Extract filename from URL
      const filename = this.extractFilenameFromUrl(imageUrl);
      if (!filename) {
        logger.warn({ imageUrl }, "Invalid GCS URL format, skipping deletion");
        return false;
      }
      // Delete the file
      const file = this.bucket.file(filename);
      await file.delete();
      logger.info({ filename }, "Object image deleted successfully from GCS");
      return true;
    }
    catch (error) {
      // If the file doesn't exist (404), consider it a success. The GCS
      // client's ApiError exposes a numeric `code`; fall back to matching
      // the message for other error shapes.
      const statusCode = error && typeof error === "object" ? error.code : undefined;
      const notFound = statusCode === 404
        || (error instanceof Error && error.message.includes("404"));
      if (notFound) {
        logger.warn({ imageUrl }, "Object image file not found in GCS, already deleted");
        return true;
      }
      logger.error({ error, imageUrl }, "Failed to delete object image from GCS");
      // Don't throw error, just return false to avoid breaking the flow
      return false;
    }
  }

  /**
   * Delete all images for a specific investigation.
   * @param {string} investigationId - The investigation ID.
   * @returns {Promise<number>} Number of files deleted.
   * @throws {Error} If listing or deleting the files fails.
   */
  async deleteInvestigationImages(investigationId) {
    const logger = getLogger();
    try {
      logger.info({ investigationId }, "Deleting all images for investigation from GCS");
      const envPrefix = env.ENV_PREFIX ? `${env.ENV_PREFIX}/` : "";
      const prefix = `${envPrefix}${GCS_PATHS.OBJECTS}/${investigationId}/`;
      const [files] = await this.bucket.getFiles({
        prefix,
        autoPaginate: true, // Ensures we get all files, even if there are thousands
      });
      if (files.length === 0) {
        logger.info({ investigationId }, "No images found for investigation in GCS");
        return 0;
      }
      await Promise.all(files.map((file) => file.delete()));
      logger.info({ investigationId, count: files.length }, "All investigation images deleted from GCS");
      return files.length;
    }
    catch (error) {
      logger.error({ error, investigationId }, "Failed to delete investigation images from GCS");
      throw error;
    }
  }

  /**
   * Check if an image exists in GCS.
   * @param {string} imageUrl - The public URL of the image.
   * @returns {Promise<boolean>} True if the file exists; false on invalid URL or error.
   */
  async imageExists(imageUrl) {
    const logger = getLogger();
    try {
      // Extract filename from URL
      const filename = this.extractFilenameFromUrl(imageUrl);
      if (!filename) {
        return false;
      }
      const file = this.bucket.file(filename);
      const [exists] = await file.exists();
      return exists;
    }
    catch (error) {
      logger.error({ error, imageUrl }, "Failed to check image existence in GCS");
      return false;
    }
  }

  /**
   * Get image metadata.
   * @param {string} imageUrl - The public URL of the image.
   * @returns {Promise<FileMetadata | null>} File metadata response.
   * @throws {Error} If the URL is invalid or the metadata fetch fails.
   */
  async getImageMetadata(imageUrl) {
    const logger = getLogger();
    try {
      // Extract filename from URL
      const filename = this.extractFilenameFromUrl(imageUrl);
      if (!filename) {
        throw new Error("Invalid GCS URL format");
      }
      const file = this.bucket.file(filename);
      const [metadata] = await file.getMetadata();
      return {
        name: metadata.name,
        size: metadata.size,
        contentType: metadata.contentType,
        timeCreated: metadata.timeCreated,
        updated: metadata.updated,
        md5Hash: metadata.md5Hash,
      };
    }
    catch (error) {
      logger.error({ error, imageUrl }, "Failed to get image metadata from GCS");
      throw error;
    }
  }

  /**
   * Health check for GCS connection.
   * @returns {Promise<boolean>} True if GCS is accessible.
   */
  async healthCheck() {
    const logger = getLogger();
    try {
      // Using maxResults: 1 to minimize overhead
      await this.bucket.getFiles({ maxResults: 1 });
      logger.info({ bucketName: this.bucketName }, "GCS health check passed");
      return true;
    }
    catch (error) {
      logger.error({ error, bucketName: this.bucketName }, "GCS health check failed");
      return false;
    }
  }
}
// Shared singleton — constructing it here initializes the GCS client (and logs) at import time.
export const gcsService = new GcsService();