I am trying to upload large files to an S3 bucket. The upload works, but it uploads every individual chunk as its own object, and only at the end does it upload the complete file.
try {
  if (!req.file) {
    // 'res' removed from the ErrorHandler constructor; the handler only takes a message and a status code
    return next(new ErrorHandler('File not found', 400));
  }

  const userId = req.user._id;
  const bucketName = process.env.BUCKET_NAME;
  const objectKey = `${Date.now().toString()}-${req.file.originalname}`;

  // req.file.buffer is a Buffer (multer memory storage), not a stream
  const stream = req.file.buffer;
  const chunkSize = 5 * 1024 * 1024; // 5 MB per chunk
  const totalChunks = Math.ceil(stream.length / chunkSize);

  let uploadedBytes = 0;
  const mergedChunks = [];

  // Upload each 5 MB slice of the buffer as its own S3 object
  for (let i = 0; i < totalChunks; i++) {
    const start = i * chunkSize;
    const end = Math.min(start + chunkSize, stream.length);
    const chunk = stream.slice(start, end);
    const chunkKey = `${objectKey}-chunk-${i}`;

    const progressCallback = (bytesUploaded) => {
      uploadedBytes += bytesUploaded;
      const percentage = (uploadedBytes / stream.length) * 100;
      console.log(`Upload progress: ${percentage.toFixed(2)}%`);
    };

    await uploadStreamToS3(chunk, bucketName, chunkKey, progressCallback);
    mergedChunks.push(chunkKey);
  }

  // Merge chunks and upload the merged file
  const mergedBuffer = await mergeChunksFromS3(mergedChunks, bucketName);
  const mergedKey = `${objectKey}-merged`;
  await uploadBufferToS3(mergedBuffer, bucketName, mergedKey);

  const liveUrl = `https://${bucketName}.s3.amazonaws.com/${mergedKey}`;

  // Create newFile and respond
  const newFile = {
    userId: userId,
    liveUrl: liveUrl
  };

  return res
    .status(200)
    .json({ message: 'File uploaded successfully', newFile });
} catch (error) {
  console.log('error -->>', error);
  next(error);
}
I want to upload only the single large file, not the intermediate chunk objects, and I also don't want the upload to take too long.
You didn't include any of your AWS SDK calls, but consider using the AWS SDK for JavaScript v3, specifically @aws-sdk/lib-storage. Its Upload class does a managed multipart upload: the parts are uploaded in parallel (which keeps upload time down), they never appear as separate objects in the bucket, and S3 assembles them server-side into one object when the upload completes.
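Here is a minimal sketch of what that could look like in your handler, replacing the manual chunk loop and the merge step. It assumes the bucketName and objectKey values from your code, that req.file comes from multer's memory storage, and that credentials and region are resolved from the environment:

const { S3Client } = require('@aws-sdk/client-s3');
const { Upload } = require('@aws-sdk/lib-storage');

const s3 = new S3Client({ region: process.env.AWS_REGION });

// Inside the route handler:
const upload = new Upload({
  client: s3,
  params: {
    Bucket: bucketName,
    Key: objectKey,
    Body: req.file.buffer,          // a Buffer or a readable stream both work
    ContentType: req.file.mimetype,
  },
  partSize: 5 * 1024 * 1024, // 5 MB, the smallest part size S3 accepts
  queueSize: 4,              // number of parts uploaded concurrently
});

// lib-storage reports progress for the upload as a whole
upload.on('httpUploadProgress', ({ loaded, total }) => {
  if (total) {
    console.log(`Upload progress: ${((loaded / total) * 100).toFixed(2)}%`);
  }
});

await upload.done(); // resolves once S3 has assembled the single object

const liveUrl = `https://${bucketName}.s3.amazonaws.com/${objectKey}`;

With this approach there is nothing to merge afterwards, so uploadStreamToS3, mergeChunksFromS3, and uploadBufferToS3 are no longer needed; you would keep your existing newFile/response code after await upload.done().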