From 7a0da7f3108c00112756aae2cf870ddd053718d7 Mon Sep 17 00:00:00 2001
From: Thomas
Date: Fri, 13 Dec 2024 13:34:39 +1100
Subject: [PATCH] Remove console logs

---
 s3.js | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/s3.js b/s3.js
index 095afa5..f55579b 100644
--- a/s3.js
+++ b/s3.js
@@ -887,7 +887,7 @@ async function backupToS3() {
 
   if (dataSize > chunkSize) {
     try {
-      console.log('Starting Multipart upload to S3');
+      //console.log('Starting Multipart upload to S3');
       const createMultipartParams = {
         Bucket: bucketName,
         Key: 'typingmind-backup.json',
@@ -929,7 +929,7 @@ async function backupToS3() {
              ETag: uploadResult.ETag,
              PartNumber: partNumber
            });
-           console.log(`Part ${partNumber} uploaded successfully with ETag: ${uploadResult.ETag}`);
+           //console.log(`Part ${partNumber} uploaded successfully with ETag: ${uploadResult.ETag}`);
            break; // Success, exit retry loop
          } catch (error) {
            console.error(`Error uploading part ${partNumber}:`, error);
@@ -955,7 +955,7 @@ async function backupToS3() {
 
         // Update progress
         const progress = Math.round((start + chunkSize) / dataSize * 100);
-        console.log(`Upload progress: ${Math.min(progress, 100)}%`);
+        //console.log(`Upload progress: ${Math.min(progress, 100)}%`);
       }
 
       const sortedParts = uploadedParts
@@ -974,10 +974,10 @@ async function backupToS3() {
         }
       };
 
-      console.log('Complete Multipart Upload Request:', JSON.stringify(completeParams, null, 2));
+      //console.log('Complete Multipart Upload Request:', JSON.stringify(completeParams, null, 2));
 
       await s3.completeMultipartUpload(completeParams).promise();
-      console.log('Multipart upload completed successfully');
+      //console.log('Multipart upload completed successfully');
     } catch (error) {
       console.error('Multipart upload failed:', error);
       // Fall back to regular upload if multipart fails
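
A lighter-weight alternative to commenting each statement out is to gate verbose logging behind a flag, so the progress and ETag messages can be re-enabled without touching every call site. A minimal sketch follows; the DEBUG_LOGS constant and debugLog helper are hypothetical names, not part of s3.js:

// Minimal sketch: route verbose backup logging through a single flag.
// DEBUG_LOGS and debugLog are assumed names for illustration only.
const DEBUG_LOGS = false;

function debugLog(...args) {
  // Forward to console.log only when debug output is enabled.
  if (DEBUG_LOGS) {
    console.log(...args);
  }
}

// Example usage inside the upload loop (mirrors one of the silenced calls):
// debugLog(`Part ${partNumber} uploaded successfully with ETag: ${uploadResult.ETag}`);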