Просмотр исходного кода

fix: use @aws-sdk/lib-storage Upload for bulk export S3 compatibility

Ryotaro Nagahara 3 недели назад
Родитель
Commit
b261c4ea85

+ 1 - 0
apps/app/package.json

@@ -62,6 +62,7 @@
   "dependencies": {
   "dependencies": {
     "@akebifiky/remark-simple-plantuml": "^1.0.2",
     "@akebifiky/remark-simple-plantuml": "^1.0.2",
     "@aws-sdk/client-s3": "3.454.0",
     "@aws-sdk/client-s3": "3.454.0",
+    "@aws-sdk/lib-storage": "3.454.0",
     "@aws-sdk/s3-request-presigner": "3.454.0",
     "@aws-sdk/s3-request-presigner": "3.454.0",
     "@azure/identity": "^4.4.1",
     "@azure/identity": "^4.4.1",
     "@azure/openai": "^2.0.0",
     "@azure/openai": "^2.0.0",

+ 1 - 10
apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-cron/index.ts

@@ -196,10 +196,6 @@ class PageBulkExportJobCronService
       pageBulkExportJob.statusOnPreviousCronExec = pageBulkExportJob.status;
       pageBulkExportJob.statusOnPreviousCronExec = pageBulkExportJob.status;
       await pageBulkExportJob.save();
       await pageBulkExportJob.save();
 
 
-      logger.info(
-        `proceedBulkExportJob: status=${pageBulkExportJob.status}, jobId=${pageBulkExportJob._id}`,
-      );
-
       if (pageBulkExportJob.status === PageBulkExportJobStatus.initializing) {
       if (pageBulkExportJob.status === PageBulkExportJobStatus.initializing) {
         await createPageSnapshotsAsync.bind(this)(user, pageBulkExportJob);
         await createPageSnapshotsAsync.bind(this)(user, pageBulkExportJob);
       } else if (
       } else if (
@@ -209,7 +205,6 @@ class PageBulkExportJobCronService
       } else if (
       } else if (
         pageBulkExportJob.status === PageBulkExportJobStatus.uploading
         pageBulkExportJob.status === PageBulkExportJobStatus.uploading
       ) {
       ) {
-        logger.info('proceedBulkExportJob: calling compressAndUpload');
         await compressAndUpload.bind(this)(user, pageBulkExportJob);
         await compressAndUpload.bind(this)(user, pageBulkExportJob);
       }
       }
     } catch (err) {
     } catch (err) {
@@ -230,14 +225,10 @@ class PageBulkExportJobCronService
     err: Error | null,
     err: Error | null,
     pageBulkExportJob: PageBulkExportJobDocument,
     pageBulkExportJob: PageBulkExportJobDocument,
   ) {
   ) {
-    logger.info('handleError: called', {
-      errType: err?.constructor.name,
-      errMessage: err?.message,
-    });
     if (err == null) return;
     if (err == null) return;
 
 
     if (err instanceof BulkExportJobExpiredError) {
     if (err instanceof BulkExportJobExpiredError) {
-      logger.error('handleError: BulkExportJobExpiredError', err);
+      logger.error(err);
       await this.notifyExportResultAndCleanUp(
       await this.notifyExportResultAndCleanUp(
         SupportedAction.ACTION_PAGE_BULK_EXPORT_JOB_EXPIRED,
         SupportedAction.ACTION_PAGE_BULK_EXPORT_JOB_EXPIRED,
         pageBulkExportJob,
         pageBulkExportJob,

+ 17 - 51
apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-cron/steps/compress-and-upload.ts

@@ -1,6 +1,4 @@
-import { createReadStream, createWriteStream } from 'node:fs';
-import fs from 'node:fs/promises';
-import path from 'node:path';
+import { PassThrough } from 'node:stream';
 import type { Archiver } from 'archiver';
 import type { Archiver } from 'archiver';
 import archiver from 'archiver';
 import archiver from 'archiver';
 
 
@@ -57,13 +55,10 @@ async function postProcess(
 /**
 /**
  * Compress page files into a tar.gz archive and upload to cloud storage.
  * Compress page files into a tar.gz archive and upload to cloud storage.
  *
  *
- * Uses a temporary file instead of streaming directly to avoid two issues with AWS S3:
- * 1. archiver's readable-stream (npm) fails AWS SDK's `instanceof Readable` check against Node.js built-in stream
- * 2. PutObjectCommand sends `Transfer-Encoding: chunked` for streams without Content-Length, which S3 rejects with 501
- *
- * Writing to a temp file and using createReadStream resolves both:
- * - createReadStream returns a native ReadStream (passes instanceof check)
- * - AWS SDK auto-detects file size from ReadStream.path via lstatSync, setting Content-Length
+ * Wraps archiver output with PassThrough to provide a Node.js native Readable,
+ * since archiver uses npm's readable-stream which fails AWS SDK's instanceof check.
+ * The Content-Length / Transfer-Encoding issue is resolved by aws/index.ts using
+ * the Upload class from @aws-sdk/lib-storage.
  */
  */
 export async function compressAndUpload(
 export async function compressAndUpload(
   this: IPageBulkExportJobCronService,
   this: IPageBulkExportJobCronService,
@@ -85,59 +80,30 @@ export async function compressAndUpload(
   );
   );
 
 
   const fileUploadService: FileUploader = this.crowi.fileUploadService;
   const fileUploadService: FileUploader = this.crowi.fileUploadService;
-  // Place temp file in the parent directory to avoid archiver picking it up
-  // (archiver.directory() scans getTmpOutputDir asynchronously via glob)
-  const tmpFilePath = path.join(
-    this.getTmpOutputDir(pageBulkExportJob),
-    '..',
-    `${originalName}.tmp`,
-  );
 
 
-  logger.info('starting');
+  // Wrap with Node.js native PassThrough so that AWS SDK recognizes the stream as a native Readable
+  const uploadStream = new PassThrough();
+  pageArchiver.pipe(uploadStream);
 
 
   pageArchiver.on('error', (err) => {
   pageArchiver.on('error', (err) => {
     logger.error('pageArchiver error', err);
     logger.error('pageArchiver error', err);
-    // Do not call pageArchiver.destroy() here: it corrupts internal state
-    // while the async queue is still processing, causing uncaught exceptions.
-    // The error is propagated via the Promise rejection below.
+    uploadStream.destroy(err);
   });
   });
 
 
   pageArchiver.directory(this.getTmpOutputDir(pageBulkExportJob), false);
   pageArchiver.directory(this.getTmpOutputDir(pageBulkExportJob), false);
   pageArchiver.finalize();
   pageArchiver.finalize();
-  logger.info('finalize called');
 
 
-  this.setStreamsInExecution(pageBulkExportJob._id, pageArchiver);
+  this.setStreamsInExecution(pageBulkExportJob._id, pageArchiver, uploadStream);
 
 
   try {
   try {
-    // Write compressed archive to temp file using .pipe() (not pipeline() which auto-destroys streams)
-    await new Promise<void>((resolve, reject) => {
-      const writeStream = createWriteStream(tmpFilePath);
-      pageArchiver.pipe(writeStream);
-      writeStream.on('close', resolve);
-      writeStream.on('error', reject);
-      pageArchiver.on('error', reject);
-    });
-    logger.info('archive written to temp file');
-
-    // Get file size for Content-Length
-    const stat = await fs.stat(tmpFilePath);
-    attachment.fileSize = stat.size;
-    logger.info(`temp file size: ${stat.size}`);
-
-    // Upload using createReadStream (native ReadStream with .path property)
-    logger.info('starting upload');
-    const readStream = createReadStream(tmpFilePath);
-    await fileUploadService.uploadAttachment(readStream, attachment);
-    logger.info('upload completed, running postProcess');
-
-    await postProcess.bind(this)(pageBulkExportJob, attachment, stat.size);
-    logger.info('postProcess completed');
+    await fileUploadService.uploadAttachment(uploadStream, attachment);
+    await postProcess.bind(this)(
+      pageBulkExportJob,
+      attachment,
+      pageArchiver.pointer(),
+    );
   } catch (e) {
   } catch (e) {
-    logger.error('error caught', e);
+    logger.error(e);
     await this.handleError(e, pageBulkExportJob);
     await this.handleError(e, pageBulkExportJob);
-  } finally {
-    logger.info('finally block, cleaning up');
-    // Clean up temp file
-    await fs.unlink(tmpFilePath).catch(() => {});
   }
   }
 }
 }

+ 31 - 20
apps/app/src/server/service/file-uploader/aws/index.ts

@@ -13,6 +13,7 @@ import {
   PutObjectCommand,
   PutObjectCommand,
   S3Client,
   S3Client,
 } from '@aws-sdk/client-s3';
 } from '@aws-sdk/client-s3';
+import { Upload } from '@aws-sdk/lib-storage';
 import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
 import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
 import type { NonBlankString } from '@growi/core/dist/interfaces';
 import type { NonBlankString } from '@growi/core/dist/interfaces';
 import { toNonBlankStringOrUndefined } from '@growi/core/dist/interfaces';
 import { toNonBlankStringOrUndefined } from '@growi/core/dist/interfaces';
@@ -252,30 +253,40 @@ class AwsFileUploader extends AbstractFileUploader {
     const filePath = getFilePathOnStorage(attachment);
     const filePath = getFilePathOnStorage(attachment);
     const contentHeaders = createContentHeaders(attachment);
     const contentHeaders = createContentHeaders(attachment);
 
 
-    try {
-      const uploadTimeout = configManager.getConfig('app:fileUploadTimeout');
+    const uploadTimeout = configManager.getConfig('app:fileUploadTimeout');
 
 
-      await s3.send(
-        new PutObjectCommand({
-          Bucket: getS3Bucket(),
-          Key: filePath,
-          Body: readable,
-          ACL: getS3PutObjectCannedAcl(),
-          // put type and the file name for reference information when uploading
-          ContentType: getContentHeaderValue(contentHeaders, 'Content-Type'),
-          ContentDisposition: getContentHeaderValue(
-            contentHeaders,
-            'Content-Disposition',
-          ),
-        }),
-        { abortSignal: AbortSignal.timeout(uploadTimeout) },
-      );
+    // Use @aws-sdk/lib-storage Upload to handle streaming uploads:
+    // - Resolves archiver's readable-stream (npm) failing AWS SDK's instanceof Readable check
+    // - Avoids Transfer-Encoding: chunked which S3 rejects with 501 (PutObjectCommand issue)
+    // - Under 5MB: falls back to PutObjectCommand internally
+    // - Over 5MB: uses multipart upload (requires s3:AbortMultipartUpload permission)
+    const upload = new Upload({
+      client: s3,
+      params: {
+        Bucket: getS3Bucket(),
+        Key: filePath,
+        Body: readable,
+        ACL: getS3PutObjectCannedAcl(),
+        ContentType: getContentHeaderValue(contentHeaders, 'Content-Type'),
+        ContentDisposition: getContentHeaderValue(
+          contentHeaders,
+          'Content-Disposition',
+        ),
+      },
+    });
+
+    const timeoutId = setTimeout(() => {
+      logger.warn(`Upload timeout: fileName=${attachment.fileName}`);
+      upload.abort();
+    }, uploadTimeout);
+
+    try {
+      await upload.done();
 
 
       logger.debug(
       logger.debug(
         `File upload completed successfully: fileName=${attachment.fileName}`,
         `File upload completed successfully: fileName=${attachment.fileName}`,
       );
       );
     } catch (error) {
     } catch (error) {
-      // Handle timeout error specifically
       if (error.name === 'AbortError') {
       if (error.name === 'AbortError') {
         logger.warn(`Upload timeout: fileName=${attachment.fileName}`, error);
         logger.warn(`Upload timeout: fileName=${attachment.fileName}`, error);
       } else {
       } else {
@@ -284,9 +295,9 @@ class AwsFileUploader extends AbstractFileUploader {
           error,
           error,
         );
         );
       }
       }
-      // Re-throw the error to be handled by the caller.
-      // The pipeline automatically handles stream cleanup on error.
       throw error;
       throw error;
+    } finally {
+      clearTimeout(timeoutId);
     }
     }
   }
   }
 
 

+ 43 - 6
pnpm-lock.yaml

@@ -169,6 +169,9 @@ importers:
       '@aws-sdk/client-s3':
       '@aws-sdk/client-s3':
         specifier: 3.454.0
         specifier: 3.454.0
         version: 3.454.0
         version: 3.454.0
+      '@aws-sdk/lib-storage':
+        specifier: 3.454.0
+        version: 3.454.0(@aws-sdk/client-s3@3.454.0)
       '@aws-sdk/s3-request-presigner':
       '@aws-sdk/s3-request-presigner':
         specifier: 3.454.0
         specifier: 3.454.0
         version: 3.454.0
         version: 3.454.0
@@ -2045,6 +2048,12 @@ packages:
     resolution: {integrity: sha512-cC9uqmX0rgx1efiJGqeR+i0EXr8RQ5SAzH7M45WNBZpYiLEe6reWgIYJY9hmOxuaoMdWSi8kekuN3IjTIORRjw==}
     resolution: {integrity: sha512-cC9uqmX0rgx1efiJGqeR+i0EXr8RQ5SAzH7M45WNBZpYiLEe6reWgIYJY9hmOxuaoMdWSi8kekuN3IjTIORRjw==}
     engines: {node: '>=16.0.0'}
     engines: {node: '>=16.0.0'}
 
 
+  '@aws-sdk/lib-storage@3.454.0':
+    resolution: {integrity: sha512-UygsmdtIwty9GJqBoCqTQeX/dwE2Oo/m3P5UzuUr2veC6AEuYQyMIvmSgLVEO/ek3hfK86kmRBff7VTGWUuN8Q==}
+    engines: {node: '>=14.0.0'}
+    peerDependencies:
+      '@aws-sdk/client-s3': ^3.0.0
+
   '@aws-sdk/middleware-bucket-endpoint@3.451.0':
   '@aws-sdk/middleware-bucket-endpoint@3.451.0':
     resolution: {integrity: sha512-KWyZ1JGnYz2QbHuJtYTP1BVnMOfVopR8rP8dTinVb/JR5HfAYz4imICJlJUbOYRjN7wpA3PrRI8dNRjrSBjWJg==}
     resolution: {integrity: sha512-KWyZ1JGnYz2QbHuJtYTP1BVnMOfVopR8rP8dTinVb/JR5HfAYz4imICJlJUbOYRjN7wpA3PrRI8dNRjrSBjWJg==}
     engines: {node: '>=14.0.0'}
     engines: {node: '>=14.0.0'}
@@ -6381,6 +6390,9 @@ packages:
   buffer@4.9.2:
   buffer@4.9.2:
     resolution: {integrity: sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==}
     resolution: {integrity: sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==}
 
 
+  buffer@5.6.0:
+    resolution: {integrity: sha512-/gDYp/UtU0eA1ys8bOs9J6a+E/KWIY+DZ+Q2WESNUA0jFRsJOc0SNUO6xJ5SGA1xueg3NL65W6s+NY5l9cunuw==}
+
   buffer@5.7.1:
   buffer@5.7.1:
     resolution: {integrity: sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==}
     resolution: {integrity: sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==}
 
 
@@ -8682,28 +8694,29 @@ packages:
 
 
   glob@10.4.5:
   glob@10.4.5:
     resolution: {integrity: sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==}
     resolution: {integrity: sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==}
+    deprecated: Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me
     hasBin: true
     hasBin: true
 
 
   glob@6.0.4:
   glob@6.0.4:
     resolution: {integrity: sha512-MKZeRNyYZAVVVG1oZeLaWie1uweH40m9AZwIwxyPbTSX4hHrVYSzLg0Ro5Z5R7XKkIX+Cc6oD1rqeDJnwsB8/A==}
     resolution: {integrity: sha512-MKZeRNyYZAVVVG1oZeLaWie1uweH40m9AZwIwxyPbTSX4hHrVYSzLg0Ro5Z5R7XKkIX+Cc6oD1rqeDJnwsB8/A==}
-    deprecated: Glob versions prior to v9 are no longer supported
+    deprecated: Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me
 
 
   glob@7.1.6:
   glob@7.1.6:
     resolution: {integrity: sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==}
     resolution: {integrity: sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==}
-    deprecated: Glob versions prior to v9 are no longer supported
+    deprecated: Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me
 
 
   glob@7.2.0:
   glob@7.2.0:
     resolution: {integrity: sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==}
     resolution: {integrity: sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==}
-    deprecated: Glob versions prior to v9 are no longer supported
+    deprecated: Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me
 
 
   glob@7.2.3:
   glob@7.2.3:
     resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==}
     resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==}
-    deprecated: Glob versions prior to v9 are no longer supported
+    deprecated: Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me
 
 
   glob@8.1.0:
   glob@8.1.0:
     resolution: {integrity: sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==}
     resolution: {integrity: sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==}
     engines: {node: '>=12'}
     engines: {node: '>=12'}
-    deprecated: Glob versions prior to v9 are no longer supported
+    deprecated: Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me
 
 
   global-directory@4.0.1:
   global-directory@4.0.1:
     resolution: {integrity: sha512-wHTUcDUoZ1H5/0iVqEudYW4/kAlN5cZ3j/bXn0Dpbizl9iaUVeWSHqiOjsgk6OW2bkLclbBjzewBz6weQ1zA2Q==}
     resolution: {integrity: sha512-wHTUcDUoZ1H5/0iVqEudYW4/kAlN5cZ3j/bXn0Dpbizl9iaUVeWSHqiOjsgk6OW2bkLclbBjzewBz6weQ1zA2Q==}
@@ -12880,6 +12893,9 @@ packages:
     resolution: {integrity: sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw==}
     resolution: {integrity: sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw==}
     engines: {node: '>=4', npm: '>=6'}
     engines: {node: '>=4', npm: '>=6'}
 
 
+  stream-browserify@3.0.0:
+    resolution: {integrity: sha512-H73RAHsVBapbim0tU2JwwOiXUj+fikfiaoYAKHF3VJfA0pe2BCzkhAHBlLG6REzE+2WNZcxOXjK7lkso+9euLA==}
+
   stream-buffers@0.2.6:
   stream-buffers@0.2.6:
     resolution: {integrity: sha512-ZRpmWyuCdg0TtNKk8bEqvm13oQvXMmzXDsfD4cBgcx5LouborvU5pm3JMkdTP3HcszyUI08AM1dHMXA5r2g6Sg==}
     resolution: {integrity: sha512-ZRpmWyuCdg0TtNKk8bEqvm13oQvXMmzXDsfD4cBgcx5LouborvU5pm3JMkdTP3HcszyUI08AM1dHMXA5r2g6Sg==}
     engines: {node: '>= 0.3.0'}
     engines: {node: '>= 0.3.0'}
@@ -13231,7 +13247,7 @@ packages:
   tar@6.2.1:
   tar@6.2.1:
     resolution: {integrity: sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==}
     resolution: {integrity: sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==}
     engines: {node: '>=10'}
     engines: {node: '>=10'}
-    deprecated: Old versions of tar are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exhorbitant rates) by contacting i@izs.me
+    deprecated: Old versions of tar are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me
 
 
   teeny-request@7.2.0:
   teeny-request@7.2.0:
     resolution: {integrity: sha512-SyY0pek1zWsi0LRVAALem+avzMLc33MKW/JLLakdP4s9+D7+jHcy5x6P+h94g2QNZsAqQNfX5lsbd3WSeJXrrw==}
     resolution: {integrity: sha512-SyY0pek1zWsi0LRVAALem+avzMLc33MKW/JLLakdP4s9+D7+jHcy5x6P+h94g2QNZsAqQNfX5lsbd3WSeJXrrw==}
@@ -15289,6 +15305,17 @@ snapshots:
       - aws-crt
       - aws-crt
     optional: true
     optional: true
 
 
+  '@aws-sdk/lib-storage@3.454.0(@aws-sdk/client-s3@3.454.0)':
+    dependencies:
+      '@aws-sdk/client-s3': 3.454.0
+      '@smithy/abort-controller': 2.2.0
+      '@smithy/middleware-endpoint': 2.5.1
+      '@smithy/smithy-client': 2.5.1
+      buffer: 5.6.0
+      events: 3.3.0
+      stream-browserify: 3.0.0
+      tslib: 2.8.1
+
   '@aws-sdk/middleware-bucket-endpoint@3.451.0':
   '@aws-sdk/middleware-bucket-endpoint@3.451.0':
     dependencies:
     dependencies:
       '@aws-sdk/types': 3.451.0
       '@aws-sdk/types': 3.451.0
@@ -21246,6 +21273,11 @@ snapshots:
       ieee754: 1.2.1
       ieee754: 1.2.1
       isarray: 1.0.0
       isarray: 1.0.0
 
 
+  buffer@5.6.0:
+    dependencies:
+      base64-js: 1.5.1
+      ieee754: 1.2.1
+
   buffer@5.7.1:
   buffer@5.7.1:
     dependencies:
     dependencies:
       base64-js: 1.5.1
       base64-js: 1.5.1
@@ -28648,6 +28680,11 @@ snapshots:
 
 
   stoppable@1.1.0: {}
   stoppable@1.1.0: {}
 
 
+  stream-browserify@3.0.0:
+    dependencies:
+      inherits: 2.0.4
+      readable-stream: 3.6.0
+
   stream-buffers@0.2.6: {}
   stream-buffers@0.2.6: {}
 
 
   stream-events@1.0.5:
   stream-events@1.0.5: