Просмотр исходного кода

fix: add log for bulk export debugging

Ryotaro Nagahara 1 месяц назад
Родитель
Commit
f5a551ff1c

+ 20 - 2
apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-cron/index.ts

@@ -196,6 +196,10 @@ class PageBulkExportJobCronService
       pageBulkExportJob.statusOnPreviousCronExec = pageBulkExportJob.status;
       await pageBulkExportJob.save();
 
+      logger.info(
+        `proceedBulkExportJob: status=${pageBulkExportJob.status}, jobId=${pageBulkExportJob._id}`,
+      );
+
       if (pageBulkExportJob.status === PageBulkExportJobStatus.initializing) {
         await createPageSnapshotsAsync.bind(this)(user, pageBulkExportJob);
       } else if (
@@ -205,7 +209,17 @@ class PageBulkExportJobCronService
       } else if (
         pageBulkExportJob.status === PageBulkExportJobStatus.uploading
       ) {
-        compressAndUpload.bind(this)(user, pageBulkExportJob);
+        logger.info(
+          'proceedBulkExportJob: calling compressAndUpload (not awaited)',
+        );
+        compressAndUpload
+          .bind(this)(user, pageBulkExportJob)
+          .catch((err) => {
+            logger.error(
+              'proceedBulkExportJob: unhandled compressAndUpload error',
+              err,
+            );
+          });
       }
     } catch (err) {
       logger.error(err);
@@ -225,10 +239,14 @@ class PageBulkExportJobCronService
     err: Error | null,
     pageBulkExportJob: PageBulkExportJobDocument,
   ) {
+    logger.info('handleError: called', {
+      errType: err?.constructor.name,
+      errMessage: err?.message,
+    });
     if (err == null) return;
 
     if (err instanceof BulkExportJobExpiredError) {
-      logger.error(err);
+      logger.error('handleError: BulkExportJobExpiredError', err);
       await this.notifyExportResultAndCleanUp(
         SupportedAction.ACTION_PAGE_BULK_EXPORT_JOB_EXPIRED,
         pageBulkExportJob,

+ 9 - 1
apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-cron/steps/compress-and-upload.ts

@@ -76,32 +76,40 @@ export async function compressAndUpload(
 
   const fileUploadService: FileUploader = this.crowi.fileUploadService;
 
+  logger.info('compressAndUpload: starting');
+
   // Wrap with Node.js native PassThrough so that AWS SDK recognizes the stream as a native Readable
   const uploadStream = new PassThrough();
 
   // Establish pipe before finalize to ensure data flows correctly
   pageArchiver.pipe(uploadStream);
   pageArchiver.on('error', (err) => {
+    logger.error('compressAndUpload: pageArchiver error', err);
     uploadStream.destroy(err);
     pageArchiver.destroy();
   });
 
   pageArchiver.directory(this.getTmpOutputDir(pageBulkExportJob), false);
   pageArchiver.finalize();
+  logger.info('compressAndUpload: finalize called');
 
   this.setStreamsInExecution(pageBulkExportJob._id, pageArchiver, uploadStream);
 
   try {
+    logger.info('compressAndUpload: starting upload');
     await fileUploadService.uploadAttachment(uploadStream, attachment);
+    logger.info('compressAndUpload: upload completed, running postProcess');
     await postProcess.bind(this)(
       pageBulkExportJob,
       attachment,
       pageArchiver.pointer(),
     );
+    logger.info('compressAndUpload: postProcess completed');
   } catch (e) {
-    logger.error(e);
+    logger.error('compressAndUpload: error caught', e);
     this.handleError(e, pageBulkExportJob);
   } finally {
+    logger.info('compressAndUpload: finally block, destroying streams');
     pageArchiver.destroy();
     uploadStream.destroy();
   }

+ 12 - 14
apps/app/src/server/service/file-uploader/aws/index.ts

@@ -245,7 +245,9 @@ class AwsFileUploader extends AbstractFileUploader {
       throw new Error('AWS is not configured.');
     }
 
-    logger.debug(`File uploading: fileName=${attachment.fileName}`);
+    logger.info(
+      `uploadAttachment: starting, fileName=${attachment.fileName}, readableType=${readable.constructor.name}, isReadable=${readable.readable}`,
+    );
 
     const s3 = S3Factory();
 
@@ -254,6 +256,9 @@ class AwsFileUploader extends AbstractFileUploader {
 
     try {
       const uploadTimeout = configManager.getConfig('app:fileUploadTimeout');
+      logger.info(
+        `uploadAttachment: sending PutObjectCommand, bucket=${getS3Bucket()}, key=${filePath}, timeout=${uploadTimeout}`,
+      );
 
       await s3.send(
         new PutObjectCommand({
@@ -271,21 +276,14 @@ class AwsFileUploader extends AbstractFileUploader {
         { abortSignal: AbortSignal.timeout(uploadTimeout) },
       );
 
-      logger.debug(
-        `File upload completed successfully: fileName=${attachment.fileName}`,
+      logger.info(
+        `uploadAttachment: completed successfully, fileName=${attachment.fileName}`,
       );
     } catch (error) {
-      // Handle timeout error specifically
-      if (error.name === 'AbortError') {
-        logger.warn(`Upload timeout: fileName=${attachment.fileName}`, error);
-      } else {
-        logger.error(
-          `File upload failed: fileName=${attachment.fileName}`,
-          error,
-        );
-      }
-      // Re-throw the error to be handled by the caller.
-      // The pipeline automatically handles stream cleanup on error.
+      logger.error(
+        `uploadAttachment: failed, fileName=${attachment.fileName}, errorName=${error.name}, errorMessage=${error.message}`,
+        error,
+      );
       throw error;
     }
   }