Futa Arai 1 год назад
Родитель
Commit
b959b02668

+ 0 - 256
apps/app/src/features/page-bulk-export/server/service/page-bulk-export-old.ts

@@ -1,256 +0,0 @@
-import type { Readable } from 'stream';
-import { Writable, pipeline } from 'stream';
-
-import type { HasObjectId } from '@growi/core';
-import { type IPage, isPopulated, SubscriptionStatusType } from '@growi/core';
-import { normalizePath } from '@growi/core/dist/utils/path-utils';
-import type { Archiver } from 'archiver';
-import archiver from 'archiver';
-import type { QueueObject } from 'async';
-import gc from 'expose-gc/function';
-import mongoose from 'mongoose';
-
-import type { SupportedActionType } from '~/interfaces/activity';
-import { SupportedAction, SupportedTargetModel } from '~/interfaces/activity';
-import { AttachmentType, FilePathOnStoragePrefix } from '~/server/interfaces/attachment';
-import type { IAttachmentDocument } from '~/server/models';
-import { Attachment } from '~/server/models';
-import type { ActivityDocument } from '~/server/models/activity';
-import type { PageModel, PageDocument } from '~/server/models/page';
-import Subscription from '~/server/models/subscription';
-import type { IAwsMultipartUploader } from '~/server/service/file-uploader/aws/multipart-upload';
-import { preNotifyService } from '~/server/service/pre-notify';
-import { getBufferToFixedSizeTransform } from '~/server/util/stream';
-import loggerFactory from '~/utils/logger';
-
-import { PageBulkExportFormat } from '../../interfaces/page-bulk-export';
-import type { PageBulkExportJobDocument } from '../models/page-bulk-export-job';
-import PageBulkExportJob from '../models/page-bulk-export-job';
-
-const logger = loggerFactory('growi:services:PageBulkExportService');
-
-// Custom type for back pressure workaround
-interface ArchiverWithQueue extends Archiver {
-  _queue?: QueueObject<any>;
-}
-
-type ActivityParameters ={
-  ip: string;
-  endpoint: string;
-}
-
-class PageBulkExportService {
-
-  crowi: any;
-
-  activityEvent: any;
-
-  // multipart upload part size
-  maxPartSize = 5 * 1024 * 1024; // 5MB
-
-  pageBatchSize = 100;
-
-  constructor(crowi) {
-    this.crowi = crowi;
-    this.activityEvent = crowi.event('activity');
-  }
-
-  async bulkExportWithBasePagePath(basePagePath: string, currentUser, activityParameters: ActivityParameters): Promise<void> {
-    const Page = mongoose.model<IPage, PageModel>('Page');
-    const basePage = await Page.findByPathAndViewer(basePagePath, currentUser, null, true);
-
-    if (basePage == null) {
-      throw new Error('Base page not found or not accessible');
-    }
-
-    const timeStamp = (new Date()).getTime();
-    const originalName = `page-bulk-export-${timeStamp}.zip`;
-    const attachment = Attachment.createWithoutSave(null, currentUser, originalName, 'zip', 0, AttachmentType.PAGE_BULK_EXPORT);
-    const uploadKey = `${FilePathOnStoragePrefix.pageBulkExport}/${attachment.fileName}`;
-
-    const pagesReadable = this.getPageReadable(basePagePath);
-    const zipArchiver = this.setUpZipArchiver();
-    const pagesWritable = this.getPageWritable(zipArchiver);
-    const bufferToPartSizeTransform = getBufferToFixedSizeTransform(this.maxPartSize);
-
-    // init multipart upload
-    // TODO: Create abstract interface IMultipartUploader in https://redmine.weseek.co.jp/issues/135775
-    const multipartUploader: IAwsMultipartUploader | undefined = this.crowi?.fileUploadService?.createMultipartUploader(uploadKey);
-    let pageBulkExportJob: PageBulkExportJobDocument & HasObjectId;
-    if (multipartUploader == null) {
-      throw Error('Multipart upload not available for configured file upload type');
-    }
-    try {
-      await multipartUploader.initUpload();
-      pageBulkExportJob = await PageBulkExportJob.create({
-        user: currentUser,
-        page: basePage,
-        uploadId: multipartUploader.uploadId,
-        format: PageBulkExportFormat.markdown,
-      });
-      await Subscription.upsertSubscription(currentUser, SupportedTargetModel.MODEL_PAGE_BULK_EXPORT_JOB, pageBulkExportJob, SubscriptionStatusType.SUBSCRIBE);
-    }
-    catch (err) {
-      logger.error(err);
-      await multipartUploader.abortUpload();
-      throw err;
-    }
-
-    const multipartUploadWritable = this.getMultipartUploadWritable(multipartUploader, pageBulkExportJob, attachment, activityParameters);
-
-    // Cannot directly pipe from pagesWritable to zipArchiver due to how the 'append' method works.
-    // Hence, execution of two pipelines is required.
-    pipeline(pagesReadable, pagesWritable, err => this.handleExportErrorInStream(err, activityParameters, pageBulkExportJob, multipartUploader));
-    pipeline(zipArchiver, bufferToPartSizeTransform, multipartUploadWritable,
-      err => this.handleExportErrorInStream(err, activityParameters, pageBulkExportJob, multipartUploader));
-  }
-
-  private async handleExportErrorInStream(
-      err: Error | null, activityParameters: ActivityParameters, pageBulkExportJob: PageBulkExportJobDocument, multipartUploader: IAwsMultipartUploader,
-  ): Promise<void> {
-    if (err != null) {
-      logger.error(err);
-      await multipartUploader.abortUpload();
-      await this.notifyExportResult(activityParameters, pageBulkExportJob, SupportedAction.ACTION_PAGE_BULK_EXPORT_FAILED);
-    }
-  }
-
-  /**
-   * Get a Readable of all the pages under the specified path, including the root page.
-   */
-  private getPageReadable(basePagePath: string): Readable {
-    const Page = mongoose.model<IPage, PageModel>('Page');
-    const { PageQueryBuilder } = Page;
-
-    const builder = new PageQueryBuilder(Page.find())
-      .addConditionToListWithDescendants(basePagePath);
-
-    return builder
-      .query
-      .populate('revision')
-      .lean()
-      .cursor({ batchSize: this.pageBatchSize });
-  }
-
-  /**
-   * Get a Writable that writes the page body to a zip file
-   */
-  private getPageWritable(zipArchiver: Archiver): Writable {
-    return new Writable({
-      objectMode: true,
-      write: async(page: PageDocument, encoding, callback) => {
-        try {
-          const revision = page.revision;
-
-          if (revision != null && isPopulated(revision)) {
-            const markdownBody = revision.body;
-            const pathNormalized = normalizePath(page.path);
-            // Since archiver does not provide a proper way to back pressure at the moment, use the _queue property as a workaround
-            // ref: https://github.com/archiverjs/node-archiver/issues/611
-            const { _queue } = zipArchiver.append(markdownBody, { name: `${pathNormalized}.md` }) as ArchiverWithQueue;
-            if (_queue == null) {
-              throw Error('Cannot back pressure the export pipeline. Aborting the export.');
-            }
-            if (_queue.length() > this.pageBatchSize) {
-              await _queue.drain();
-            }
-          }
-        }
-        catch (err) {
-          callback(err);
-          return;
-        }
-        callback();
-      },
-      final: (callback) => {
-        zipArchiver.finalize();
-        callback();
-      },
-    });
-  }
-
-  private setUpZipArchiver(): Archiver {
-    const zipArchiver = archiver('zip', {
-      zlib: { level: 9 }, // maximum compression
-    });
-
-    // good practice to catch warnings (ie stat failures and other non-blocking errors)
-    zipArchiver.on('warning', (err) => {
-      if (err.code === 'ENOENT') logger.error(err);
-      else throw err;
-    });
-
-    return zipArchiver;
-  }
-
-  private getMultipartUploadWritable(
-      multipartUploader: IAwsMultipartUploader,
-      pageBulkExportJob: PageBulkExportJobDocument,
-      attachment: IAttachmentDocument,
-      activityParameters: ActivityParameters,
-  ): Writable {
-    let partNumber = 1;
-
-    return new Writable({
-      write: async(part: Buffer, encoding, callback) => {
-        try {
-          await multipartUploader.uploadPart(part, partNumber);
-          partNumber += 1;
-          // First aid to prevent unexplained memory leaks
-          logger.info('global.gc() invoked.');
-          gc();
-        }
-        catch (err) {
-          callback(err);
-          return;
-        }
-        callback();
-      },
-      final: async(callback) => {
-        try {
-          await multipartUploader.completeUpload();
-
-          const fileSize = await multipartUploader.getUploadedFileSize();
-          attachment.fileSize = fileSize;
-          await attachment.save();
-
-          pageBulkExportJob.completedAt = new Date();
-          pageBulkExportJob.attachment = attachment._id;
-          await pageBulkExportJob.save();
-
-          await this.notifyExportResult(activityParameters, pageBulkExportJob, SupportedAction.ACTION_PAGE_BULK_EXPORT_COMPLETED);
-        }
-        catch (err) {
-          callback(err);
-          return;
-        }
-        callback();
-      },
-    });
-  }
-
-  private async notifyExportResult(
-      activityParameters: ActivityParameters, pageBulkExportJob: PageBulkExportJobDocument, action: SupportedActionType,
-  ) {
-    const activity = await this.crowi.activityService.createActivity({
-      ...activityParameters,
-      action,
-      targetModel: SupportedTargetModel.MODEL_PAGE_BULK_EXPORT_JOB,
-      target: pageBulkExportJob,
-      user: pageBulkExportJob.user,
-      snapshot: {
-        username: isPopulated(pageBulkExportJob.user) ? pageBulkExportJob.user.username : '',
-      },
-    });
-    const getAdditionalTargetUsers = (activity: ActivityDocument) => [activity.user];
-    const preNotify = preNotifyService.generatePreNotify(activity, getAdditionalTargetUsers);
-    this.activityEvent.emit('updated', activity, pageBulkExportJob, preNotify);
-  }
-
-}
-
-// eslint-disable-next-line import/no-mutable-exports
-export let pageBulkExportService: PageBulkExportService | undefined; // singleton instance
-export default function instanciate(crowi): void {
-  pageBulkExportService = new PageBulkExportService(crowi);
-}

+ 7 - 9
apps/app/src/features/page-bulk-export/server/service/page-bulk-export.ts

@@ -18,7 +18,8 @@ import { Attachment } from '~/server/models';
 import type { ActivityDocument } from '~/server/models/activity';
 import type { PageModel, PageDocument } from '~/server/models/page';
 import Subscription from '~/server/models/subscription';
-import type { IGcsMultipartUploader } from '~/server/service/file-uploader/gcs/multipart-upload';
+import type { FileUploader } from '~/server/service/file-uploader';
+import type { IMultipartUploader } from '~/server/service/file-uploader/multipart-uploader';
 import { preNotifyService } from '~/server/service/pre-notify';
 import { getBufferToFixedSizeTransform } from '~/server/util/stream';
 import loggerFactory from '~/utils/logger';
@@ -74,12 +75,9 @@ class PageBulkExportService {
     const bufferToPartSizeTransform = getBufferToFixedSizeTransform(this.maxPartSize);
 
     // init multipart upload
-    // TODO: Create abstract interface IMultipartUploader in https://redmine.weseek.co.jp/issues/135775
-    const multipartUploader: IGcsMultipartUploader | undefined = this.crowi?.fileUploadService?.createMultipartUploader(uploadKey, this.maxPartSize);
+    const fileUploadService: FileUploader = this.crowi.fileUploadService;
+    const multipartUploader: IMultipartUploader = fileUploadService.createMultipartUploader(uploadKey, this.maxPartSize);
     let pageBulkExportJob: PageBulkExportJobDocument & HasObjectId;
-    if (multipartUploader == null) {
-      throw Error('Multipart upload not available for configured file upload type');
-    }
     try {
       await multipartUploader.initUpload();
       pageBulkExportJob = await PageBulkExportJob.create({
@@ -106,7 +104,7 @@ class PageBulkExportService {
   }
 
   private async handleExportErrorInStream(
-      err: Error | null, activityParameters: ActivityParameters, pageBulkExportJob: PageBulkExportJobDocument, multipartUploader: IGcsMultipartUploader,
+      err: Error | null, activityParameters: ActivityParameters, pageBulkExportJob: PageBulkExportJobDocument, multipartUploader: IMultipartUploader,
   ): Promise<void> {
     if (err != null) {
       await multipartUploader.abortUpload();
@@ -183,7 +181,7 @@ class PageBulkExportService {
   }
 
   private getMultipartUploadWritable(
-      multipartUploader: IGcsMultipartUploader,
+      multipartUploader: IMultipartUploader,
       pageBulkExportJob: PageBulkExportJobDocument,
       attachment: IAttachmentDocument,
       activityParameters: ActivityParameters,
@@ -193,7 +191,7 @@ class PageBulkExportService {
     return new Writable({
       write: async(part: Buffer, encoding, callback) => {
         try {
-          await multipartUploader.uploadPart(part, partNumber, this.maxPartSize);
+          await multipartUploader.uploadPart(part, partNumber);
           partNumber += 1;
           // First aid to prevent unexplained memory leaks
           logger.info('global.gc() invoked.');

+ 4 - 9
apps/app/src/server/service/file-uploader/aws/index.ts

@@ -24,8 +24,7 @@ import {
 } from '../file-uploader';
 import { ContentHeaders } from '../utils';
 
-import type { IAwsMultipartUploader } from './multipart-upload';
-import { AwsMultipartUploader } from './multipart-upload';
+import { AwsMultipartUploader } from './multipart-uploader';
 
 
 const logger = loggerFactory('growi:service:fileUploaderAws');
@@ -88,12 +87,8 @@ const getFilePathOnStorage = (attachment: IAttachmentDocument) => {
   return filePath;
 };
 
-export interface IAwsFileUploader {
-  createMultipartUploader: (uploadKey: string) => IAwsMultipartUploader
-}
-
 // TODO: rewrite this module to be a type-safe implementation
-class AwsFileUploader extends AbstractFileUploader implements IAwsFileUploader {
+class AwsFileUploader extends AbstractFileUploader {
 
   /**
    * @inheritdoc
@@ -214,9 +209,9 @@ class AwsFileUploader extends AbstractFileUploader implements IAwsFileUploader {
 
   }
 
-  createMultipartUploader(uploadKey: string) {
+  override createMultipartUploader(uploadKey: string, maxPartSize: number) {
     const s3 = S3Factory();
-    return new AwsMultipartUploader(s3, getS3Bucket(), uploadKey);
+    return new AwsMultipartUploader(s3, getS3Bucket(), uploadKey, maxPartSize);
   }
 
 }

+ 8 - 52
apps/app/src/server/service/file-uploader/aws/multipart-upload.ts → apps/app/src/server/service/file-uploader/aws/multipart-uploader.ts

@@ -5,6 +5,9 @@ import {
 
 import loggerFactory from '~/utils/logger';
 
+import { MultipartUploader, type IMultipartUploader } from '../multipart-uploader';
+
+
 const logger = loggerFactory('growi:services:fileUploaderAws:multipartUploader');
 
 enum UploadStatus {
@@ -14,15 +17,7 @@ enum UploadStatus {
   ABORTED
 }
 
-// Create abstract interface IMultipartUploader in https://redmine.weseek.co.jp/issues/135775
-export interface IAwsMultipartUploader {
-  initUpload(): Promise<void>;
-  uploadPart(part: Buffer, partNumber: number): Promise<void>;
-  completeUpload(): Promise<void>;
-  abortUpload(): Promise<void>;
-  uploadId: string;
-  getUploadedFileSize(): Promise<number>;
-}
+export type IAwsMultipartUploader = IMultipartUploader
 
 /**
  * Class for uploading files to S3 using multipart upload.
@@ -30,33 +25,22 @@ export interface IAwsMultipartUploader {
  * Each instance can only be used for one multipart upload, and cannot be reused once completed.
  * TODO: Enable creation of uploader of inturrupted uploads: https://redmine.weseek.co.jp/issues/78040
  */
-export class AwsMultipartUploader implements IAwsMultipartUploader {
+export class AwsMultipartUploader extends MultipartUploader implements IAwsMultipartUploader {
 
   private bucket: string | undefined;
 
-  private uploadKey: string;
-
-  private _uploadId: string | undefined;
-
   private s3Client: S3Client;
 
   private parts: { PartNumber: number; ETag: string | undefined; }[] = [];
 
-  private currentStatus: UploadStatus = UploadStatus.BEFORE_INIT;
-
-  private _uploadedFileSize = 0;
+  constructor(s3Client: S3Client, bucket: string | undefined, uploadKey: string, maxPartSize: number) {
+    super(uploadKey, maxPartSize);
 
-  constructor(s3Client: S3Client, bucket: string | undefined, uploadKey: string) {
     this.s3Client = s3Client;
     this.bucket = bucket;
     this.uploadKey = uploadKey;
   }
 
-  get uploadId(): string {
-    if (this._uploadId == null) throw Error('UploadId is empty');
-    return this._uploadId;
-  }
-
   async initUpload(): Promise<void> {
     this.validateUploadStatus(UploadStatus.BEFORE_INIT);
 
@@ -74,6 +58,7 @@ export class AwsMultipartUploader implements IAwsMultipartUploader {
 
   async uploadPart(part: Buffer, partNumber: number): Promise<void> {
     this.validateUploadStatus(UploadStatus.IN_PROGRESS);
+    this.validatePartSize(part.length);
 
     const uploadMetaData = await this.s3Client.send(new UploadPartCommand({
       Body: part,
@@ -129,33 +114,4 @@ export class AwsMultipartUploader implements IAwsMultipartUploader {
     return this._uploadedFileSize;
   }
 
-  private validateUploadStatus(desiredStatus: UploadStatus): void {
-    if (desiredStatus === this.currentStatus) return;
-
-    let errMsg: string | null = null;
-
-    if (this.currentStatus === UploadStatus.COMPLETED) {
-      errMsg = 'Multipart upload has already been completed';
-    }
-
-    if (this.currentStatus === UploadStatus.ABORTED) {
-      errMsg = 'Multipart upload has been aborted';
-    }
-
-    // currentStatus is IN_PROGRESS or BEFORE_INIT
-
-    if (this.currentStatus === UploadStatus.IN_PROGRESS && desiredStatus === UploadStatus.BEFORE_INIT) {
-      errMsg = 'Multipart upload has already been initiated';
-    }
-
-    if (this.currentStatus === UploadStatus.BEFORE_INIT && desiredStatus === UploadStatus.IN_PROGRESS) {
-      errMsg = 'Multipart upload not initiated';
-    }
-
-    if (errMsg != null) {
-      logger.error(errMsg);
-      throw Error(errMsg);
-    }
-  }
-
 }

+ 7 - 0
apps/app/src/server/service/file-uploader/file-uploader.ts

@@ -9,6 +9,8 @@ import loggerFactory from '~/utils/logger';
 
 import { configManager } from '../config-manager';
 
+import type { MultipartUploader } from './multipart-uploader';
+
 const logger = loggerFactory('growi:service:fileUploader');
 
 
@@ -39,6 +41,7 @@ export interface FileUploader {
   respond(res: Response, attachment: IAttachmentDocument, opts?: RespondOptions): void,
   findDeliveryFile(attachment: IAttachmentDocument): Promise<NodeJS.ReadableStream>,
   generateTemporaryUrl(attachment: IAttachmentDocument, opts?: RespondOptions): Promise<TemporaryUrl>,
+  createMultipartUploader: (uploadKey: string, maxPartSize: number) => MultipartUploader,
 }
 
 export abstract class AbstractFileUploader implements FileUploader {
@@ -151,6 +154,10 @@ export abstract class AbstractFileUploader implements FileUploader {
     return ResponseMode.RELAY;
   }
 
+  createMultipartUploader(uploadKey: string, maxPartSize: number): MultipartUploader {
+    throw new Error('Multipart upload not available for file upload type');
+  }
+
   /**
    * Respond to the HTTP request.
    */

+ 3 - 7
apps/app/src/server/service/file-uploader/gcs/index.ts

@@ -14,8 +14,8 @@ import {
 } from '../file-uploader';
 import { ContentHeaders } from '../utils';
 
-import type { IGcsMultipartUploader } from './multipart-upload';
-import { GcsMultipartUploader } from './multipart-upload';
+import type { IGcsMultipartUploader } from './multipart-uploader';
+import { GcsMultipartUploader } from './multipart-uploader';
 
 const logger = loggerFactory('growi:service:fileUploaderGcs');
 
@@ -64,10 +64,6 @@ async function isFileExists(file) {
   return res[0];
 }
 
-export interface IGcsFileUploader {
-  createMultipartUploader: (uploadKey: string) => IGcsMultipartUploader
-}
-
 // TODO: rewrite this module to be a type-safe implementation
 class GcsFileUploader extends AbstractFileUploader {
 
@@ -175,7 +171,7 @@ class GcsFileUploader extends AbstractFileUploader {
 
   }
 
-  createMultipartUploader(uploadKey: string, maxPartSize: number) {
+  override createMultipartUploader(uploadKey: string, maxPartSize: number) {
     const gcs = getGcsInstance();
     const myBucket = gcs.bucket(getGcsBucket());
     return new GcsMultipartUploader(myBucket, uploadKey, maxPartSize);

+ 6 - 60
apps/app/src/server/service/file-uploader/gcs/multipart-upload.ts → apps/app/src/server/service/file-uploader/gcs/multipart-uploader.ts

@@ -2,6 +2,8 @@ import type { Bucket, File } from '@google-cloud/storage';
 
 import loggerFactory from '~/utils/logger';
 
+import { MultipartUploader, type IMultipartUploader } from '../multipart-uploader';
+
 import axios from 'src/utils/axios';
 
 const logger = loggerFactory('growi:services:fileUploaderGcs:multipartUploader');
@@ -13,15 +15,7 @@ enum UploadStatus {
   ABORTED
 }
 
-// Create abstract interface IMultipartUploader in https://redmine.weseek.co.jp/issues/135775
-export interface IGcsMultipartUploader {
-  initUpload(): Promise<void>;
-  uploadPart(body: Buffer, partNumber: number, maxPartSize?: number): Promise<void>;
-  completeUpload(): Promise<void>;
-  abortUpload(): Promise<void>;
-  uploadId: string;
-  getUploadedFileSize(): Promise<number>;
-}
+export type IGcsMultipartUploader = IMultipartUploader
 
 /**
  * Class for uploading files to GCS using multipart upload.
@@ -29,35 +23,20 @@ export interface IGcsMultipartUploader {
  * Each instance can only be used for one multipart upload, and cannot be reused once completed.
  * TODO: Enable creation of uploader of inturrupted uploads: https://redmine.weseek.co.jp/issues/78040
  */
-export class GcsMultipartUploader implements IGcsMultipartUploader {
-
-  private uploadKey: string;
-
-  private _uploadId: string | undefined; // URL of GCS resumable upload
+export class GcsMultipartUploader extends MultipartUploader implements IGcsMultipartUploader {
 
   private file: File;
 
-  private currentStatus: UploadStatus = UploadStatus.BEFORE_INIT;
-
-  private _uploadedFileSize = 0;
-
   // ref: https://cloud.google.com/storage/docs/performing-resumable-uploads?hl=en#chunked-upload
   private readonly minPartSize = 256 * 1024; // 256KB
 
-  private readonly maxPartSize: number;
-
   constructor(bucket: Bucket, uploadKey: string, maxPartSize: number) {
+    super(uploadKey, maxPartSize);
+
     this.validateUploadPartSize(maxPartSize);
-    this.maxPartSize = maxPartSize;
-    this.uploadKey = uploadKey;
     this.file = bucket.file(this.uploadKey);
   }
 
-  get uploadId(): string {
-    if (this._uploadId == null) throw Error('UploadId is empty');
-    return this._uploadId;
-  }
-
   async initUpload(): Promise<void> {
     this.validateUploadStatus(UploadStatus.BEFORE_INIT);
 
@@ -152,37 +131,4 @@ export class GcsMultipartUploader implements IGcsMultipartUploader {
     if (uploadPartSize > this.minPartSize && uploadPartSize % this.minPartSize !== 0) throw Error(`uploadPartSize must be a multiple of ${this.minPartSize}`);
   }
 
-  private validatePartSize(partSize) {
-    if (partSize > this.maxPartSize) throw Error(`partSize must be less than or equal to ${this.maxPartSize}`);
-  }
-
-  private validateUploadStatus(desiredStatus: UploadStatus): void {
-    if (desiredStatus === this.currentStatus) return;
-
-    let errMsg: string | null = null;
-
-    if (this.currentStatus === UploadStatus.COMPLETED) {
-      errMsg = 'Multipart upload has already been completed';
-    }
-
-    if (this.currentStatus === UploadStatus.ABORTED) {
-      errMsg = 'Multipart upload has been aborted';
-    }
-
-    // currentStatus is IN_PROGRESS or BEFORE_INIT
-
-    if (this.currentStatus === UploadStatus.IN_PROGRESS && desiredStatus === UploadStatus.BEFORE_INIT) {
-      errMsg = 'Multipart upload has already been initiated';
-    }
-
-    if (this.currentStatus === UploadStatus.BEFORE_INIT && desiredStatus === UploadStatus.IN_PROGRESS) {
-      errMsg = 'Multipart upload not initiated';
-    }
-
-    if (errMsg != null) {
-      logger.error(errMsg);
-      throw Error(errMsg);
-    }
-  }
-
 }

+ 86 - 0
apps/app/src/server/service/file-uploader/multipart-uploader.ts

@@ -0,0 +1,86 @@
+import loggerFactory from '~/utils/logger';
+
+const logger = loggerFactory('growi:services:fileUploader:multipartUploader');
+
+enum UploadStatus {
+  BEFORE_INIT,
+  IN_PROGRESS,
+  COMPLETED,
+  ABORTED
+}
+
+export interface IMultipartUploader {
+  initUpload(): Promise<void>;
+  uploadPart(body: Buffer, partNumber: number): Promise<void>;
+  completeUpload(): Promise<void>;
+  abortUpload(): Promise<void>;
+  uploadId: string;
+  getUploadedFileSize(): Promise<number>;
+}
+
+export abstract class MultipartUploader implements IMultipartUploader {
+
+  protected uploadKey: string;
+
+  protected _uploadId: string | undefined;
+
+  protected currentStatus: UploadStatus = UploadStatus.BEFORE_INIT;
+
+  protected _uploadedFileSize = 0;
+
+  protected readonly maxPartSize: number;
+
+  constructor(uploadKey: string, maxPartSize: number) {
+    this.maxPartSize = maxPartSize;
+    this.uploadKey = uploadKey;
+  }
+
+  get uploadId(): string {
+    if (this._uploadId == null) throw Error('UploadId is empty');
+    return this._uploadId;
+  }
+
+  abstract initUpload(): Promise<void>
+
+  abstract uploadPart(part: Buffer, partNumber: number): Promise<void>
+
+  abstract completeUpload(): Promise<void>
+
+  abstract abortUpload(): Promise<void>
+
+  abstract getUploadedFileSize(): Promise<number>
+
+  protected validatePartSize(partSize: number): void {
+    if (partSize > this.maxPartSize) throw Error(`partSize must be less than or equal to ${this.maxPartSize}`);
+  }
+
+  protected validateUploadStatus(desiredStatus: UploadStatus): void {
+    if (desiredStatus === this.currentStatus) return;
+
+    let errMsg: string | null = null;
+
+    if (this.currentStatus === UploadStatus.COMPLETED) {
+      errMsg = 'Multipart upload has already been completed';
+    }
+
+    if (this.currentStatus === UploadStatus.ABORTED) {
+      errMsg = 'Multipart upload has been aborted';
+    }
+
+    // currentStatus is IN_PROGRESS or BEFORE_INIT
+
+    if (this.currentStatus === UploadStatus.IN_PROGRESS && desiredStatus === UploadStatus.BEFORE_INIT) {
+      errMsg = 'Multipart upload has already been initiated';
+    }
+
+    if (this.currentStatus === UploadStatus.BEFORE_INIT && desiredStatus === UploadStatus.IN_PROGRESS) {
+      errMsg = 'Multipart upload not initiated';
+    }
+
+    if (errMsg != null) {
+      logger.error(errMsg);
+      throw Error(errMsg);
+    }
+  }
+
+}