Преглед изворног кода

configure biome for some app server services

Futa Arai пре 4 месеца
родитељ
комит
a1c772b6b7
41 измењених фајлова са 1353 додато и 773 уклоњено
  1. 8 0
      apps/app/.eslintrc.js
  2. 152 96
      apps/app/src/server/service/file-uploader/aws/index.ts
  3. 65 39
      apps/app/src/server/service/file-uploader/aws/multipart-uploader.ts
  4. 127 67
      apps/app/src/server/service/file-uploader/azure.ts
  5. 100 48
      apps/app/src/server/service/file-uploader/file-uploader.ts
  6. 76 54
      apps/app/src/server/service/file-uploader/gcs/index.ts
  7. 33 20
      apps/app/src/server/service/file-uploader/gcs/multipart-uploader.ts
  8. 79 40
      apps/app/src/server/service/file-uploader/gridfs.ts
  9. 2 2
      apps/app/src/server/service/file-uploader/index.ts
  10. 90 58
      apps/app/src/server/service/file-uploader/local.ts
  11. 92 29
      apps/app/src/server/service/file-uploader/multipart-uploader.spec.ts
  12. 9 11
      apps/app/src/server/service/file-uploader/multipart-uploader.ts
  13. 39 13
      apps/app/src/server/service/file-uploader/utils/headers.ts
  14. 27 11
      apps/app/src/server/service/global-notification/global-notification-mail.js
  15. 42 17
      apps/app/src/server/service/global-notification/global-notification-slack.js
  16. 16 8
      apps/app/src/server/service/global-notification/index.js
  17. 8 11
      apps/app/src/server/service/growi-bridge/index.ts
  18. 1 4
      apps/app/src/server/service/growi-bridge/unzip-stream-utils.ts
  19. 19 16
      apps/app/src/server/service/growi-info/growi-info.integ.ts
  20. 44 21
      apps/app/src/server/service/growi-info/growi-info.ts
  21. 5 4
      apps/app/src/server/service/import/construct-convert-map.integ.ts
  22. 11 6
      apps/app/src/server/service/import/construct-convert-map.ts
  23. 12 8
      apps/app/src/server/service/import/get-model-from-collection-name.ts
  24. 7 5
      apps/app/src/server/service/import/import-settings.ts
  25. 9 8
      apps/app/src/server/service/import/import.spec.ts
  26. 189 103
      apps/app/src/server/service/import/import.ts
  27. 0 1
      apps/app/src/server/service/import/index.ts
  28. 10 8
      apps/app/src/server/service/import/overwrite-function.ts
  29. 0 1
      apps/app/src/server/service/import/overwrite-params/attachmentFiles.chunks.ts
  30. 3 3
      apps/app/src/server/service/import/overwrite-params/index.ts
  31. 4 1
      apps/app/src/server/service/import/overwrite-params/pages.ts
  32. 4 1
      apps/app/src/server/service/import/overwrite-params/revisions.ts
  33. 14 7
      apps/app/src/server/service/in-app-notification/in-app-notification-utils.ts
  34. 4 4
      apps/app/src/server/service/interfaces/export.ts
  35. 3 3
      apps/app/src/server/service/interfaces/search.ts
  36. 2 2
      apps/app/src/server/service/normalize-data/convert-null-to-empty-granted-arrays.ts
  37. 15 7
      apps/app/src/server/service/normalize-data/convert-revision-page-id-to-objectid.ts
  38. 28 24
      apps/app/src/server/service/normalize-data/delete-vector-stores-orphaned-from-ai-assistant.ts
  39. 1 1
      apps/app/src/server/service/normalize-data/index.ts
  40. 3 3
      apps/app/src/server/service/normalize-data/rename-duplicate-root-pages.ts
  41. 0 8
      biome.json

+ 8 - 0
apps/app/.eslintrc.js

@@ -88,6 +88,14 @@ module.exports = {
     'src/server/service/system-events/**',
     'src/server/service/user-notification/**',
     'src/server/service/yjs/**',
+    'src/server/service/file-uploader/**',
+    'src/server/service/global-notification/**',
+    'src/server/service/growi-bridge/**',
+    'src/server/service/growi-info/**',
+    'src/server/service/import/**',
+    'src/server/service/in-app-notification/**',
+    'src/server/service/interfaces/**',
+    'src/server/service/normalize-data/**',
   ],
   settings: {
     // resolve path aliases by eslint-import-resolver-typescript

+ 152 - 96
apps/app/src/server/service/file-uploader/aws/index.ts

@@ -1,38 +1,43 @@
-import type { Readable } from 'stream';
-
-import type { GetObjectCommandInput, HeadObjectCommandInput } from '@aws-sdk/client-s3';
+import type {
+  GetObjectCommandInput,
+  HeadObjectCommandInput,
+} from '@aws-sdk/client-s3';
 import {
-  S3Client,
-  HeadObjectCommand,
-  GetObjectCommand,
-  DeleteObjectsCommand,
-  PutObjectCommand,
+  AbortMultipartUploadCommand,
   DeleteObjectCommand,
+  DeleteObjectsCommand,
+  GetObjectCommand,
+  HeadObjectCommand,
   ListObjectsCommand,
   ObjectCannedACL,
-  AbortMultipartUploadCommand,
+  PutObjectCommand,
+  S3Client,
 } from '@aws-sdk/client-s3';
 import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
 import type { NonBlankString } from '@growi/core/dist/interfaces';
 import { toNonBlankStringOrUndefined } from '@growi/core/dist/interfaces';
+import type { Readable } from 'stream';
 import urljoin from 'url-join';
 
 import type Crowi from '~/server/crowi';
 import {
-  AttachmentType, FilePathOnStoragePrefix, ResponseMode, type RespondOptions,
+  AttachmentType,
+  FilePathOnStoragePrefix,
+  type RespondOptions,
+  ResponseMode,
 } from '~/server/interfaces/attachment';
 import type { IAttachmentDocument } from '~/server/models/attachment';
 import loggerFactory from '~/utils/logger';
 
 import { configManager } from '../../config-manager';
 import {
-  AbstractFileUploader, type TemporaryUrl, type SaveFileParam,
+  AbstractFileUploader,
+  type SaveFileParam,
+  type TemporaryUrl,
 } from '../file-uploader';
 import { createContentHeaders, getContentHeaderValue } from '../utils';
-
 import { AwsMultipartUploader } from './multipart-uploader';
 
-
 const logger = loggerFactory('growi:service:fileUploaderAws');
 
 /**
@@ -45,12 +50,11 @@ interface FileMeta {
 }
 
 // Cache holder to avoid repeated instantiation of S3 client
-let cachedS3Client: { configKey: string, client: S3Client } | null = null;
-const isFileExists = async(s3: S3Client, params: HeadObjectCommandInput) => {
+let cachedS3Client: { configKey: string; client: S3Client } | null = null;
+const isFileExists = async (s3: S3Client, params: HeadObjectCommandInput) => {
   try {
     await s3.send(new HeadObjectCommand(params));
-  }
-  catch (err) {
+  } catch (err) {
     if (err != null && err.code === 'NotFound') {
       return false;
     }
@@ -68,7 +72,9 @@ const ObjectCannedACLs = [
   ObjectCannedACL.public_read,
   ObjectCannedACL.public_read_write,
 ];
-const isValidObjectCannedACL = (acl: string | undefined): acl is ObjectCannedACL => {
+const isValidObjectCannedACL = (
+  acl: string | undefined,
+): acl is ObjectCannedACL => {
   return ObjectCannedACLs.includes(acl as ObjectCannedACL);
 };
 /**
@@ -91,8 +97,12 @@ const S3Factory = (): S3Client => {
   // Cache key based on configuration values to detect changes
   const accessKeyId = configManager.getConfig('aws:s3AccessKeyId');
   const secretAccessKey = configManager.getConfig('aws:s3SecretAccessKey');
-  const s3Region = toNonBlankStringOrUndefined(configManager.getConfig('aws:s3Region')); // Blank strings may remain in the DB, so convert with toNonBlankStringOrUndefined for safety
-  const s3CustomEndpoint = toNonBlankStringOrUndefined(configManager.getConfig('aws:s3CustomEndpoint'));
+  const s3Region = toNonBlankStringOrUndefined(
+    configManager.getConfig('aws:s3Region'),
+  ); // Blank strings may remain in the DB, so convert with toNonBlankStringOrUndefined for safety
+  const s3CustomEndpoint = toNonBlankStringOrUndefined(
+    configManager.getConfig('aws:s3CustomEndpoint'),
+  );
 
   const configKey = `${accessKeyId ?? ''}|${secretAccessKey ?? ''}|${s3Region ?? ''}|${s3CustomEndpoint ?? ''}`;
 
@@ -103,12 +113,13 @@ const S3Factory = (): S3Client => {
 
   // Create new client instance with connection pooling optimizations
   const client = new S3Client({
-    credentials: accessKeyId != null && secretAccessKey != null
-      ? {
-        accessKeyId,
-        secretAccessKey,
-      }
-      : undefined,
+    credentials:
+      accessKeyId != null && secretAccessKey != null
+        ? {
+            accessKeyId,
+            secretAccessKey,
+          }
+        : undefined,
     region: s3Region,
     endpoint: s3CustomEndpoint,
     forcePathStyle: s3CustomEndpoint != null, // s3ForcePathStyle renamed to forcePathStyle in v3
@@ -120,18 +131,17 @@ const S3Factory = (): S3Client => {
 };
 
 const getFilePathOnStorage = (attachment: IAttachmentDocument) => {
-  if (attachment.filePath != null) { // DEPRECATED: remains for backward compatibility for v3.3.x or below
+  if (attachment.filePath != null) {
+    // DEPRECATED: remains for backward compatibility for v3.3.x or below
     return attachment.filePath;
   }
 
   let dirName: string;
   if (attachment.attachmentType === AttachmentType.PAGE_BULK_EXPORT) {
     dirName = FilePathOnStoragePrefix.pageBulkExport;
-  }
-  else if (attachment.page != null) {
+  } else if (attachment.page != null) {
     dirName = FilePathOnStoragePrefix.attachment;
-  }
-  else {
+  } else {
     dirName = FilePathOnStoragePrefix.user;
   }
   const filePath = urljoin(dirName, attachment.fileName);
@@ -141,7 +151,6 @@ const getFilePathOnStorage = (attachment: IAttachmentDocument) => {
 
 // TODO: rewrite this module to be a type-safe implementation
 class AwsFileUploader extends AbstractFileUploader {
-
   /**
    * @inheritdoc
    */
@@ -174,7 +183,9 @@ class AwsFileUploader extends AbstractFileUploader {
   /**
    * @inheritdoc
    */
-  override async deleteFiles(attachments: IAttachmentDocument[]): Promise<void> {
+  override async deleteFiles(
+    attachments: IAttachmentDocument[],
+  ): Promise<void> {
     if (!this.getIsUploadable()) {
       throw new Error('AWS is not configured.');
     }
@@ -205,7 +216,9 @@ class AwsFileUploader extends AbstractFileUploader {
     // check file exists
     const isExists = await isFileExists(s3, params);
     if (!isExists) {
-      logger.warn(`Any object that relate to the Attachment (${filePath}) does not exist in AWS S3`);
+      logger.warn(
+        `Any object that relate to the Attachment (${filePath}) does not exist in AWS S3`,
+      );
       return;
     }
 
@@ -224,7 +237,10 @@ class AwsFileUploader extends AbstractFileUploader {
   /**
    * @inheritdoc
    */
-  override async uploadAttachment(readable: Readable, attachment: IAttachmentDocument): Promise<void> {
+  override async uploadAttachment(
+    readable: Readable,
+    attachment: IAttachmentDocument,
+  ): Promise<void> {
     if (!this.getIsUploadable()) {
       throw new Error('AWS is not configured.');
     }
@@ -247,20 +263,26 @@ class AwsFileUploader extends AbstractFileUploader {
           ACL: getS3PutObjectCannedAcl(),
           // put type and the file name for reference information when uploading
           ContentType: getContentHeaderValue(contentHeaders, 'Content-Type'),
-          ContentDisposition: getContentHeaderValue(contentHeaders, 'Content-Disposition'),
+          ContentDisposition: getContentHeaderValue(
+            contentHeaders,
+            'Content-Disposition',
+          ),
         }),
         { abortSignal: AbortSignal.timeout(uploadTimeout) },
       );
 
-      logger.debug(`File upload completed successfully: fileName=${attachment.fileName}`);
-    }
-    catch (error) {
+      logger.debug(
+        `File upload completed successfully: fileName=${attachment.fileName}`,
+      );
+    } catch (error) {
       // Handle timeout error specifically
       if (error.name === 'AbortError') {
         logger.warn(`Upload timeout: fileName=${attachment.fileName}`, error);
-      }
-      else {
-        logger.error(`File upload failed: fileName=${attachment.fileName}`, error);
+      } else {
+        logger.error(
+          `File upload failed: fileName=${attachment.fileName}`,
+          error,
+        );
       }
       // Re-throw the error to be handled by the caller.
       // The pipeline automatically handles stream cleanup on error.
@@ -278,7 +300,9 @@ class AwsFileUploader extends AbstractFileUploader {
   /**
    * @inheritdoc
    */
-  override async findDeliveryFile(attachment: IAttachmentDocument): Promise<NodeJS.ReadableStream> {
+  override async findDeliveryFile(
+    attachment: IAttachmentDocument,
+  ): Promise<NodeJS.ReadableStream> {
     if (!this.getIsReadable()) {
       throw new Error('AWS is not configured.');
     }
@@ -294,7 +318,9 @@ class AwsFileUploader extends AbstractFileUploader {
     // check file exists
     const isExists = await isFileExists(s3, params);
     if (!isExists) {
-      throw new Error(`Any object that relate to the Attachment (${filePath}) does not exist in AWS S3`);
+      throw new Error(
+        `Any object that relate to the Attachment (${filePath}) does not exist in AWS S3`,
+      );
     }
 
     try {
@@ -306,36 +332,53 @@ class AwsFileUploader extends AbstractFileUploader {
       }
 
       return 'stream' in body
-        ? body.stream() as unknown as NodeJS.ReadableStream // get stream from Blob and cast force
-        : body as unknown as NodeJS.ReadableStream; // cast force
-    }
-    catch (err) {
-      logger.error(`Failed to get file from AWS S3 for attachment ${attachment._id.toString()}:`, err);
-      throw new Error(`Couldn't get file from AWS for the Attachment (${attachment._id.toString()})`);
+        ? (body.stream() as unknown as NodeJS.ReadableStream) // get stream from Blob and cast force
+        : (body as unknown as NodeJS.ReadableStream); // cast force
+    } catch (err) {
+      logger.error(
+        `Failed to get file from AWS S3 for attachment ${attachment._id.toString()}:`,
+        err,
+      );
+      throw new Error(
+        `Couldn't get file from AWS for the Attachment (${attachment._id.toString()})`,
+      );
     }
   }
 
   /**
    * @inheritDoc
    */
-  override async generateTemporaryUrl(attachment: IAttachmentDocument, opts?: RespondOptions): Promise<TemporaryUrl> {
+  override async generateTemporaryUrl(
+    attachment: IAttachmentDocument,
+    opts?: RespondOptions,
+  ): Promise<TemporaryUrl> {
     if (!this.getIsUploadable()) {
       throw new Error('AWS is not configured.');
     }
 
     const s3 = S3Factory();
     const filePath = getFilePathOnStorage(attachment);
-    const lifetimeSecForTemporaryUrl = configManager.getConfig('aws:lifetimeSecForTemporaryUrl');
+    const lifetimeSecForTemporaryUrl = configManager.getConfig(
+      'aws:lifetimeSecForTemporaryUrl',
+    );
 
     // issue signed url (default: expires 120 seconds)
     // https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#getSignedUrl-property
     const isDownload = opts?.download ?? false;
-    const contentHeaders = createContentHeaders(attachment, { inline: !isDownload });
+    const contentHeaders = createContentHeaders(attachment, {
+      inline: !isDownload,
+    });
     const params: GetObjectCommandInput = {
       Bucket: getS3Bucket(),
       Key: filePath,
-      ResponseContentType: getContentHeaderValue(contentHeaders, 'Content-Type'),
-      ResponseContentDisposition: getContentHeaderValue(contentHeaders, 'Content-Disposition'),
+      ResponseContentType: getContentHeaderValue(
+        contentHeaders,
+        'Content-Type',
+      ),
+      ResponseContentDisposition: getContentHeaderValue(
+        contentHeaders,
+        'Content-Disposition',
+      ),
     };
     const signedUrl = await getSignedUrl(s3, new GetObjectCommand(params), {
       expiresIn: lifetimeSecForTemporaryUrl,
@@ -345,7 +388,6 @@ class AwsFileUploader extends AbstractFileUploader {
       url: signedUrl,
       lifetimeSec: lifetimeSecForTemporaryUrl,
     };
-
   }
 
   override createMultipartUploader(uploadKey: string, maxPartSize: number) {
@@ -353,56 +395,65 @@ class AwsFileUploader extends AbstractFileUploader {
     return new AwsMultipartUploader(s3, getS3Bucket(), uploadKey, maxPartSize);
   }
 
-  override async abortPreviousMultipartUpload(uploadKey: string, uploadId: string) {
+  override async abortPreviousMultipartUpload(
+    uploadKey: string,
+    uploadId: string,
+  ) {
     try {
-      await S3Factory().send(new AbortMultipartUploadCommand({
-        Bucket: getS3Bucket(),
-        Key: uploadKey,
-        UploadId: uploadId,
-      }));
-      logger.debug(`Successfully aborted multipart upload: uploadKey=${uploadKey}, uploadId=${uploadId}`);
-    }
-    catch (e) {
+      await S3Factory().send(
+        new AbortMultipartUploadCommand({
+          Bucket: getS3Bucket(),
+          Key: uploadKey,
+          UploadId: uploadId,
+        }),
+      );
+      logger.debug(
+        `Successfully aborted multipart upload: uploadKey=${uploadKey}, uploadId=${uploadId}`,
+      );
+    } catch (e) {
       // allow duplicate abort requests to ensure abortion
       if (e.response?.status !== 404) {
-        logger.error(`Failed to abort multipart upload: uploadKey=${uploadKey}, uploadId=${uploadId}`, e);
+        logger.error(
+          `Failed to abort multipart upload: uploadKey=${uploadKey}, uploadId=${uploadId}`,
+          e,
+        );
         throw e;
       }
-      logger.debug(`Multipart upload already aborted: uploadKey=${uploadKey}, uploadId=${uploadId}`);
+      logger.debug(
+        `Multipart upload already aborted: uploadKey=${uploadKey}, uploadId=${uploadId}`,
+      );
     }
   }
-
 }
 
 module.exports = (crowi: Crowi) => {
   const lib = new AwsFileUploader(crowi);
 
-  lib.isValidUploadSettings = function() {
-    return configManager.getConfig('aws:s3AccessKeyId') != null
-      && configManager.getConfig('aws:s3SecretAccessKey') != null
-      && (
-        configManager.getConfig('aws:s3Region') != null
-          || configManager.getConfig('aws:s3CustomEndpoint') != null
-      )
-      && configManager.getConfig('aws:s3Bucket') != null;
-  };
+  lib.isValidUploadSettings = () =>
+    configManager.getConfig('aws:s3AccessKeyId') != null &&
+    configManager.getConfig('aws:s3SecretAccessKey') != null &&
+    (configManager.getConfig('aws:s3Region') != null ||
+      configManager.getConfig('aws:s3CustomEndpoint') != null) &&
+    configManager.getConfig('aws:s3Bucket') != null;
 
-  lib.saveFile = async function({ filePath, contentType, data }) {
+  lib.saveFile = async ({ filePath, contentType, data }) => {
     const s3 = S3Factory();
 
-    return s3.send(new PutObjectCommand({
-      Bucket: getS3Bucket(),
-      ContentType: contentType,
-      Key: filePath,
-      Body: data,
-      ACL: getS3PutObjectCannedAcl(),
-    }));
+    return s3.send(
+      new PutObjectCommand({
+        Bucket: getS3Bucket(),
+        ContentType: contentType,
+        Key: filePath,
+        Body: data,
+        ACL: getS3PutObjectCannedAcl(),
+      }),
+    );
   };
 
   /**
    * List files in storage
    */
-  (lib as any).listFiles = async function() {
+  (lib as any).listFiles = async () => {
     if (!lib.getIsReadable()) {
       throw new Error('AWS is not configured.');
     }
@@ -418,22 +469,27 @@ module.exports = (crowi: Crowi) => {
     // handle pagination
     while (shouldContinue) {
       // eslint-disable-next-line no-await-in-loop
-      const { Contents = [], IsTruncated, NextMarker } = await s3.send(new ListObjectsCommand({
-        ...params,
-        Marker: nextMarker,
-      }));
-      files.push(...(
-        Contents.map(({ Key, Size }) => ({
+      const {
+        Contents = [],
+        IsTruncated,
+        NextMarker,
+      } = await s3.send(
+        new ListObjectsCommand({
+          ...params,
+          Marker: nextMarker,
+        }),
+      );
+      files.push(
+        ...Contents.map(({ Key, Size }) => ({
           name: Key as string,
           size: Size as number,
-        }))
-      ));
+        })),
+      );
 
       if (!IsTruncated) {
         shouldContinue = false;
         nextMarker = undefined;
-      }
-      else {
+      } else {
         nextMarker = NextMarker;
       }
     }

+ 65 - 39
apps/app/src/server/service/file-uploader/aws/multipart-uploader.ts

@@ -1,26 +1,42 @@
 import {
-  CreateMultipartUploadCommand, UploadPartCommand, type S3Client, CompleteMultipartUploadCommand, AbortMultipartUploadCommand,
+  AbortMultipartUploadCommand,
+  CompleteMultipartUploadCommand,
+  CreateMultipartUploadCommand,
   HeadObjectCommand,
+  type S3Client,
+  UploadPartCommand,
 } from '@aws-sdk/client-s3';
 
 import loggerFactory from '~/utils/logger';
 
-import { MultipartUploader, UploadStatus, type IMultipartUploader } from '../multipart-uploader';
-
-
-const logger = loggerFactory('growi:services:fileUploaderAws:multipartUploader');
+import {
+  type IMultipartUploader,
+  MultipartUploader,
+  UploadStatus,
+} from '../multipart-uploader';
 
-export type IAwsMultipartUploader = IMultipartUploader
+const logger = loggerFactory(
+  'growi:services:fileUploaderAws:multipartUploader',
+);
 
-export class AwsMultipartUploader extends MultipartUploader implements IAwsMultipartUploader {
+export type IAwsMultipartUploader = IMultipartUploader;
 
+export class AwsMultipartUploader
+  extends MultipartUploader
+  implements IAwsMultipartUploader
+{
   private bucket: string | undefined;
 
   private s3Client: S3Client;
 
-  private parts: { PartNumber: number; ETag: string | undefined; }[] = [];
+  private parts: { PartNumber: number; ETag: string | undefined }[] = [];
 
-  constructor(s3Client: S3Client, bucket: string | undefined, uploadKey: string, maxPartSize: number) {
+  constructor(
+    s3Client: S3Client,
+    bucket: string | undefined,
+    uploadKey: string,
+    maxPartSize: number,
+  ) {
     super(uploadKey, maxPartSize);
 
     this.s3Client = s3Client;
@@ -31,10 +47,12 @@ export class AwsMultipartUploader extends MultipartUploader implements IAwsMulti
   async initUpload(): Promise<void> {
     this.validateUploadStatus(UploadStatus.BEFORE_INIT);
 
-    const response = await this.s3Client.send(new CreateMultipartUploadCommand({
-      Bucket: this.bucket,
-      Key: this.uploadKey,
-    }));
+    const response = await this.s3Client.send(
+      new CreateMultipartUploadCommand({
+        Bucket: this.bucket,
+        Key: this.uploadKey,
+      }),
+    );
     if (response.UploadId == null) {
       throw Error('UploadId is empty');
     }
@@ -47,13 +65,15 @@ export class AwsMultipartUploader extends MultipartUploader implements IAwsMulti
     this.validateUploadStatus(UploadStatus.IN_PROGRESS);
     this.validatePartSize(part.length);
 
-    const uploadMetaData = await this.s3Client.send(new UploadPartCommand({
-      Body: part,
-      Bucket: this.bucket,
-      Key: this.uploadKey,
-      PartNumber: partNumber,
-      UploadId: this.uploadId,
-    }));
+    const uploadMetaData = await this.s3Client.send(
+      new UploadPartCommand({
+        Body: part,
+        Bucket: this.bucket,
+        Key: this.uploadKey,
+        PartNumber: partNumber,
+        UploadId: this.uploadId,
+      }),
+    );
 
     this.parts.push({
       PartNumber: partNumber,
@@ -65,14 +85,16 @@ export class AwsMultipartUploader extends MultipartUploader implements IAwsMulti
   async completeUpload(): Promise<void> {
     this.validateUploadStatus(UploadStatus.IN_PROGRESS);
 
-    await this.s3Client.send(new CompleteMultipartUploadCommand({
-      Bucket: this.bucket,
-      Key: this.uploadKey,
-      UploadId: this.uploadId,
-      MultipartUpload: {
-        Parts: this.parts,
-      },
-    }));
+    await this.s3Client.send(
+      new CompleteMultipartUploadCommand({
+        Bucket: this.bucket,
+        Key: this.uploadKey,
+        UploadId: this.uploadId,
+        MultipartUpload: {
+          Parts: this.parts,
+        },
+      }),
+    );
     this.currentStatus = UploadStatus.COMPLETED;
     logger.info(`Multipart upload completed. Upload key: ${this.uploadKey}`);
   }
@@ -80,25 +102,29 @@ export class AwsMultipartUploader extends MultipartUploader implements IAwsMulti
   async abortUpload(): Promise<void> {
     this.validateUploadStatus(UploadStatus.IN_PROGRESS);
 
-    await this.s3Client.send(new AbortMultipartUploadCommand({
-      Bucket: this.bucket,
-      Key: this.uploadKey,
-      UploadId: this.uploadId,
-    }));
+    await this.s3Client.send(
+      new AbortMultipartUploadCommand({
+        Bucket: this.bucket,
+        Key: this.uploadKey,
+        UploadId: this.uploadId,
+      }),
+    );
     this.currentStatus = UploadStatus.ABORTED;
     logger.info(`Multipart upload aborted. Upload key: ${this.uploadKey}`);
   }
 
   async getUploadedFileSize(): Promise<number> {
     if (this.currentStatus === UploadStatus.COMPLETED) {
-      const headData = await this.s3Client.send(new HeadObjectCommand({
-        Bucket: this.bucket,
-        Key: this.uploadKey,
-      }));
-      if (headData.ContentLength == null) throw Error('Could not fetch uploaded file size');
+      const headData = await this.s3Client.send(
+        new HeadObjectCommand({
+          Bucket: this.bucket,
+          Key: this.uploadKey,
+        }),
+      );
+      if (headData.ContentLength == null)
+        throw Error('Could not fetch uploaded file size');
       this._uploadedFileSize = headData.ContentLength;
     }
     return this._uploadedFileSize;
   }
-
 }

+ 127 - 67
apps/app/src/server/service/file-uploader/azure.ts

@@ -1,33 +1,37 @@
-import type { Readable } from 'stream';
-
 import type { TokenCredential } from '@azure/identity';
 import { ClientSecretCredential } from '@azure/identity';
 import type {
   BlobClient,
-  BlockBlobClient,
   BlobDeleteOptions,
+  BlockBlobClient,
   ContainerClient,
 } from '@azure/storage-blob';
 import {
-  generateBlobSASQueryParameters,
+  type BlobDeleteIfExistsResponse,
   BlobServiceClient,
+  type BlockBlobParallelUploadOptions,
+  type BlockBlobUploadResponse,
   ContainerSASPermissions,
+  generateBlobSASQueryParameters,
   SASProtocol,
-  type BlobDeleteIfExistsResponse,
-  type BlockBlobUploadResponse,
-  type BlockBlobParallelUploadOptions,
 } from '@azure/storage-blob';
 import { toNonBlankStringOrUndefined } from '@growi/core/dist/interfaces';
+import type { Readable } from 'stream';
 
 import type Crowi from '~/server/crowi';
-import { FilePathOnStoragePrefix, ResponseMode, type RespondOptions } from '~/server/interfaces/attachment';
+import {
+  FilePathOnStoragePrefix,
+  type RespondOptions,
+  ResponseMode,
+} from '~/server/interfaces/attachment';
 import type { IAttachmentDocument } from '~/server/models/attachment';
 import loggerFactory from '~/utils/logger';
 
 import { configManager } from '../config-manager';
-
 import {
-  AbstractFileUploader, type TemporaryUrl, type SaveFileParam,
+  AbstractFileUploader,
+  type SaveFileParam,
+  type TemporaryUrl,
 } from './file-uploader';
 import { createContentHeaders, getContentHeaderValue } from './utils';
 
@@ -41,15 +45,17 @@ interface FileMeta {
 }
 
 type AzureConfig = {
-  accountName: string,
-  containerName: string,
-}
+  accountName: string;
+  containerName: string;
+};
 
 // Cache holders to avoid repeated instantiation of credential and clients
-let cachedCredential: { key: string, credential: TokenCredential } | null = null;
-let cachedBlobServiceClient: { key: string, client: BlobServiceClient } | null = null;
-let cachedContainerClient: { key: string, client: ContainerClient } | null = null;
-
+let cachedCredential: { key: string; credential: TokenCredential } | null =
+  null;
+let cachedBlobServiceClient: { key: string; client: BlobServiceClient } | null =
+  null;
+let cachedContainerClient: { key: string; client: ContainerClient } | null =
+  null;
 
 function getAzureConfig(): AzureConfig {
   const accountName = configManager.getConfig('azure:storageAccountName');
@@ -67,12 +73,20 @@ function getAzureConfig(): AzureConfig {
 
 function getCredential(): TokenCredential {
   // Build cache key from credential-related configs
-  const tenantId = toNonBlankStringOrUndefined(configManager.getConfig('azure:tenantId'));
-  const clientId = toNonBlankStringOrUndefined(configManager.getConfig('azure:clientId'));
-  const clientSecret = toNonBlankStringOrUndefined(configManager.getConfig('azure:clientSecret'));
+  const tenantId = toNonBlankStringOrUndefined(
+    configManager.getConfig('azure:tenantId'),
+  );
+  const clientId = toNonBlankStringOrUndefined(
+    configManager.getConfig('azure:clientId'),
+  );
+  const clientSecret = toNonBlankStringOrUndefined(
+    configManager.getConfig('azure:clientSecret'),
+  );
 
   if (tenantId == null || clientId == null || clientSecret == null) {
-    throw new Error(`Azure Blob Storage missing required configuration: tenantId=${tenantId}, clientId=${clientId}, clientSecret=${clientSecret}`);
+    throw new Error(
+      `Azure Blob Storage missing required configuration: tenantId=${tenantId}, clientId=${clientId}, clientSecret=${clientSecret}`,
+    );
   }
 
   const key = `${tenantId}|${clientId}|${clientSecret}`;
@@ -82,7 +96,11 @@ function getCredential(): TokenCredential {
     return cachedCredential.credential;
   }
 
-  const credential = new ClientSecretCredential(tenantId, clientId, clientSecret);
+  const credential = new ClientSecretCredential(
+    tenantId,
+    clientId,
+    clientSecret,
+  );
   cachedCredential = { key, credential };
   return credential;
 }
@@ -91,7 +109,7 @@ function getBlobServiceClient(): BlobServiceClient {
   const { accountName } = getAzureConfig();
   // Include credential cache key to ensure we re-create if cred changed
   const credential = getCredential();
-  const credentialKey = (cachedCredential?.key) ?? 'unknown-cred';
+  const credentialKey = cachedCredential?.key ?? 'unknown-cred';
   const key = `${accountName}|${credentialKey}`;
 
   if (cachedBlobServiceClient != null && cachedBlobServiceClient.key === key) {
@@ -110,7 +128,7 @@ function getBlobServiceClient(): BlobServiceClient {
 
 async function getContainerClient(): Promise<ContainerClient> {
   const { accountName, containerName } = getAzureConfig();
-  const credentialKey = (cachedCredential?.key) ?? 'unknown-cred';
+  const credentialKey = cachedCredential?.key ?? 'unknown-cred';
   const key = `${accountName}|${containerName}|${credentialKey}`;
 
   if (cachedContainerClient != null && cachedContainerClient.key === key) {
@@ -124,12 +142,14 @@ async function getContainerClient(): Promise<ContainerClient> {
 }
 
 function getFilePathOnStorage(attachment: IAttachmentDocument) {
-  const dirName = (attachment.page != null) ? FilePathOnStoragePrefix.attachment : FilePathOnStoragePrefix.user;
+  const dirName =
+    attachment.page != null
+      ? FilePathOnStoragePrefix.attachment
+      : FilePathOnStoragePrefix.user;
   return urljoin(dirName, attachment.fileName);
 }
 
 class AzureFileUploader extends AbstractFileUploader {
-
   /**
    * @inheritdoc
    */
@@ -137,8 +157,7 @@ class AzureFileUploader extends AbstractFileUploader {
     try {
       getAzureConfig();
       return true;
-    }
-    catch (e) {
+    } catch (e) {
       logger.error(e);
       return false;
     }
@@ -166,7 +185,8 @@ class AzureFileUploader extends AbstractFileUploader {
     const containerClient = await getContainerClient();
     const blockBlobClient = await containerClient.getBlockBlobClient(filePath);
     const options: BlobDeleteOptions = { deleteSnapshots: 'include' };
-    const blobDeleteIfExistsResponse: BlobDeleteIfExistsResponse = await blockBlobClient.deleteIfExists(options);
+    const blobDeleteIfExistsResponse: BlobDeleteIfExistsResponse =
+      await blockBlobClient.deleteIfExists(options);
     if (!blobDeleteIfExistsResponse.errorCode) {
       logger.info(`deleted blob ${filePath}`);
     }
@@ -175,7 +195,9 @@ class AzureFileUploader extends AbstractFileUploader {
   /**
    * @inheritdoc
    */
-  override async deleteFiles(attachments: IAttachmentDocument[]): Promise<void> {
+  override async deleteFiles(
+    attachments: IAttachmentDocument[],
+  ): Promise<void> {
     if (!this.getIsUploadable()) {
       throw new Error('Azure is not configured.');
     }
@@ -187,7 +209,10 @@ class AzureFileUploader extends AbstractFileUploader {
   /**
    * @inheritdoc
    */
-  override async uploadAttachment(readable: Readable, attachment: IAttachmentDocument): Promise<void> {
+  override async uploadAttachment(
+    readable: Readable,
+    attachment: IAttachmentDocument,
+  ): Promise<void> {
     if (!this.getIsUploadable()) {
       throw new Error('Azure is not configured.');
     }
@@ -195,7 +220,8 @@ class AzureFileUploader extends AbstractFileUploader {
     logger.debug(`File uploading: fileName=${attachment.fileName}`);
     const filePath = getFilePathOnStorage(attachment);
     const containerClient = await getContainerClient();
-    const blockBlobClient: BlockBlobClient = containerClient.getBlockBlobClient(filePath);
+    const blockBlobClient: BlockBlobClient =
+      containerClient.getBlockBlobClient(filePath);
     const contentHeaders = createContentHeaders(attachment);
 
     try {
@@ -204,21 +230,30 @@ class AzureFileUploader extends AbstractFileUploader {
       await blockBlobClient.uploadStream(readable, undefined, undefined, {
         blobHTTPHeaders: {
           // put type and the file name for reference information when uploading
-          blobContentType: getContentHeaderValue(contentHeaders, 'Content-Type'),
-          blobContentDisposition: getContentHeaderValue(contentHeaders, 'Content-Disposition'),
+          blobContentType: getContentHeaderValue(
+            contentHeaders,
+            'Content-Type',
+          ),
+          blobContentDisposition: getContentHeaderValue(
+            contentHeaders,
+            'Content-Disposition',
+          ),
         },
         abortSignal: AbortSignal.timeout(uploadTimeout),
       });
 
-      logger.debug(`File upload completed successfully: fileName=${attachment.fileName}`);
-    }
-    catch (error) {
+      logger.debug(
+        `File upload completed successfully: fileName=${attachment.fileName}`,
+      );
+    } catch (error) {
       // Handle timeout error specifically
       if (error.name === 'AbortError') {
         logger.warn(`Upload timeout: fileName=${attachment.fileName}`, error);
-      }
-      else {
-        logger.error(`File upload failed: fileName=${attachment.fileName}`, error);
+      } else {
+        logger.error(
+          `File upload failed: fileName=${attachment.fileName}`,
+          error,
+        );
       }
       // Re-throw the error to be handled by the caller.
       // The pipeline automatically handles stream cleanup on error.
@@ -239,13 +274,17 @@ class AzureFileUploader extends AbstractFileUploader {
    * @inheritdoc
    */
   override respond(): void {
-    throw new Error('AzureFileUploader does not support ResponseMode.DELEGATE.');
+    throw new Error(
+      'AzureFileUploader does not support ResponseMode.DELEGATE.',
+    );
   }
 
   /**
    * @inheritdoc
    */
-  override async findDeliveryFile(attachment: IAttachmentDocument): Promise<NodeJS.ReadableStream> {
+  override async findDeliveryFile(
+    attachment: IAttachmentDocument,
+  ): Promise<NodeJS.ReadableStream> {
     if (!this.getIsReadable()) {
       throw new Error('Azure is not configured.');
     }
@@ -259,7 +298,9 @@ class AzureFileUploader extends AbstractFileUploader {
       throw new Error(downloadResponse.errorCode);
     }
     if (!downloadResponse?.readableStreamBody) {
-      throw new Error(`Coudn't get file from Azure for the Attachment (${filePath})`);
+      throw new Error(
+        `Coudn't get file from Azure for the Attachment (${filePath})`,
+      );
     }
 
     return downloadResponse.readableStreamBody;
@@ -269,21 +310,27 @@ class AzureFileUploader extends AbstractFileUploader {
    * @inheritDoc
    * @see https://learn.microsoft.com/en-us/azure/storage/blobs/storage-blob-create-user-delegation-sas-javascript
    */
-  override async generateTemporaryUrl(attachment: IAttachmentDocument, opts?: RespondOptions): Promise<TemporaryUrl> {
+  override async generateTemporaryUrl(
+    attachment: IAttachmentDocument,
+    opts?: RespondOptions,
+  ): Promise<TemporaryUrl> {
     if (!this.getIsUploadable()) {
       throw new Error('Azure Blob is not configured.');
     }
 
-    const lifetimeSecForTemporaryUrl = configManager.getConfig('azure:lifetimeSecForTemporaryUrl');
+    const lifetimeSecForTemporaryUrl = configManager.getConfig(
+      'azure:lifetimeSecForTemporaryUrl',
+    );
 
-    const url = await (async() => {
+    const url = await (async () => {
       const containerClient = await getContainerClient();
       const filePath = getFilePathOnStorage(attachment);
-      const blockBlobClient = await containerClient.getBlockBlobClient(filePath);
+      const blockBlobClient =
+        await containerClient.getBlockBlobClient(filePath);
       return blockBlobClient.url;
     })();
 
-    const sasToken = await (async() => {
+    const sasToken = await (async () => {
       const { accountName, containerName } = getAzureConfig();
       // Reuse the same BlobServiceClient (singleton)
       const blobServiceClient = getBlobServiceClient();
@@ -291,25 +338,39 @@ class AzureFileUploader extends AbstractFileUploader {
       const now = Date.now();
       const startsOn = new Date(now - 30 * 1000);
       const expiresOn = new Date(now + lifetimeSecForTemporaryUrl * 1000);
-      const userDelegationKey = await blobServiceClient.getUserDelegationKey(startsOn, expiresOn);
+      const userDelegationKey = await blobServiceClient.getUserDelegationKey(
+        startsOn,
+        expiresOn,
+      );
 
       const isDownload = opts?.download ?? false;
-      const contentHeaders = createContentHeaders(attachment, { inline: !isDownload });
+      const contentHeaders = createContentHeaders(attachment, {
+        inline: !isDownload,
+      });
 
       // https://github.com/Azure/azure-sdk-for-js/blob/d4d55f73/sdk/storage/storage-blob/src/ContainerSASPermissions.ts#L24
       // r:read, a:add, c:create, w:write, d:delete, l:list
       const containerPermissionsForAnonymousUser = 'rl';
       const sasOptions = {
         containerName,
-        permissions: ContainerSASPermissions.parse(containerPermissionsForAnonymousUser),
+        permissions: ContainerSASPermissions.parse(
+          containerPermissionsForAnonymousUser,
+        ),
         protocol: SASProtocol.HttpsAndHttp,
         startsOn,
         expiresOn,
         contentType: getContentHeaderValue(contentHeaders, 'Content-Type'),
-        contentDisposition: getContentHeaderValue(contentHeaders, 'Content-Disposition'),
+        contentDisposition: getContentHeaderValue(
+          contentHeaders,
+          'Content-Disposition',
+        ),
       };
 
-      return generateBlobSASQueryParameters(sasOptions, userDelegationKey, accountName).toString();
+      return generateBlobSASQueryParameters(
+        sasOptions,
+        userDelegationKey,
+        accountName,
+      ).toString();
     })();
 
     const signedUrl = `${url}?${sasToken}`;
@@ -318,33 +379,34 @@ class AzureFileUploader extends AbstractFileUploader {
       url: signedUrl,
       lifetimeSec: lifetimeSecForTemporaryUrl,
     };
-
   }
-
 }
 
 module.exports = (crowi: Crowi) => {
   const lib = new AzureFileUploader(crowi);
 
-  lib.isValidUploadSettings = function() {
-    return configManager.getConfig('azure:storageAccountName') != null
-      && configManager.getConfig('azure:storageContainerName') != null;
-  };
+  lib.isValidUploadSettings = () =>
+    configManager.getConfig('azure:storageAccountName') != null &&
+    configManager.getConfig('azure:storageContainerName') != null;
 
-  lib.saveFile = async function({ filePath, contentType, data }) {
+  lib.saveFile = async ({ filePath, contentType, data }) => {
     const containerClient = await getContainerClient();
-    const blockBlobClient: BlockBlobClient = containerClient.getBlockBlobClient(filePath);
+    const blockBlobClient: BlockBlobClient =
+      containerClient.getBlockBlobClient(filePath);
     const options: BlockBlobParallelUploadOptions = {
       blobHTTPHeaders: {
         blobContentType: contentType,
       },
     };
-    const blockBlobUploadResponse: BlockBlobUploadResponse = await blockBlobClient.upload(data, data.length, options);
-    if (blockBlobUploadResponse.errorCode) { throw new Error(blockBlobUploadResponse.errorCode) }
+    const blockBlobUploadResponse: BlockBlobUploadResponse =
+      await blockBlobClient.upload(data, data.length, options);
+    if (blockBlobUploadResponse.errorCode) {
+      throw new Error(blockBlobUploadResponse.errorCode);
+    }
     return;
   };
 
-  (lib as any).listFiles = async function() {
+  (lib as any).listFiles = async () => {
     if (!lib.getIsReadable()) {
       throw new Error('Azure is not configured.');
     }
@@ -359,9 +421,7 @@ module.exports = (crowi: Crowi) => {
       includeVersions: false,
       prefix: '',
     })) {
-      files.push(
-        { name: blob.name, size: blob.properties.contentLength || 0 },
-      );
+      files.push({ name: blob.name, size: blob.properties.contentLength || 0 });
     }
 
     return files;

+ 100 - 48
apps/app/src/server/service/file-uploader/file-uploader.ts

@@ -1,57 +1,77 @@
-import type { Readable } from 'stream';
-
 import type { Response } from 'express';
 import type { HydratedDocument } from 'mongoose';
+import type { Readable } from 'stream';
 import { v4 as uuidv4 } from 'uuid';
 
 import type { ICheckLimitResult } from '~/interfaces/attachment';
 import type Crowi from '~/server/crowi';
-import { type RespondOptions, ResponseMode } from '~/server/interfaces/attachment';
-import { Attachment, type IAttachmentDocument } from '~/server/models/attachment';
+import {
+  type RespondOptions,
+  ResponseMode,
+} from '~/server/interfaces/attachment';
+import {
+  Attachment,
+  type IAttachmentDocument,
+} from '~/server/models/attachment';
 import loggerFactory from '~/utils/logger';
 
 import { configManager } from '../config-manager';
-
 import type { MultipartUploader } from './multipart-uploader';
 
 const logger = loggerFactory('growi:service:fileUploader');
 
-
 export type SaveFileParam = {
-  filePath: string,
-  contentType: string,
-  data,
-}
+  filePath: string;
+  contentType: string;
+  data;
+};
 
 export type TemporaryUrl = {
-  url: string,
-  lifetimeSec: number,
-}
+  url: string;
+  lifetimeSec: number;
+};
 
 export interface FileUploader {
-  getIsUploadable(): boolean,
-  isWritable(): Promise<boolean>,
-  getIsReadable(): boolean,
-  isValidUploadSettings(): boolean,
-  getFileUploadEnabled(): boolean,
-  listFiles(): any,
-  saveFile(param: SaveFileParam): Promise<any>,
-  deleteFile(attachment: HydratedDocument<IAttachmentDocument>): void,
-  deleteFiles(attachments: HydratedDocument<IAttachmentDocument>[]): void,
-  getFileUploadTotalLimit(): number,
-  getTotalFileSize(): Promise<number>,
-  checkLimit(uploadFileSize: number): Promise<ICheckLimitResult>,
-  determineResponseMode(): ResponseMode,
-  uploadAttachment(readable: Readable, attachment: IAttachmentDocument): Promise<void>,
-  respond(res: Response, attachment: IAttachmentDocument, opts?: RespondOptions): void,
-  findDeliveryFile(attachment: IAttachmentDocument): Promise<NodeJS.ReadableStream>,
-  generateTemporaryUrl(attachment: IAttachmentDocument, opts?: RespondOptions): Promise<TemporaryUrl>,
-  createMultipartUploader: (uploadKey: string, maxPartSize: number) => MultipartUploader,
-  abortPreviousMultipartUpload: (uploadKey: string, uploadId: string) => Promise<void>
+  getIsUploadable(): boolean;
+  isWritable(): Promise<boolean>;
+  getIsReadable(): boolean;
+  isValidUploadSettings(): boolean;
+  getFileUploadEnabled(): boolean;
+  listFiles(): any;
+  saveFile(param: SaveFileParam): Promise<any>;
+  deleteFile(attachment: HydratedDocument<IAttachmentDocument>): void;
+  deleteFiles(attachments: HydratedDocument<IAttachmentDocument>[]): void;
+  getFileUploadTotalLimit(): number;
+  getTotalFileSize(): Promise<number>;
+  checkLimit(uploadFileSize: number): Promise<ICheckLimitResult>;
+  determineResponseMode(): ResponseMode;
+  uploadAttachment(
+    readable: Readable,
+    attachment: IAttachmentDocument,
+  ): Promise<void>;
+  respond(
+    res: Response,
+    attachment: IAttachmentDocument,
+    opts?: RespondOptions,
+  ): void;
+  findDeliveryFile(
+    attachment: IAttachmentDocument,
+  ): Promise<NodeJS.ReadableStream>;
+  generateTemporaryUrl(
+    attachment: IAttachmentDocument,
+    opts?: RespondOptions,
+  ): Promise<TemporaryUrl>;
+  createMultipartUploader: (
+    uploadKey: string,
+    maxPartSize: number,
+  ) => MultipartUploader;
+  abortPreviousMultipartUpload: (
+    uploadKey: string,
+    uploadId: string,
+  ) => Promise<void>;
 }
 
 export abstract class AbstractFileUploader implements FileUploader {
-
   private crowi: Crowi;
 
   constructor(crowi: Crowi) {
@@ -59,7 +79,10 @@ export abstract class AbstractFileUploader implements FileUploader {
   }
 
   getIsUploadable() {
-    return !configManager.getConfig('app:fileUploadDisabled') && this.isValidUploadSettings();
+    return (
+      !configManager.getConfig('app:fileUploadDisabled') &&
+      this.isValidUploadSettings()
+    );
   }
 
   /**
@@ -68,7 +91,8 @@ export abstract class AbstractFileUploader implements FileUploader {
    */
   async isWritable() {
     const filePath = `${uuidv4()}.growi`;
-    const data = 'This file was created during g2g transfer to check write permission. You can safely remove this file.';
+    const data =
+      'This file was created during g2g transfer to check write permission. You can safely remove this file.';
 
     try {
       await this.saveFile({
@@ -79,8 +103,7 @@ export abstract class AbstractFileUploader implements FileUploader {
       // TODO: delete tmp file in background
 
       return true;
-    }
-    catch (err) {
+    } catch (err) {
       logger.error(err);
       return false;
     }
@@ -107,7 +130,9 @@ export abstract class AbstractFileUploader implements FileUploader {
 
   abstract deleteFile(attachment: HydratedDocument<IAttachmentDocument>): void;
 
-  abstract deleteFiles(attachments: HydratedDocument<IAttachmentDocument>[]): void;
+  abstract deleteFiles(
+    attachments: HydratedDocument<IAttachmentDocument>[],
+  ): void;
 
   /**
    * Returns file upload total limit in bytes.
@@ -145,14 +170,24 @@ export abstract class AbstractFileUploader implements FileUploader {
    * Check files size limits for all uploaders
    *
    */
-  protected async doCheckLimit(uploadFileSize: number, maxFileSize: number, totalLimit: number): Promise<ICheckLimitResult> {
+  protected async doCheckLimit(
+    uploadFileSize: number,
+    maxFileSize: number,
+    totalLimit: number,
+  ): Promise<ICheckLimitResult> {
     if (uploadFileSize > maxFileSize) {
-      return { isUploadable: false, errorMessage: 'File size exceeds the size limit per file' };
+      return {
+        isUploadable: false,
+        errorMessage: 'File size exceeds the size limit per file',
+      };
     }
 
     const usingFilesSize = await this.getTotalFileSize();
     if (usingFilesSize + uploadFileSize > totalLimit) {
-      return { isUploadable: false, errorMessage: 'Uploading files reaches limit' };
+      return {
+        isUploadable: false,
+        errorMessage: 'Uploading files reaches limit',
+      };
     }
 
     return { isUploadable: true };
@@ -168,32 +203,49 @@ export abstract class AbstractFileUploader implements FileUploader {
   /**
    * Create a multipart uploader for cloud storage
    */
-  createMultipartUploader(uploadKey: string, maxPartSize: number): MultipartUploader {
+  createMultipartUploader(
+    uploadKey: string,
+    maxPartSize: number,
+  ): MultipartUploader {
     throw new Error('Multipart upload not available for file upload type');
   }
 
-  abstract uploadAttachment(readable: Readable, attachment: IAttachmentDocument): Promise<void>;
+  abstract uploadAttachment(
+    readable: Readable,
+    attachment: IAttachmentDocument,
+  ): Promise<void>;
 
   /**
    * Abort an existing multipart upload without creating a MultipartUploader instance
    */
-  abortPreviousMultipartUpload(uploadKey: string, uploadId: string): Promise<void> {
+  abortPreviousMultipartUpload(
+    uploadKey: string,
+    uploadId: string,
+  ): Promise<void> {
     throw new Error('Multipart upload not available for file upload type');
   }
 
   /**
    * Respond to the HTTP request.
    */
-  abstract respond(res: Response, attachment: IAttachmentDocument, opts?: RespondOptions): void;
+  abstract respond(
+    res: Response,
+    attachment: IAttachmentDocument,
+    opts?: RespondOptions,
+  ): void;
 
   /**
    * Find the file and Return ReadStream
    */
-  abstract findDeliveryFile(attachment: IAttachmentDocument): Promise<NodeJS.ReadableStream>;
+  abstract findDeliveryFile(
+    attachment: IAttachmentDocument,
+  ): Promise<NodeJS.ReadableStream>;
 
   /**
    * Generate temporaryUrl that is valid for a very short time
    */
-  abstract generateTemporaryUrl(attachment: IAttachmentDocument, opts?: RespondOptions): Promise<TemporaryUrl>;
-
+  abstract generateTemporaryUrl(
+    attachment: IAttachmentDocument,
+    opts?: RespondOptions,
+  ): Promise<TemporaryUrl>;
 }

+ 76 - 54
apps/app/src/server/service/file-uploader/gcs/index.ts

@@ -1,31 +1,35 @@
-import type { Readable } from 'stream';
-import { pipeline } from 'stream/promises';
-
 import { Storage } from '@google-cloud/storage';
 import { toNonBlankStringOrUndefined } from '@growi/core/dist/interfaces';
 import axios from 'axios';
+import type { Readable } from 'stream';
+import { pipeline } from 'stream/promises';
 import urljoin from 'url-join';
 
 import type Crowi from '~/server/crowi';
 import {
-  AttachmentType, FilePathOnStoragePrefix, ResponseMode, type RespondOptions,
+  AttachmentType,
+  FilePathOnStoragePrefix,
+  type RespondOptions,
+  ResponseMode,
 } from '~/server/interfaces/attachment';
 import type { IAttachmentDocument } from '~/server/models/attachment';
 import loggerFactory from '~/utils/logger';
 
 import { configManager } from '../../config-manager';
 import {
-  AbstractFileUploader, type TemporaryUrl, type SaveFileParam,
+  AbstractFileUploader,
+  type SaveFileParam,
+  type TemporaryUrl,
 } from '../file-uploader';
 import { createContentHeaders, getContentHeaderValue } from '../utils';
-
 import { GcsMultipartUploader } from './multipart-uploader';
 
 const logger = loggerFactory('growi:service:fileUploaderGcs');
 
-
 function getGcsBucket(): string {
-  const gcsBucket = toNonBlankStringOrUndefined(configManager.getConfig('gcs:bucket')); // Blank strings may remain in the DB, so convert with toNonBlankStringOrUndefined for safety
+  const gcsBucket = toNonBlankStringOrUndefined(
+    configManager.getConfig('gcs:bucket'),
+  ); // Blank strings may remain in the DB, so convert with toNonBlankStringOrUndefined for safety
   if (gcsBucket == null) {
     throw new Error('GCS bucket is not configured.');
   }
@@ -35,11 +39,14 @@ function getGcsBucket(): string {
 let storage: Storage;
 function getGcsInstance() {
   if (storage == null) {
-    const keyFilename = toNonBlankStringOrUndefined(configManager.getConfig('gcs:apiKeyJsonPath')); // Blank strings may remain in the DB, so convert with toNonBlankStringOrUndefined for safety
+    const keyFilename = toNonBlankStringOrUndefined(
+      configManager.getConfig('gcs:apiKeyJsonPath'),
+    ); // Blank strings may remain in the DB, so convert with toNonBlankStringOrUndefined for safety
     // see https://googleapis.dev/nodejs/storage/latest/Storage.html
-    storage = keyFilename != null
-      ? new Storage({ keyFilename }) // Create a client with explicit credentials
-      : new Storage(); // Create a client that uses Application Default Credentials
+    storage =
+      keyFilename != null
+        ? new Storage({ keyFilename }) // Create a client with explicit credentials
+        : new Storage(); // Create a client that uses Application Default Credentials
   }
   return storage;
 }
@@ -49,11 +56,9 @@ function getFilePathOnStorage(attachment: IAttachmentDocument) {
   let dirName: string;
   if (attachment.attachmentType === AttachmentType.PAGE_BULK_EXPORT) {
     dirName = FilePathOnStoragePrefix.pageBulkExport;
-  }
-  else if (attachment.page != null) {
+  } else if (attachment.page != null) {
     dirName = FilePathOnStoragePrefix.attachment;
-  }
-  else {
+  } else {
     dirName = FilePathOnStoragePrefix.user;
   }
   const filePath = urljoin(namespace, dirName, attachment.fileName);
@@ -73,7 +78,6 @@ async function isFileExists(file) {
 
 // TODO: rewrite this module to be a type-safe implementation
 class GcsFileUploader extends AbstractFileUploader {
-
   /**
    * @inheritdoc
    */
@@ -81,8 +85,7 @@ class GcsFileUploader extends AbstractFileUploader {
     try {
       getGcsBucket();
       return true;
-    }
-    catch (err) {
+    } catch (err) {
       logger.error(err);
       return false;
     }
@@ -113,7 +116,9 @@ class GcsFileUploader extends AbstractFileUploader {
   /**
    * @inheritdoc
    */
-  override async deleteFiles(attachments: IAttachmentDocument[]): Promise<void> {
+  override async deleteFiles(
+    attachments: IAttachmentDocument[],
+  ): Promise<void> {
     const filePaths = attachments.map((attachment) => {
       return getFilePathOnStorage(attachment);
     });
@@ -149,7 +154,10 @@ class GcsFileUploader extends AbstractFileUploader {
   /**
    * @inheritdoc
    */
-  override async uploadAttachment(readable: Readable, attachment: IAttachmentDocument): Promise<void> {
+  override async uploadAttachment(
+    readable: Readable,
+    attachment: IAttachmentDocument,
+  ): Promise<void> {
     if (!this.getIsUploadable()) {
       throw new Error('GCS is not configured.');
     }
@@ -171,21 +179,22 @@ class GcsFileUploader extends AbstractFileUploader {
       const uploadTimeout = configManager.getConfig('app:fileUploadTimeout');
 
       // Use AbortSignal.timeout() for robust timeout handling (Node.js 16+)
-      await pipeline(
-        readable,
-        writeStream,
-        { signal: AbortSignal.timeout(uploadTimeout) },
-      );
+      await pipeline(readable, writeStream, {
+        signal: AbortSignal.timeout(uploadTimeout),
+      });
 
-      logger.debug(`File upload completed successfully: fileName=${attachment.fileName}`);
-    }
-    catch (error) {
+      logger.debug(
+        `File upload completed successfully: fileName=${attachment.fileName}`,
+      );
+    } catch (error) {
       // Handle timeout error specifically
       if (error.name === 'AbortError') {
         logger.warn(`Upload timeout: fileName=${attachment.fileName}`, error);
-      }
-      else {
-        logger.error(`File upload failed: fileName=${attachment.fileName}`, error);
+      } else {
+        logger.error(
+          `File upload failed: fileName=${attachment.fileName}`,
+          error,
+        );
       }
       // Re-throw the error to be handled by the caller.
       // The pipeline automatically handles stream cleanup on error.
@@ -203,7 +212,9 @@ class GcsFileUploader extends AbstractFileUploader {
   /**
    * @inheritdoc
    */
-  override async findDeliveryFile(attachment: IAttachmentDocument): Promise<NodeJS.ReadableStream> {
+  override async findDeliveryFile(
+    attachment: IAttachmentDocument,
+  ): Promise<NodeJS.ReadableStream> {
     if (!this.getIsReadable()) {
       throw new Error('GCS is not configured.');
     }
@@ -216,22 +227,28 @@ class GcsFileUploader extends AbstractFileUploader {
     // check file exists
     const isExists = await isFileExists(file);
     if (!isExists) {
-      throw new Error(`Any object that relate to the Attachment (${filePath}) does not exist in GCS`);
+      throw new Error(
+        `Any object that relate to the Attachment (${filePath}) does not exist in GCS`,
+      );
     }
 
     try {
       return file.createReadStream();
-    }
-    catch (err) {
+    } catch (err) {
       logger.error(err);
-      throw new Error(`Coudn't get file from GCS for the Attachment (${attachment._id.toString()})`);
+      throw new Error(
+        `Coudn't get file from GCS for the Attachment (${attachment._id.toString()})`,
+      );
     }
   }
 
   /**
    * @inheritDoc
    */
-  override async generateTemporaryUrl(attachment: IAttachmentDocument, opts?: RespondOptions): Promise<TemporaryUrl> {
+  override async generateTemporaryUrl(
+    attachment: IAttachmentDocument,
+    opts?: RespondOptions,
+  ): Promise<TemporaryUrl> {
     if (!this.getIsUploadable()) {
       throw new Error('GCS is not configured.');
     }
@@ -240,24 +257,30 @@ class GcsFileUploader extends AbstractFileUploader {
     const myBucket = gcs.bucket(getGcsBucket());
     const filePath = getFilePathOnStorage(attachment);
     const file = myBucket.file(filePath);
-    const lifetimeSecForTemporaryUrl = configManager.getConfig('gcs:lifetimeSecForTemporaryUrl');
+    const lifetimeSecForTemporaryUrl = configManager.getConfig(
+      'gcs:lifetimeSecForTemporaryUrl',
+    );
 
     // issue signed url (default: expires 120 seconds)
     // https://cloud.google.com/storage/docs/access-control/signed-urls
     const isDownload = opts?.download ?? false;
-    const contentHeaders = createContentHeaders(attachment, { inline: !isDownload });
+    const contentHeaders = createContentHeaders(attachment, {
+      inline: !isDownload,
+    });
     const [signedUrl] = await file.getSignedUrl({
       action: 'read',
       expires: Date.now() + lifetimeSecForTemporaryUrl * 1000,
       responseType: getContentHeaderValue(contentHeaders, 'Content-Type'),
-      responseDisposition: getContentHeaderValue(contentHeaders, 'Content-Disposition'),
+      responseDisposition: getContentHeaderValue(
+        contentHeaders,
+        'Content-Disposition',
+      ),
     });
 
     return {
       url: signedUrl,
       lifetimeSec: lifetimeSecForTemporaryUrl,
     };
-
   }
 
   override createMultipartUploader(uploadKey: string, maxPartSize: number) {
@@ -266,11 +289,13 @@ class GcsFileUploader extends AbstractFileUploader {
     return new GcsMultipartUploader(myBucket, uploadKey, maxPartSize);
   }
 
-  override async abortPreviousMultipartUpload(uploadKey: string, uploadId: string) {
+  override async abortPreviousMultipartUpload(
+    uploadKey: string,
+    uploadId: string,
+  ) {
     try {
       await axios.delete(uploadId);
-    }
-    catch (e) {
+    } catch (e) {
       // allow 404: allow duplicate abort requests to ensure abortion
       // allow 499: it is the success response code for canceling upload
       // ref: https://cloud.google.com/storage/docs/performing-resumable-uploads#cancel-upload
@@ -279,19 +304,16 @@ class GcsFileUploader extends AbstractFileUploader {
       }
     }
   }
-
 }
 
-
-module.exports = function(crowi: Crowi) {
+module.exports = (crowi: Crowi) => {
   const lib = new GcsFileUploader(crowi);
 
-  lib.isValidUploadSettings = function() {
-    return configManager.getConfig('gcs:apiKeyJsonPath') != null
-      && configManager.getConfig('gcs:bucket') != null;
-  };
+  lib.isValidUploadSettings = () =>
+    configManager.getConfig('gcs:apiKeyJsonPath') != null &&
+    configManager.getConfig('gcs:bucket') != null;
 
-  lib.saveFile = async function({ filePath, contentType, data }) {
+  lib.saveFile = async ({ filePath, contentType, data }) => {
     const gcs = getGcsInstance();
     const myBucket = gcs.bucket(getGcsBucket());
 
@@ -301,7 +323,7 @@ module.exports = function(crowi: Crowi) {
   /**
    * List files in storage
    */
-  (lib as any).listFiles = async function() {
+  (lib as any).listFiles = async () => {
     if (!lib.getIsReadable()) {
       throw new Error('GCS is not configured.');
     }

+ 33 - 20
apps/app/src/server/service/file-uploader/gcs/multipart-uploader.ts

@@ -6,14 +6,22 @@ import urljoin from 'url-join';
 import loggerFactory from '~/utils/logger';
 
 import { configManager } from '../../config-manager';
-import { MultipartUploader, UploadStatus, type IMultipartUploader } from '../multipart-uploader';
-
-const logger = loggerFactory('growi:services:fileUploaderGcs:multipartUploader');
-
-export type IGcsMultipartUploader = IMultipartUploader
-
-export class GcsMultipartUploader extends MultipartUploader implements IGcsMultipartUploader {
-
+import {
+  type IMultipartUploader,
+  MultipartUploader,
+  UploadStatus,
+} from '../multipart-uploader';
+
+const logger = loggerFactory(
+  'growi:services:fileUploaderGcs:multipartUploader',
+);
+
+export type IGcsMultipartUploader = IMultipartUploader;
+
+export class GcsMultipartUploader
+  extends MultipartUploader
+  implements IGcsMultipartUploader
+{
   private file: File;
 
   // ref: https://cloud.google.com/storage/docs/performing-resumable-uploads?hl=en#chunked-upload
@@ -43,16 +51,20 @@ export class GcsMultipartUploader extends MultipartUploader implements IGcsMulti
     // Upload the whole part in one request, or divide it in chunks and upload depending on the part size
     if (part.length === this.maxPartSize) {
       await this.uploadChunk(part);
-    }
-    else if (this.minPartSize < part.length && part.length < this.maxPartSize) {
+    } else if (
+      this.minPartSize < part.length &&
+      part.length < this.maxPartSize
+    ) {
       const numOfMinPartSize = Math.floor(part.length / this.minPartSize);
-      const minPartSizeMultiplePartChunk = part.slice(0, numOfMinPartSize * this.minPartSize);
+      const minPartSizeMultiplePartChunk = part.slice(
+        0,
+        numOfMinPartSize * this.minPartSize,
+      );
       const lastPartChunk = part.slice(numOfMinPartSize * this.minPartSize);
 
       await this.uploadChunk(minPartSizeMultiplePartChunk);
       await this.uploadChunk(lastPartChunk, true);
-    }
-    else if (part.length < this.minPartSize) {
+    } else if (part.length < this.minPartSize) {
       await this.uploadChunk(part, true);
     }
   }
@@ -75,8 +87,7 @@ export class GcsMultipartUploader extends MultipartUploader implements IGcsMulti
 
     try {
       await axios.delete(this.uploadId);
-    }
-    catch (e) {
+    } catch (e) {
       // 499 is successful response code for canceling upload
       // ref: https://cloud.google.com/storage/docs/performing-resumable-uploads#cancel-upload
       if (e.response?.status !== 499) {
@@ -95,10 +106,14 @@ export class GcsMultipartUploader extends MultipartUploader implements IGcsMulti
     return this._uploadedFileSize;
   }
 
-  private uploadChunk = async(chunk, isLastUpload = false) => {
+  private uploadChunk = async (chunk, isLastUpload = false) => {
     // If chunk size is larger than the minimal part size, it is required to be a multiple of the minimal part size
     // ref: https://cloud.google.com/storage/docs/performing-resumable-uploads?hl=en#chunked-upload
-    if (chunk.length > this.minPartSize && chunk.length % this.minPartSize !== 0) throw Error(`chunk must be a multiple of ${this.minPartSize}`);
+    if (
+      chunk.length > this.minPartSize &&
+      chunk.length % this.minPartSize !== 0
+    )
+      throw Error(`chunk must be a multiple of ${this.minPartSize}`);
 
     const range = isLastUpload
       ? `bytes ${this._uploadedFileSize}-${this._uploadedFileSize + chunk.length - 1}/${this._uploadedFileSize + chunk.length}`
@@ -110,13 +125,11 @@ export class GcsMultipartUploader extends MultipartUploader implements IGcsMulti
           'Content-Range': `${range}`,
         },
       });
-    }
-    catch (e) {
+    } catch (e) {
       if (e.response?.status !== 308) {
         throw e;
       }
     }
     this._uploadedFileSize += chunk.length;
   };
-
 }

+ 79 - 40
apps/app/src/server/service/file-uploader/gridfs.ts

@@ -1,8 +1,7 @@
-import { Readable } from 'stream';
-import util from 'util';
-
 import mongoose from 'mongoose';
 import { createModel } from 'mongoose-gridfs';
+import { Readable } from 'stream';
+import util from 'util';
 
 import type Crowi from '~/server/crowi';
 import type { RespondOptions } from '~/server/interfaces/attachment';
@@ -10,8 +9,11 @@ import type { IAttachmentDocument } from '~/server/models/attachment';
 import loggerFactory from '~/utils/logger';
 
 import { configManager } from '../config-manager';
-
-import { AbstractFileUploader, type TemporaryUrl, type SaveFileParam } from './file-uploader';
+import {
+  AbstractFileUploader,
+  type SaveFileParam,
+  type TemporaryUrl,
+} from './file-uploader';
 import { createContentHeaders, getContentHeaderValue } from './utils';
 
 const logger = loggerFactory('growi:service:fileUploaderGridfs');
@@ -27,7 +29,7 @@ type PromisifiedUtils = {
   unlink: (file: object, done?: Function) => void;
   promisifiedWrite: (file: object, readable: Readable) => Promise<any>;
   promisifiedUnlink: (file: object) => Promise<any>;
-}
+};
 
 type AttachmentFileModel = mongoose.Model<any> & PromisifiedUtils;
 
@@ -40,10 +42,20 @@ let cachedConnection: mongoose.Connection; // Track the connection instance itse
  * Initialize GridFS models with connection instance monitoring
  * This prevents memory leaks from repeated model creation
  */
-function initializeGridFSModels(): { attachmentFileModel: AttachmentFileModel, chunkCollection: mongoose.Collection } {
+function initializeGridFSModels(): {
+  attachmentFileModel: AttachmentFileModel;
+  chunkCollection: mongoose.Collection;
+} {
   // Check if we can reuse cached models by comparing connection instance
-  if (cachedAttachmentFileModel != null && cachedChunkCollection != null && cachedConnection === mongoose.connection) {
-    return { attachmentFileModel: cachedAttachmentFileModel, chunkCollection: cachedChunkCollection };
+  if (
+    cachedAttachmentFileModel != null &&
+    cachedChunkCollection != null &&
+    cachedConnection === mongoose.connection
+  ) {
+    return {
+      attachmentFileModel: cachedAttachmentFileModel,
+      chunkCollection: cachedChunkCollection,
+    };
   }
 
   // Check connection state
@@ -62,8 +74,12 @@ function initializeGridFSModels(): { attachmentFileModel: AttachmentFileModel, c
 
   // Setup promisified methods on the model instance (not globally)
   if (!attachmentFileModel.promisifiedWrite) {
-    attachmentFileModel.promisifiedWrite = util.promisify(attachmentFileModel.write).bind(attachmentFileModel);
-    attachmentFileModel.promisifiedUnlink = util.promisify(attachmentFileModel.unlink).bind(attachmentFileModel);
+    attachmentFileModel.promisifiedWrite = util
+      .promisify(attachmentFileModel.write)
+      .bind(attachmentFileModel);
+    attachmentFileModel.promisifiedUnlink = util
+      .promisify(attachmentFileModel.unlink)
+      .bind(attachmentFileModel);
   }
 
   // Cache the instances
@@ -76,10 +92,8 @@ function initializeGridFSModels(): { attachmentFileModel: AttachmentFileModel, c
   return { attachmentFileModel, chunkCollection };
 }
 
-
 // TODO: rewrite this module to be a type-safe implementation
 class GridfsFileUploader extends AbstractFileUploader {
-
   /**
    * @inheritdoc
    */
@@ -108,10 +122,14 @@ class GridfsFileUploader extends AbstractFileUploader {
     const { attachmentFileModel } = initializeGridFSModels();
     const filenameValue = attachment.fileName;
 
-    const attachmentFile = await attachmentFileModel.findOne({ filename: filenameValue });
+    const attachmentFile = await attachmentFileModel.findOne({
+      filename: filenameValue,
+    });
 
     if (attachmentFile == null) {
-      logger.warn(`Any AttachmentFile that relate to the Attachment (${attachment._id.toString()}) does not exist in GridFS`);
+      logger.warn(
+        `Any AttachmentFile that relate to the Attachment (${attachment._id.toString()}) does not exist in GridFS`,
+      );
       return;
     }
 
@@ -123,14 +141,21 @@ class GridfsFileUploader extends AbstractFileUploader {
    *
    * Bulk delete files since unlink method of mongoose-gridfs does not support bulk operation
    */
-  override async deleteFiles(attachments: IAttachmentDocument[]): Promise<void> {
+  override async deleteFiles(
+    attachments: IAttachmentDocument[],
+  ): Promise<void> {
     const { attachmentFileModel, chunkCollection } = initializeGridFSModels();
 
     const filenameValues = attachments.map((attachment) => {
       return attachment.fileName;
     });
-    const fileIdObjects = await attachmentFileModel.find({ filename: { $in: filenameValues } }, { _id: 1 });
-    const idsRelatedFiles = fileIdObjects.map((obj) => { return obj._id });
+    const fileIdObjects = await attachmentFileModel.find(
+      { filename: { $in: filenameValues } },
+      { _id: 1 },
+    );
+    const idsRelatedFiles = fileIdObjects.map((obj) => {
+      return obj._id;
+    });
 
     await Promise.all([
       attachmentFileModel.deleteMany({ filename: { $in: filenameValues } }),
@@ -145,13 +170,19 @@ class GridfsFileUploader extends AbstractFileUploader {
    * {@link https://github.com/growilabs/growi/blob/798e44f14ad01544c1d75ba83d4dfb321a94aa0b/src/server/service/file-uploader/gridfs.js#L86-L88}
    */
   override getFileUploadTotalLimit() {
-    return configManager.getConfig('gridfs:totalLimit') ?? configManager.getConfig('app:fileUploadTotalLimit');
+    return (
+      configManager.getConfig('gridfs:totalLimit') ??
+      configManager.getConfig('app:fileUploadTotalLimit')
+    );
   }
 
   /**
    * @inheritdoc
    */
-  override async uploadAttachment(readable: Readable, attachment: IAttachmentDocument): Promise<void> {
+  override async uploadAttachment(
+    readable: Readable,
+    attachment: IAttachmentDocument,
+  ): Promise<void> {
     logger.debug(`File uploading: fileName=${attachment.fileName}`);
 
     const { attachmentFileModel } = initializeGridFSModels();
@@ -171,34 +202,39 @@ class GridfsFileUploader extends AbstractFileUploader {
    * @inheritdoc
    */
   override respond(): void {
-    throw new Error('GridfsFileUploader does not support ResponseMode.DELEGATE.');
+    throw new Error(
+      'GridfsFileUploader does not support ResponseMode.DELEGATE.',
+    );
   }
 
   /**
    * @inheritdoc
    */
-  override findDeliveryFile(attachment: IAttachmentDocument): Promise<NodeJS.ReadableStream> {
+  override findDeliveryFile(
+    attachment: IAttachmentDocument,
+  ): Promise<NodeJS.ReadableStream> {
     throw new Error('Method not implemented.');
   }
 
   /**
    * @inheritDoc
    */
-  override async generateTemporaryUrl(attachment: IAttachmentDocument, opts?: RespondOptions): Promise<TemporaryUrl> {
-    throw new Error('GridfsFileUploader does not support ResponseMode.REDIRECT.');
+  override async generateTemporaryUrl(
+    attachment: IAttachmentDocument,
+    opts?: RespondOptions,
+  ): Promise<TemporaryUrl> {
+    throw new Error(
+      'GridfsFileUploader does not support ResponseMode.REDIRECT.',
+    );
   }
-
 }
 
-
-module.exports = function(crowi: Crowi) {
+module.exports = (crowi: Crowi) => {
   const lib = new GridfsFileUploader(crowi);
 
-  lib.isValidUploadSettings = function() {
-    return true;
-  };
+  lib.isValidUploadSettings = () => true;
 
-  lib.saveFile = async function({ filePath, contentType, data }) {
+  lib.saveFile = async ({ filePath, contentType, data }) => {
     const { attachmentFileModel } = initializeGridFSModels();
 
     // Create a readable stream from the data
@@ -227,11 +263,9 @@ module.exports = function(crowi: Crowi) {
       );
 
       return result;
-    }
-    catch (error) {
+    } catch (error) {
       throw error;
-    }
-    finally {
+    } finally {
       // Explicit cleanup to prevent memory leaks
       if (typeof readable.destroy === 'function') {
         readable.destroy();
@@ -245,14 +279,18 @@ module.exports = function(crowi: Crowi) {
    * @param {Attachment} attachment
    * @return {stream.Readable} readable stream
    */
-  lib.findDeliveryFile = async function(attachment) {
+  lib.findDeliveryFile = async (attachment) => {
     const { attachmentFileModel } = initializeGridFSModels();
     const filenameValue = attachment.fileName;
 
-    const attachmentFile = await attachmentFileModel.findOne({ filename: filenameValue });
+    const attachmentFile = await attachmentFileModel.findOne({
+      filename: filenameValue,
+    });
 
     if (attachmentFile == null) {
-      throw new Error(`Any AttachmentFile that relate to the Attachment (${attachment._id.toString()}) does not exist in GridFS`);
+      throw new Error(
+        `Any AttachmentFile that relate to the Attachment (${attachment._id.toString()}) does not exist in GridFS`,
+      );
     }
 
     // return stream.Readable
@@ -262,12 +300,13 @@ module.exports = function(crowi: Crowi) {
   /**
    * List files in storage
    */
-  (lib as any).listFiles = async function() {
+  (lib as any).listFiles = async () => {
     const { attachmentFileModel } = initializeGridFSModels();
 
     const attachmentFiles = await attachmentFileModel.find();
     return attachmentFiles.map(({ filename: name, length: size }) => ({
-      name, size,
+      name,
+      size,
     }));
   };
 

+ 2 - 2
apps/app/src/server/service/file-uploader/index.ts

@@ -3,7 +3,6 @@ import type Crowi from '~/server/crowi';
 import loggerFactory from '~/utils/logger';
 
 import { configManager } from '../config-manager';
-
 import type { FileUploader } from './file-uploader';
 
 export type { FileUploader } from './file-uploader';
@@ -11,7 +10,8 @@ export type { FileUploader } from './file-uploader';
 const logger = loggerFactory('growi:service:FileUploaderServise');
 
 export const getUploader = (crowi: Crowi): FileUploader => {
-  const method = EnvToModuleMappings[configManager.getConfig('app:fileUploadType')];
+  const method =
+    EnvToModuleMappings[configManager.getConfig('app:fileUploadType')];
   const modulePath = `./${method}`;
   const uploader = require(modulePath)(crowi);
 

+ 90 - 58
apps/app/src/server/service/file-uploader/local.ts

@@ -1,24 +1,29 @@
+import type { Response } from 'express';
 import type { Writable } from 'stream';
 import { Readable } from 'stream';
 import { pipeline } from 'stream/promises';
 
-import type { Response } from 'express';
-
 import type Crowi from '~/server/crowi';
-import { FilePathOnStoragePrefix, ResponseMode, type RespondOptions } from '~/server/interfaces/attachment';
+import {
+  FilePathOnStoragePrefix,
+  type RespondOptions,
+  ResponseMode,
+} from '~/server/interfaces/attachment';
 import type { IAttachmentDocument } from '~/server/models/attachment';
 import loggerFactory from '~/utils/logger';
 
 import { configManager } from '../config-manager';
-
 import {
-  AbstractFileUploader, type TemporaryUrl, type SaveFileParam,
+  AbstractFileUploader,
+  type SaveFileParam,
+  type TemporaryUrl,
 } from './file-uploader';
 import {
-  applyHeaders, createContentHeaders, toExpressHttpHeaders,
+  applyHeaders,
+  createContentHeaders,
+  toExpressHttpHeaders,
 } from './utils';
 
-
 const logger = loggerFactory('growi:service:fileUploaderLocal');
 
 const fs = require('fs');
@@ -28,10 +33,8 @@ const path = require('path');
 const mkdir = require('mkdirp');
 const urljoin = require('url-join');
 
-
 // TODO: rewrite this module to be a type-safe implementation
 class LocalFileUploader extends AbstractFileUploader {
-
   /**
    * @inheritdoc
    */
@@ -64,19 +67,24 @@ class LocalFileUploader extends AbstractFileUploader {
   /**
    * @inheritdoc
    */
-  override async deleteFiles(attachments: IAttachmentDocument[]): Promise<void> {
-    await Promise.all(attachments.map((attachment) => {
-      return this.deleteFile(attachment);
-    }));
+  override async deleteFiles(
+    attachments: IAttachmentDocument[],
+  ): Promise<void> {
+    await Promise.all(
+      attachments.map((attachment) => {
+        return this.deleteFile(attachment);
+      }),
+    );
   }
 
   private async deleteFileByFilePath(filePath: string): Promise<void> {
     // check file exists
     try {
       fs.statSync(filePath);
-    }
-    catch (err) {
-      logger.warn(`Any AttachmentFile which path is '${filePath}' does not exist in local fs`);
+    } catch (err) {
+      logger.warn(
+        `Any AttachmentFile which path is '${filePath}' does not exist in local fs`,
+      );
       return;
     }
 
@@ -99,62 +107,80 @@ class LocalFileUploader extends AbstractFileUploader {
   /**
    * @inheritdoc
    */
-  override async uploadAttachment(readable: Readable, attachment: IAttachmentDocument): Promise<void> {
+  override async uploadAttachment(
+    readable: Readable,
+    attachment: IAttachmentDocument,
+  ): Promise<void> {
     throw new Error('Method not implemented.');
   }
 
   /**
    * @inheritdoc
    */
-  override respond(res: Response, attachment: IAttachmentDocument, opts?: RespondOptions): void {
+  override respond(
+    res: Response,
+    attachment: IAttachmentDocument,
+    opts?: RespondOptions,
+  ): void {
     throw new Error('Method not implemented.');
   }
 
   /**
    * @inheritdoc
    */
-  override findDeliveryFile(attachment: IAttachmentDocument): Promise<NodeJS.ReadableStream> {
+  override findDeliveryFile(
+    attachment: IAttachmentDocument,
+  ): Promise<NodeJS.ReadableStream> {
     throw new Error('Method not implemented.');
   }
 
   /**
    * @inheritDoc
    */
-  override async generateTemporaryUrl(attachment: IAttachmentDocument, opts?: RespondOptions): Promise<TemporaryUrl> {
-    throw new Error('LocalFileUploader does not support ResponseMode.REDIRECT.');
+  override async generateTemporaryUrl(
+    attachment: IAttachmentDocument,
+    opts?: RespondOptions,
+  ): Promise<TemporaryUrl> {
+    throw new Error(
+      'LocalFileUploader does not support ResponseMode.REDIRECT.',
+    );
   }
-
 }
 
-module.exports = function(crowi: Crowi) {
+module.exports = (crowi: Crowi) => {
   const lib = new LocalFileUploader(crowi);
 
   const basePath = path.posix.join(crowi.publicDir, 'uploads');
 
-  lib.getFilePathOnStorage = function(attachment: IAttachmentDocument) {
-    const dirName = (attachment.page != null)
-      ? FilePathOnStoragePrefix.attachment
-      : FilePathOnStoragePrefix.user;
+  lib.getFilePathOnStorage = (attachment: IAttachmentDocument) => {
+    const dirName =
+      attachment.page != null
+        ? FilePathOnStoragePrefix.attachment
+        : FilePathOnStoragePrefix.user;
     const filePath = path.posix.join(basePath, dirName, attachment.fileName);
 
     return filePath;
   };
 
   async function readdirRecursively(dirPath) {
-    const directories = await fsPromises.readdir(dirPath, { withFileTypes: true });
-    const files = await Promise.all(directories.map((directory) => {
-      const childDirPathOrFilePath = path.resolve(dirPath, directory.name);
-      return directory.isDirectory() ? readdirRecursively(childDirPathOrFilePath) : childDirPathOrFilePath;
-    }));
+    const directories = await fsPromises.readdir(dirPath, {
+      withFileTypes: true,
+    });
+    const files = await Promise.all(
+      directories.map((directory) => {
+        const childDirPathOrFilePath = path.resolve(dirPath, directory.name);
+        return directory.isDirectory()
+          ? readdirRecursively(childDirPathOrFilePath)
+          : childDirPathOrFilePath;
+      }),
+    );
 
     return files.flat();
   }
 
-  lib.isValidUploadSettings = function() {
-    return true;
-  };
+  lib.isValidUploadSettings = () => true;
 
-  lib.uploadAttachment = async function(fileStream, attachment) {
+  lib.uploadAttachment = async (fileStream, attachment) => {
     logger.debug(`File uploading: fileName=${attachment.fileName}`);
 
     const filePath = lib.getFilePathOnStorage(attachment);
@@ -167,21 +193,22 @@ module.exports = function(crowi: Crowi) {
 
     try {
       const uploadTimeout = configManager.getConfig('app:fileUploadTimeout');
-      await pipeline(
-        fileStream,
-        writeStream,
-        { signal: AbortSignal.timeout(uploadTimeout) },
-      );
+      await pipeline(fileStream, writeStream, {
+        signal: AbortSignal.timeout(uploadTimeout),
+      });
 
-      logger.debug(`File upload completed successfully: fileName=${attachment.fileName}`);
-    }
-    catch (error) {
+      logger.debug(
+        `File upload completed successfully: fileName=${attachment.fileName}`,
+      );
+    } catch (error) {
       // Handle timeout error specifically
       if (error.name === 'AbortError') {
         logger.warn(`Upload timeout: fileName=${attachment.fileName}`, error);
-      }
-      else {
-        logger.error(`File upload failed: fileName=${attachment.fileName}`, error);
+      } else {
+        logger.error(
+          `File upload failed: fileName=${attachment.fileName}`,
+          error,
+        );
       }
       // Re-throw the error to be handled by the caller.
       // The pipeline automatically handles stream cleanup on error.
@@ -189,7 +216,7 @@ module.exports = function(crowi: Crowi) {
     }
   };
 
-  lib.saveFile = async function({ filePath, contentType, data }) {
+  lib.saveFile = async ({ filePath, contentType, data }) => {
     const absFilePath = path.posix.join(basePath, filePath);
     const dirpath = path.posix.dirname(absFilePath);
 
@@ -209,15 +236,16 @@ module.exports = function(crowi: Crowi) {
    * @param {Attachment} attachment
    * @return {stream.Readable} readable stream
    */
-  lib.findDeliveryFile = async function(attachment) {
+  lib.findDeliveryFile = async (attachment) => {
     const filePath = lib.getFilePathOnStorage(attachment);
 
     // check file exists
     try {
       fs.statSync(filePath);
-    }
-    catch (err) {
-      throw new Error(`Any AttachmentFile that relate to the Attachment (${attachment._id.toString()}) does not exist in local fs`);
+    } catch (err) {
+      throw new Error(
+        `Any AttachmentFile that relate to the Attachment (${attachment._id.toString()}) does not exist in local fs`,
+      );
     }
 
     // return stream.Readable
@@ -229,15 +257,19 @@ module.exports = function(crowi: Crowi) {
    * @param {Response} res
    * @param {Response} attachment
    */
-  lib.respond = function(res, attachment, opts) {
+  lib.respond = (res, attachment, opts) => {
     // Responce using internal redirect of nginx or Apache.
     const storagePath = lib.getFilePathOnStorage(attachment);
     const relativePath = path.relative(crowi.publicDir, storagePath);
-    const internalPathRoot = configManager.getConfig('fileUpload:local:internalRedirectPath');
+    const internalPathRoot = configManager.getConfig(
+      'fileUpload:local:internalRedirectPath',
+    );
     const internalPath = urljoin(internalPathRoot, relativePath);
 
     const isDownload = opts?.download ?? false;
-    const contentHeaders = createContentHeaders(attachment, { inline: !isDownload });
+    const contentHeaders = createContentHeaders(attachment, {
+      inline: !isDownload,
+    });
     applyHeaders(res, [
       ...toExpressHttpHeaders(contentHeaders),
       { field: 'X-Accel-Redirect', value: internalPath },
@@ -250,13 +282,13 @@ module.exports = function(crowi: Crowi) {
   /**
    * List files in storage
    */
-  lib.listFiles = async function() {
+  lib.listFiles = async () => {
     // `mkdir -p` to avoid ENOENT error
     await mkdir(basePath);
     const filePaths = await readdirRecursively(basePath);
     return Promise.all(
-      filePaths.map(
-        file => fsPromises.stat(file).then(({ size }) => ({
+      filePaths.map((file) =>
+        fsPromises.stat(file).then(({ size }) => ({
           name: path.relative(basePath, file),
           size,
         })),

+ 92 - 29
apps/app/src/server/service/file-uploader/multipart-uploader.spec.ts

@@ -1,16 +1,25 @@
-import { UploadStatus, MultipartUploader } from './multipart-uploader';
+import { MultipartUploader, UploadStatus } from './multipart-uploader';
 
 class MockMultipartUploader extends MultipartUploader {
+  async initUpload(): Promise<void> {
+    return;
+  }
 
-  async initUpload(): Promise<void> { return }
-
-  async uploadPart(part: Buffer, partNumber: number): Promise<void> { return }
+  async uploadPart(part: Buffer, partNumber: number): Promise<void> {
+    return;
+  }
 
-  async completeUpload(): Promise<void> { return }
+  async completeUpload(): Promise<void> {
+    return;
+  }
 
-  async abortUpload(): Promise<void> { return }
+  async abortUpload(): Promise<void> {
+    return;
+  }
 
-  async getUploadedFileSize(): Promise<number> { return 0 }
+  async getUploadedFileSize(): Promise<number> {
+    return 0;
+  }
 
   // Expose the protected method for testing
   testValidateUploadStatus(desired: UploadStatus): void {
@@ -20,7 +29,6 @@ class MockMultipartUploader extends MultipartUploader {
   setCurrentStatus(status: UploadStatus): void {
     this.currentStatus = status;
   }
-
 }
 
 describe('MultipartUploader', () => {
@@ -34,35 +42,90 @@ describe('MultipartUploader', () => {
     describe('When current status is equal to desired status', () => {
       it('should not throw error', () => {
         uploader.setCurrentStatus(UploadStatus.ABORTED);
-        expect(() => uploader.testValidateUploadStatus(UploadStatus.ABORTED)).not.toThrow();
+        expect(() =>
+          uploader.testValidateUploadStatus(UploadStatus.ABORTED),
+        ).not.toThrow();
       });
     });
 
     describe('When current status is not equal to desired status', () => {
       const cases = [
-        { current: UploadStatus.BEFORE_INIT, desired: UploadStatus.IN_PROGRESS, errorMessage: 'Multipart upload has not been initiated' },
-        { current: UploadStatus.BEFORE_INIT, desired: UploadStatus.COMPLETED, errorMessage: 'Multipart upload has not been initiated' },
-        { current: UploadStatus.BEFORE_INIT, desired: UploadStatus.ABORTED, errorMessage: 'Multipart upload has not been initiated' },
-        { current: UploadStatus.IN_PROGRESS, desired: UploadStatus.BEFORE_INIT, errorMessage: 'Multipart upload is already in progress' },
-        { current: UploadStatus.IN_PROGRESS, desired: UploadStatus.COMPLETED, errorMessage: 'Multipart upload is still in progress' },
-        { current: UploadStatus.IN_PROGRESS, desired: UploadStatus.ABORTED, errorMessage: 'Multipart upload is still in progress' },
-        { current: UploadStatus.COMPLETED, desired: UploadStatus.BEFORE_INIT, errorMessage: 'Multipart upload has already been completed' },
-        { current: UploadStatus.COMPLETED, desired: UploadStatus.IN_PROGRESS, errorMessage: 'Multipart upload has already been completed' },
-        { current: UploadStatus.COMPLETED, desired: UploadStatus.ABORTED, errorMessage: 'Multipart upload has already been completed' },
-        { current: UploadStatus.ABORTED, desired: UploadStatus.BEFORE_INIT, errorMessage: 'Multipart upload has been aborted' },
-        { current: UploadStatus.ABORTED, desired: UploadStatus.IN_PROGRESS, errorMessage: 'Multipart upload has been aborted' },
-        { current: UploadStatus.ABORTED, desired: UploadStatus.COMPLETED, errorMessage: 'Multipart upload has been aborted' },
+        {
+          current: UploadStatus.BEFORE_INIT,
+          desired: UploadStatus.IN_PROGRESS,
+          errorMessage: 'Multipart upload has not been initiated',
+        },
+        {
+          current: UploadStatus.BEFORE_INIT,
+          desired: UploadStatus.COMPLETED,
+          errorMessage: 'Multipart upload has not been initiated',
+        },
+        {
+          current: UploadStatus.BEFORE_INIT,
+          desired: UploadStatus.ABORTED,
+          errorMessage: 'Multipart upload has not been initiated',
+        },
+        {
+          current: UploadStatus.IN_PROGRESS,
+          desired: UploadStatus.BEFORE_INIT,
+          errorMessage: 'Multipart upload is already in progress',
+        },
+        {
+          current: UploadStatus.IN_PROGRESS,
+          desired: UploadStatus.COMPLETED,
+          errorMessage: 'Multipart upload is still in progress',
+        },
+        {
+          current: UploadStatus.IN_PROGRESS,
+          desired: UploadStatus.ABORTED,
+          errorMessage: 'Multipart upload is still in progress',
+        },
+        {
+          current: UploadStatus.COMPLETED,
+          desired: UploadStatus.BEFORE_INIT,
+          errorMessage: 'Multipart upload has already been completed',
+        },
+        {
+          current: UploadStatus.COMPLETED,
+          desired: UploadStatus.IN_PROGRESS,
+          errorMessage: 'Multipart upload has already been completed',
+        },
+        {
+          current: UploadStatus.COMPLETED,
+          desired: UploadStatus.ABORTED,
+          errorMessage: 'Multipart upload has already been completed',
+        },
+        {
+          current: UploadStatus.ABORTED,
+          desired: UploadStatus.BEFORE_INIT,
+          errorMessage: 'Multipart upload has been aborted',
+        },
+        {
+          current: UploadStatus.ABORTED,
+          desired: UploadStatus.IN_PROGRESS,
+          errorMessage: 'Multipart upload has been aborted',
+        },
+        {
+          current: UploadStatus.ABORTED,
+          desired: UploadStatus.COMPLETED,
+          errorMessage: 'Multipart upload has been aborted',
+        },
       ];
 
-      describe.each(cases)('When current status is $current and desired status is $desired', ({ current, desired, errorMessage }) => {
-        beforeEach(() => {
-          uploader.setCurrentStatus(current);
-        });
+      describe.each(cases)(
+        'When current status is $current and desired status is $desired',
+        ({ current, desired, errorMessage }) => {
+          beforeEach(() => {
+            uploader.setCurrentStatus(current);
+          });
 
-        it(`should throw expected error: "${errorMessage}"`, () => {
-          expect(() => uploader.testValidateUploadStatus(desired)).toThrow(errorMessage);
-        });
-      });
+          it(`should throw expected error: "${errorMessage}"`, () => {
+            expect(() => uploader.testValidateUploadStatus(desired)).toThrow(
+              errorMessage,
+            );
+          });
+        },
+      );
     });
   });
 });

+ 9 - 11
apps/app/src/server/service/file-uploader/multipart-uploader.ts

@@ -6,7 +6,7 @@ export enum UploadStatus {
   BEFORE_INIT,
   IN_PROGRESS,
   COMPLETED,
-  ABORTED
+  ABORTED,
 }
 
 export interface IMultipartUploader {
@@ -23,7 +23,6 @@ export interface IMultipartUploader {
  * Each instance is equivalent to a single multipart upload, and cannot be reused once completed.
  */
 export abstract class MultipartUploader implements IMultipartUploader {
-
   protected uploadKey: string;
 
   protected _uploadId: string | undefined;
@@ -44,18 +43,19 @@ export abstract class MultipartUploader implements IMultipartUploader {
     return this._uploadId;
   }
 
-  abstract initUpload(): Promise<void>
+  abstract initUpload(): Promise<void>;
 
-  abstract uploadPart(part: Buffer, partNumber: number): Promise<void>
+  abstract uploadPart(part: Buffer, partNumber: number): Promise<void>;
 
-  abstract completeUpload(): Promise<void>
+  abstract completeUpload(): Promise<void>;
 
-  abstract abortUpload(): Promise<void>
+  abstract abortUpload(): Promise<void>;
 
-  abstract getUploadedFileSize(): Promise<number>
+  abstract getUploadedFileSize(): Promise<number>;
 
   protected validatePartSize(partSize: number): void {
-    if (partSize > this.maxPartSize) throw Error(`partSize must be less than or equal to ${this.maxPartSize}`);
+    if (partSize > this.maxPartSize)
+      throw Error(`partSize must be less than or equal to ${this.maxPartSize}`);
   }
 
   protected validateUploadStatus(desiredStatus: UploadStatus): void {
@@ -74,8 +74,7 @@ export abstract class MultipartUploader implements IMultipartUploader {
     if (this.currentStatus === UploadStatus.IN_PROGRESS) {
       if (desiredStatus === UploadStatus.BEFORE_INIT) {
         errMsg = 'Multipart upload is already in progress';
-      }
-      else {
+      } else {
         errMsg = 'Multipart upload is still in progress';
       }
     }
@@ -89,5 +88,4 @@ export abstract class MultipartUploader implements IMultipartUploader {
       throw Error(errMsg);
     }
   }
-
 }

+ 39 - 13
apps/app/src/server/service/file-uploader/utils/headers.ts

@@ -3,14 +3,21 @@ import type { Response } from 'express';
 import type { ExpressHttpHeader } from '~/server/interfaces/attachment';
 import type { IAttachmentDocument } from '~/server/models/attachment';
 
-type ContentHeaderField = 'Content-Type' | 'Content-Security-Policy' | 'Content-Disposition' | 'Content-Length';
+type ContentHeaderField =
+  | 'Content-Type'
+  | 'Content-Security-Policy'
+  | 'Content-Disposition'
+  | 'Content-Length';
 type ContentHeader = ExpressHttpHeader<ContentHeaderField>;
 
 /**
  * Factory function to generate content headers.
  * This approach avoids creating a class instance for each call, improving memory efficiency.
  */
-export const createContentHeaders = (attachment: IAttachmentDocument, opts?: { inline?: boolean }): ContentHeader[] => {
+export const createContentHeaders = (
+  attachment: IAttachmentDocument,
+  opts?: { inline?: boolean },
+): ContentHeader[] => {
   const headers: ContentHeader[] = [];
 
   // Content-Type
@@ -23,7 +30,8 @@ export const createContentHeaders = (attachment: IAttachmentDocument, opts?: { i
   headers.push({
     field: 'Content-Security-Policy',
     // eslint-disable-next-line max-len
-    value: "script-src 'unsafe-hashes'; style-src 'self' 'unsafe-inline'; object-src 'none'; require-trusted-types-for 'script'; media-src 'self'; default-src 'none';",
+    value:
+      "script-src 'unsafe-hashes'; style-src 'self' 'unsafe-inline'; object-src 'none'; require-trusted-types-for 'script'; media-src 'self'; default-src 'none';",
   });
 
   // Content-Disposition
@@ -43,27 +51,45 @@ export const createContentHeaders = (attachment: IAttachmentDocument, opts?: { i
   return headers;
 };
 
-export const getContentHeaderValue = (contentHeaders: ContentHeader[], field: ContentHeaderField): string | undefined => {
-  const header = contentHeaders.find(h => h.field === field);
+export const getContentHeaderValue = (
+  contentHeaders: ContentHeader[],
+  field: ContentHeaderField,
+): string | undefined => {
+  const header = contentHeaders.find((h) => h.field === field);
   return header?.value.toString();
 };
 
 /**
  * Convert to ExpressHttpHeader[]
  */
-export function toExpressHttpHeaders(records: Record<string, string | string[]>): ExpressHttpHeader[];
-export function toExpressHttpHeaders(contentHeaders: ContentHeader[]): ExpressHttpHeader[];
-export function toExpressHttpHeaders(arg: Record<string, string | string[]> | ContentHeader[]): ExpressHttpHeader[] {
+export function toExpressHttpHeaders(
+  records: Record<string, string | string[]>,
+): ExpressHttpHeader[];
+export function toExpressHttpHeaders(
+  contentHeaders: ContentHeader[],
+): ExpressHttpHeader[];
+export function toExpressHttpHeaders(
+  arg: Record<string, string | string[]> | ContentHeader[],
+): ExpressHttpHeader[] {
   if (Array.isArray(arg)) {
-    return arg
-      // exclude undefined
-      .filter((member): member is NonNullable<typeof member> => member != null);
+    return (
+      arg
+        // exclude undefined
+        .filter(
+          (member): member is NonNullable<typeof member> => member != null,
+        )
+    );
   }
 
-  return Object.entries(arg).map(([field, value]) => { return { field, value } });
+  return Object.entries(arg).map(([field, value]) => {
+    return { field, value };
+  });
 }
 
-export const applyHeaders = (res: Response, headers: ExpressHttpHeader[]): void => {
+export const applyHeaders = (
+  res: Response,
+  headers: ExpressHttpHeader[],
+): void => {
   headers.forEach((header) => {
     res.header(header.field, header.value);
   });

+ 27 - 11
apps/app/src/server/service/global-notification/global-notification-mail.js

@@ -1,6 +1,9 @@
 import nodePath from 'path';
 
-import { GlobalNotificationSettingEvent, GlobalNotificationSettingType } from '~/server/models/GlobalNotificationSetting';
+import {
+  GlobalNotificationSettingEvent,
+  GlobalNotificationSettingType,
+} from '~/server/models/GlobalNotificationSetting';
 import { configManager } from '~/server/service/config-manager';
 import { growiInfoService } from '~/server/service/growi-info';
 import loggerFactory from '~/utils/logger';
@@ -11,7 +14,6 @@ const logger = loggerFactory('growi:service:GlobalNotificationMailService'); //
  * sub service class of GlobalNotificationSetting
  */
 class GlobalNotificationMailService {
-
   /** @type {import('~/server/crowi').default} Crowi instance */
   crowi;
 
@@ -34,13 +36,19 @@ class GlobalNotificationMailService {
     const { mailService } = this.crowi;
 
     const GlobalNotification = this.crowi.model('GlobalNotificationSetting');
-    const notifications = await GlobalNotification.findSettingByPathAndEvent(event, page.path, GlobalNotificationSettingType.MAIL);
+    const notifications = await GlobalNotification.findSettingByPathAndEvent(
+      event,
+      page.path,
+      GlobalNotificationSettingType.MAIL,
+    );
 
     const option = this.generateOption(event, page, triggeredBy, vars);
 
-    await Promise.all(notifications.map((notification) => {
-      return mailService.send({ ...option, to: notification.toEmail });
-    }));
+    await Promise.all(
+      notifications.map((notification) => {
+        return mailService.send({ ...option, to: notification.toEmail });
+      }),
+    );
   }
 
   /**
@@ -59,10 +67,15 @@ class GlobalNotificationMailService {
     const locale = configManager.getConfig('app:globalLang');
     // validate for all events
     if (event == null || page == null || triggeredBy == null) {
-      throw new Error(`invalid vars supplied to GlobalNotificationMailService.generateOption for event ${event}`);
+      throw new Error(
+        `invalid vars supplied to GlobalNotificationMailService.generateOption for event ${event}`,
+      );
     }
 
-    const template = nodePath.join(this.crowi.localeDir, `${locale}/notifications/${event}.ejs`);
+    const template = nodePath.join(
+      this.crowi.localeDir,
+      `${locale}/notifications/${event}.ejs`,
+    );
 
     const path = page.path;
     const appTitle = this.crowi.appService.getAppTitle();
@@ -93,7 +106,9 @@ class GlobalNotificationMailService {
       case GlobalNotificationSettingEvent.PAGE_MOVE:
         // validate for page move
         if (oldPath == null) {
-          throw new Error(`invalid vars supplied to GlobalNotificationMailService.generateOption for event ${event}`);
+          throw new Error(
+            `invalid vars supplied to GlobalNotificationMailService.generateOption for event ${event}`,
+          );
         }
 
         subject = `#${event} - ${triggeredBy.username} moved ${oldPath} to ${path} at URL: ${pageUrl}`;
@@ -111,7 +126,9 @@ class GlobalNotificationMailService {
       case GlobalNotificationSettingEvent.COMMENT:
         // validate for comment
         if (comment == null) {
-          throw new Error(`invalid vars supplied to GlobalNotificationMailService.generateOption for event ${event}`);
+          throw new Error(
+            `invalid vars supplied to GlobalNotificationMailService.generateOption for event ${event}`,
+          );
         }
 
         subject = `#${event} - ${triggeredBy.username} commented on ${path} at URL: ${pageUrl}`;
@@ -131,7 +148,6 @@ class GlobalNotificationMailService {
       vars,
     };
   }
-
 }
 
 module.exports = GlobalNotificationMailService;

+ 42 - 17
apps/app/src/server/service/global-notification/global-notification-slack.js

@@ -1,11 +1,12 @@
 import { pagePathUtils } from '@growi/core/dist/utils';
 
-import { GlobalNotificationSettingEvent, GlobalNotificationSettingType } from '~/server/models/GlobalNotificationSetting';
+import {
+  GlobalNotificationSettingEvent,
+  GlobalNotificationSettingType,
+} from '~/server/models/GlobalNotificationSetting';
 import loggerFactory from '~/utils/logger';
 
-import {
-  prepareSlackMessageForGlobalNotification,
-} from '../../util/slack';
+import { prepareSlackMessageForGlobalNotification } from '../../util/slack';
 import { growiInfoService } from '../growi-info';
 
 const logger = loggerFactory('growi:service:GlobalNotificationSlackService'); // eslint-disable-line no-unused-vars
@@ -17,7 +18,6 @@ const { encodeSpaces } = pagePathUtils;
  * sub service class of GlobalNotificationSetting
  */
 class GlobalNotificationSlackService {
-
   /** @type {import('~/server/crowi').default} Crowi instance */
   crowi;
 
@@ -41,18 +41,40 @@ class GlobalNotificationSlackService {
     const { appService, slackIntegrationService } = this.crowi;
 
     const GlobalNotification = this.crowi.model('GlobalNotificationSetting');
-    const notifications = await GlobalNotification.findSettingByPathAndEvent(event, path, GlobalNotificationSettingType.SLACK);
-
-    const messageBody = this.generateMessageBody(event, id, path, triggeredBy, vars);
-    const attachmentBody = this.generateAttachmentBody(event, id, path, triggeredBy, vars);
+    const notifications = await GlobalNotification.findSettingByPathAndEvent(
+      event,
+      path,
+      GlobalNotificationSettingType.SLACK,
+    );
+
+    const messageBody = this.generateMessageBody(
+      event,
+      id,
+      path,
+      triggeredBy,
+      vars,
+    );
+    const attachmentBody = this.generateAttachmentBody(
+      event,
+      id,
+      path,
+      triggeredBy,
+      vars,
+    );
 
     const appTitle = appService.getAppTitle();
 
-    await Promise.all(notifications.map((notification) => {
-      const messageObj = prepareSlackMessageForGlobalNotification(messageBody, attachmentBody, appTitle, notification.slackChannels);
-      return slackIntegrationService.postMessage(messageObj);
-    }));
-
+    await Promise.all(
+      notifications.map((notification) => {
+        const messageObj = prepareSlackMessageForGlobalNotification(
+          messageBody,
+          attachmentBody,
+          appTitle,
+          notification.slackChannels,
+        );
+        return slackIntegrationService.postMessage(messageObj);
+      }),
+    );
   }
 
   /**
@@ -88,7 +110,9 @@ class GlobalNotificationSlackService {
       case GlobalNotificationSettingEvent.PAGE_MOVE:
         // validate for page move
         if (oldPath == null) {
-          throw new Error(`invalid vars supplied to GlobalNotificationSlackService.generateOption for event ${event}`);
+          throw new Error(
+            `invalid vars supplied to GlobalNotificationSlackService.generateOption for event ${event}`,
+          );
         }
         // eslint-disable-next-line no-case-declarations
         messageBody = `:bell: ${username} moved ${oldPath} to ${parmaLink}`;
@@ -99,7 +123,9 @@ class GlobalNotificationSlackService {
       case GlobalNotificationSettingEvent.COMMENT:
         // validate for comment
         if (comment == null) {
-          throw new Error(`invalid vars supplied to GlobalNotificationSlackService.generateOption for event ${event}`);
+          throw new Error(
+            `invalid vars supplied to GlobalNotificationSlackService.generateOption for event ${event}`,
+          );
         }
         messageBody = `:bell: ${username} commented on ${parmaLink}`;
         break;
@@ -148,7 +174,6 @@ class GlobalNotificationSlackService {
 
     return attachmentBody;
   }
-
 }
 
 module.exports = GlobalNotificationSlackService;

+ 16 - 8
apps/app/src/server/service/global-notification/index.js

@@ -8,7 +8,6 @@ const GloabalNotificationSlack = require('./global-notification-slack');
  * service class of GlobalNotificationSetting
  */
 class GlobalNotificationService {
-
   /** @type {import('~/server/crowi').default} Crowi instance */
   crowi;
 
@@ -21,10 +20,8 @@ class GlobalNotificationService {
     this.gloabalNotificationSlack = new GloabalNotificationSlack(crowi);
 
     this.Page = this.crowi.model('Page');
-
   }
 
-
   /**
    * fire global notification
    *
@@ -40,7 +37,9 @@ class GlobalNotificationService {
 
     // validation
     if (event == null || page.path == null || triggeredBy == null) {
-      throw new Error(`invalid vars supplied to GlobalNotificationSlackService.generateOption for event ${event}`);
+      throw new Error(
+        `invalid vars supplied to GlobalNotificationSlackService.generateOption for event ${event}`,
+      );
     }
 
     if (!this.isSendNotification(page.grant)) {
@@ -50,7 +49,13 @@ class GlobalNotificationService {
 
     await Promise.all([
       this.gloabalNotificationMail.fire(event, page, triggeredBy, vars),
-      this.gloabalNotificationSlack.fire(event, page.id, page.path, triggeredBy, vars),
+      this.gloabalNotificationSlack.fire(
+        event,
+        page.id,
+        page.path,
+        triggeredBy,
+        vars,
+      ),
     ]);
   }
 
@@ -71,12 +76,15 @@ class GlobalNotificationService {
       case this.Page.GRANT_SPECIFIED:
         return false;
       case this.Page.GRANT_OWNER:
-        return (this.crowi.configManager.getConfig('notification:owner-page:isEnabled'));
+        return this.crowi.configManager.getConfig(
+          'notification:owner-page:isEnabled',
+        );
       case this.Page.GRANT_USER_GROUP:
-        return (this.crowi.configManager.getConfig('notification:group-page:isEnabled'));
+        return this.crowi.configManager.getConfig(
+          'notification:group-page:isEnabled',
+        );
     }
   }
-
 }
 
 module.exports = GlobalNotificationService;

+ 8 - 11
apps/app/src/server/service/growi-bridge/index.ts

@@ -2,17 +2,14 @@ import fs from 'fs';
 import path from 'path';
 import { pipeline } from 'stream';
 import { finished } from 'stream/promises';
-
 import unzipStream, { type Entry } from 'unzip-stream';
 
 import type Crowi from '~/server/crowi';
 import loggerFactory from '~/utils/logger';
 
 import type { ZipFileStat } from '../interfaces/export';
-
 import { tapStreamDataByPromise } from './unzip-stream-utils';
 
-
 const logger = loggerFactory('growi:services:GrowiBridgeService'); // eslint-disable-line no-unused-vars
 
 /**
@@ -20,7 +17,6 @@ const logger = loggerFactory('growi:services:GrowiBridgeService'); // eslint-dis
  * common properties and methods between export service and import service are defined in this service
  */
 export class GrowiBridgeService {
-
   crowi: Crowi;
 
   encoding: BufferEncoding = 'utf-8';
@@ -76,7 +72,11 @@ export class GrowiBridgeService {
    */
   async parseZipFile(zipFile: string): Promise<ZipFileStat | null> {
     const fileStat = fs.statSync(zipFile);
-    const innerFileStats: Array<{ fileName: string, collectionName: string, size: number }> = [];
+    const innerFileStats: Array<{
+      fileName: string;
+      collectionName: string;
+      size: number;
+    }> = [];
     let meta = {};
 
     const readStream = fs.createReadStream(zipFile);
@@ -92,8 +92,7 @@ export class GrowiBridgeService {
         tapPromise = tapStreamDataByPromise(entry).then((metaBuffer) => {
           meta = JSON.parse(metaBuffer.toString());
         });
-      }
-      else {
+      } else {
         innerFileStats.push({
           fileName,
           collectionName: path.basename(fileName, '.json'),
@@ -106,9 +105,8 @@ export class GrowiBridgeService {
     try {
       await finished(unzipEntryStream);
       await tapPromise;
-    }
-    // if zip is broken
-    catch (err) {
+    } catch (err) {
+      // if zip is broken
       logger.error(err);
       return null;
     }
@@ -121,5 +119,4 @@ export class GrowiBridgeService {
       innerFileStats,
     };
   }
-
 }

+ 1 - 4
apps/app/src/server/service/growi-bridge/unzip-stream-utils.ts

@@ -1,5 +1,4 @@
 import { PassThrough } from 'stream';
-
 import type { Entry } from 'unzip-stream';
 
 export const tapStreamDataByPromise = (entry: Entry): Promise<Buffer> => {
@@ -15,8 +14,6 @@ export const tapStreamDataByPromise = (entry: Entry): Promise<Buffer> => {
       })
       .on('error', reject);
 
-    entry
-      .pipe(entryContentGetterStream)
-      .on('error', reject);
+    entry.pipe(entryContentGetterStream).on('error', reject);
   });
 };

+ 19 - 16
apps/app/src/server/service/growi-info/growi-info.integ.ts

@@ -1,7 +1,7 @@
-import type { IPage } from '^/../../packages/core/dist';
 import mongoose from 'mongoose';
 import { mock } from 'vitest-mock-extended';
 
+import type { IPage } from '^/../../packages/core/dist';
 import pkg from '^/package.json';
 
 import type UserEvent from '~/server/events/user';
@@ -11,7 +11,6 @@ import { configManager } from '~/server/service/config-manager';
 import type Crowi from '../../crowi';
 import type { PageModel } from '../../models/page';
 import pageModel from '../../models/page';
-
 import { growiInfoService } from './growi-info';
 
 describe('GrowiInfoService', () => {
@@ -20,7 +19,7 @@ describe('GrowiInfoService', () => {
   let User;
   let Page;
 
-  beforeAll(async() => {
+  beforeAll(async () => {
     process.env.APP_SITE_URL = 'http://growi.test.jp';
     process.env.DEPLOYMENT_TYPE = 'growi-docker-compose';
     process.env.SAML_ENABLED = 'true';
@@ -65,8 +64,7 @@ describe('GrowiInfoService', () => {
   });
 
   describe('getGrowiInfo', () => {
-
-    test('Should get correct GROWI info', async() => {
+    test('Should get correct GROWI info', async () => {
       const growiInfo = await growiInfoService.getGrowiInfo();
 
       assert(growiInfo != null);
@@ -89,7 +87,7 @@ describe('GrowiInfoService', () => {
       });
     });
 
-    test('Should get correct GROWI info with additionalInfo', async() => {
+    test('Should get correct GROWI info with additionalInfo', async () => {
       // arrange
       await User.create({
         username: 'growiinfo test user',
@@ -129,9 +127,11 @@ describe('GrowiInfoService', () => {
       });
     });
 
-    test('Should get correct GROWI info with specific options - attachment only', async() => {
+    test('Should get correct GROWI info with specific options - attachment only', async () => {
       // act
-      const growiInfo = await growiInfoService.getGrowiInfo({ includeAttachmentInfo: true });
+      const growiInfo = await growiInfoService.getGrowiInfo({
+        includeAttachmentInfo: true,
+      });
 
       // assert
       assert(growiInfo != null);
@@ -141,9 +141,11 @@ describe('GrowiInfoService', () => {
       });
     });
 
-    test('Should get correct GROWI info with specific options - user count only', async() => {
+    test('Should get correct GROWI info with specific options - user count only', async () => {
       // act
-      const growiInfo = await growiInfoService.getGrowiInfo({ includeUserCountInfo: true });
+      const growiInfo = await growiInfoService.getGrowiInfo({
+        includeUserCountInfo: true,
+      });
 
       // assert
       assert(growiInfo != null);
@@ -155,9 +157,11 @@ describe('GrowiInfoService', () => {
       });
     });
 
-    test('Should get correct GROWI info with specific options - installed info only', async() => {
+    test('Should get correct GROWI info with specific options - installed info only', async () => {
       // act
-      const growiInfo = await growiInfoService.getGrowiInfo({ includeInstalledInfo: true });
+      const growiInfo = await growiInfoService.getGrowiInfo({
+        includeInstalledInfo: true,
+      });
 
       // assert
       assert(growiInfo != null);
@@ -169,7 +173,7 @@ describe('GrowiInfoService', () => {
       });
     });
 
-    test('Should get correct GROWI info with combined options', async() => {
+    test('Should get correct GROWI info with combined options', async () => {
       // act
       const growiInfo = await growiInfoService.getGrowiInfo({
         includeAttachmentInfo: true,
@@ -188,7 +192,7 @@ describe('GrowiInfoService', () => {
       });
     });
 
-    test('Should get correct GROWI info with all options', async() => {
+    test('Should get correct GROWI info with all options', async () => {
       // act
       const growiInfo = await growiInfoService.getGrowiInfo({
         includeAttachmentInfo: true,
@@ -210,7 +214,7 @@ describe('GrowiInfoService', () => {
       });
     });
 
-    test('Should get correct GROWI info with empty options', async() => {
+    test('Should get correct GROWI info with empty options', async () => {
       // act
       const growiInfo = await growiInfoService.getGrowiInfo({});
 
@@ -227,6 +231,5 @@ describe('GrowiInfoService', () => {
         osInfo: growiInfo.osInfo, // Keep the osInfo as it's dynamic
       });
     });
-
   });
 });

+ 44 - 21
apps/app/src/server/service/growi-info/growi-info.ts

@@ -1,9 +1,8 @@
 import * as os from 'node:os';
-
 import type {
-  IGrowiInfo,
   GrowiInfoOptions,
   IGrowiAdditionalInfoResult,
+  IGrowiInfo,
 } from '@growi/core';
 import type { IUser } from '@growi/core/dist/interfaces';
 import { GrowiWikiType } from '@growi/core/dist/interfaces';
@@ -26,9 +25,7 @@ const FULL_ADDITIONAL_INFO_OPTIONS = {
   includePageCountInfo: true,
 } as const;
 
-
 export class GrowiInfoService {
-
   /**
    * get the site url
    *
@@ -55,23 +52,36 @@ export class GrowiInfoService {
    * Get GROWI information with flexible options
    * @param options options to determine what additional information to include
    */
-  getGrowiInfo<T extends GrowiInfoOptions>(options: T): Promise<IGrowiInfo<IGrowiAdditionalInfoResult<T>>>;
+  getGrowiInfo<T extends GrowiInfoOptions>(
+    options: T,
+  ): Promise<IGrowiInfo<IGrowiAdditionalInfoResult<T>>>;
 
   /**
    * Get GROWI information with additional information (legacy)
    * @param includeAdditionalInfo whether to include additional information
    * @deprecated Use getGrowiInfo(options) instead
    */
-  getGrowiInfo(includeAdditionalInfo: true): Promise<IGrowiInfo<IGrowiAdditionalInfoResult<typeof FULL_ADDITIONAL_INFO_OPTIONS>>>;
+  getGrowiInfo(
+    includeAdditionalInfo: true,
+  ): Promise<
+    IGrowiInfo<IGrowiAdditionalInfoResult<typeof FULL_ADDITIONAL_INFO_OPTIONS>>
+  >;
 
   async getGrowiInfo<T extends GrowiInfoOptions>(
-      optionsOrLegacyFlag?: T | true,
-  ): Promise<IGrowiInfo<IGrowiAdditionalInfoResult<T>> | IGrowiInfo<undefined> | IGrowiInfo<IGrowiAdditionalInfoResult<typeof FULL_ADDITIONAL_INFO_OPTIONS>>> {
-
+    optionsOrLegacyFlag?: T | true,
+  ): Promise<
+    | IGrowiInfo<IGrowiAdditionalInfoResult<T>>
+    | IGrowiInfo<undefined>
+    | IGrowiInfo<
+        IGrowiAdditionalInfoResult<typeof FULL_ADDITIONAL_INFO_OPTIONS>
+      >
+  > {
     const appSiteUrl = this.getSiteUrl();
 
     const isGuestAllowedToRead = aclService.isGuestAllowedToRead();
-    const wikiType = isGuestAllowedToRead ? GrowiWikiType.open : GrowiWikiType.closed;
+    const wikiType = isGuestAllowedToRead
+      ? GrowiWikiType.open
+      : GrowiWikiType.closed;
 
     const baseInfo = {
       serviceInstanceId: configManager.getConfig('app:serviceInstanceId'),
@@ -98,8 +108,7 @@ export class GrowiInfoService {
     if (typeof optionsOrLegacyFlag === 'boolean') {
       // Legacy boolean parameter
       options = optionsOrLegacyFlag ? FULL_ADDITIONAL_INFO_OPTIONS : {};
-    }
-    else {
+    } else {
       // GrowiInfoOptions parameter
       options = optionsOrLegacyFlag;
     }
@@ -116,21 +125,28 @@ export class GrowiInfoService {
     } as IGrowiInfo<IGrowiAdditionalInfoResult<T>>;
   }
 
-  private async getAdditionalInfoByOptions<T extends GrowiInfoOptions>(options: T): Promise<IGrowiAdditionalInfoResult<T>> {
+  private async getAdditionalInfoByOptions<T extends GrowiInfoOptions>(
+    options: T,
+  ): Promise<IGrowiAdditionalInfoResult<T>> {
     const User = mongoose.model<IUser, Model<IUser>>('User');
     const Page = mongoose.model<PageDocument, PageModel>('Page');
 
     // Check if any option is enabled to determine if we should return additional info
-    const hasAnyOption = options.includeAttachmentInfo || options.includeInstalledInfo || options.includeUserCountInfo || options.includePageCountInfo;
+    const hasAnyOption =
+      options.includeAttachmentInfo ||
+      options.includeInstalledInfo ||
+      options.includeUserCountInfo ||
+      options.includePageCountInfo;
 
     if (!hasAnyOption) {
       return undefined as IGrowiAdditionalInfoResult<T>;
     }
 
     // Include attachment info (required for all additional info)
-    const activeExternalAccountTypes: IExternalAuthProviderType[] = Object.values(IExternalAuthProviderType).filter((type) => {
-      return configManager.getConfig(`security:passport-${type}:isEnabled`);
-    });
+    const activeExternalAccountTypes: IExternalAuthProviderType[] =
+      Object.values(IExternalAuthProviderType).filter((type) => {
+        return configManager.getConfig(`security:passport-${type}:isEnabled`);
+      });
 
     // Build result incrementally with proper typing
     const partialResult: Partial<{
@@ -148,12 +164,18 @@ export class GrowiInfoService {
 
     if (options.includeInstalledInfo) {
       // Get the oldest user who probably installed this GROWI.
-      const user = await User.findOne({ createdAt: { $ne: null } }).sort({ createdAt: 1 });
+      const user = await User.findOne({ createdAt: { $ne: null } }).sort({
+        createdAt: 1,
+      });
       const installedAtByOldestUser = user ? user.createdAt : null;
 
       const appInstalledConfig = await Config.findOne({ key: 'app:installed' });
       const oldestConfig = await Config.findOne().sort({ createdAt: 1 });
-      const installedAt = installedAtByOldestUser ?? appInstalledConfig?.createdAt ?? oldestConfig?.createdAt ?? null;
+      const installedAt =
+        installedAtByOldestUser ??
+        appInstalledConfig?.createdAt ??
+        oldestConfig?.createdAt ??
+        null;
 
       partialResult.installedAt = installedAt;
       partialResult.installedAtByOldestUser = installedAtByOldestUser;
@@ -161,7 +183,9 @@ export class GrowiInfoService {
 
     if (options.includeUserCountInfo) {
       const currentUsersCount = await User.countDocuments();
-      const currentActiveUsersCount = await (User as unknown as { countActiveUsers: () => Promise<number> }).countActiveUsers();
+      const currentActiveUsersCount = await (
+        User as unknown as { countActiveUsers: () => Promise<number> }
+      ).countActiveUsers();
 
       partialResult.currentUsersCount = currentUsersCount;
       partialResult.currentActiveUsersCount = currentActiveUsersCount;
@@ -175,7 +199,6 @@ export class GrowiInfoService {
 
     return partialResult as IGrowiAdditionalInfoResult<T>;
   }
-
 }
 
 export const growiInfoService = new GrowiInfoService();

+ 5 - 4
apps/app/src/server/service/import/construct-convert-map.integ.ts

@@ -1,15 +1,16 @@
 import type { EventEmitter } from 'events';
-
 import { mock } from 'vitest-mock-extended';
 
 import type Crowi from '~/server/crowi';
-import { setupIndependentModels, setupModelsDependentOnCrowi } from '~/server/crowi/setup-models';
+import {
+  setupIndependentModels,
+  setupModelsDependentOnCrowi,
+} from '~/server/crowi/setup-models';
 
 import { constructConvertMap } from './construct-convert-map';
 
 describe('constructConvertMap', () => {
-
-  beforeAll(async() => {
+  beforeAll(async () => {
     const events = {
       page: mock<EventEmitter>(),
       user: mock<EventEmitter>(),

+ 11 - 6
apps/app/src/server/service/import/construct-convert-map.ts

@@ -3,18 +3,20 @@ import mongoose from 'mongoose';
 import type { OverwriteFunction } from './overwrite-function';
 import { keepOriginal } from './overwrite-function';
 
-
 export type ConvertMap = {
   [collectionName: string]: {
-    [propertyName: string]: OverwriteFunction,
-  }
-}
+    [propertyName: string]: OverwriteFunction;
+  };
+};
 
 /**
  * Special conversion functions for problematic fields
  * Add entries here for fields that require custom handling during import
  */
-const SPECIAL_CONVERT_FUNCTIONS: Record<string, Record<string, OverwriteFunction>> = {
+const SPECIAL_CONVERT_FUNCTIONS: Record<
+  string,
+  Record<string, OverwriteFunction>
+> = {
   activities: {
     snapshot: (value: unknown) => value, // Skip SubdocumentPath casting to avoid Mongoose errors
   },
@@ -27,7 +29,10 @@ const SPECIAL_CONVERT_FUNCTIONS: Record<string, Record<string, OverwriteFunction
 /**
  * Get special conversion function for a specific collection.field combination
  */
-const getSpecialConvertFunction = (collectionName: string, propertyName: string): OverwriteFunction | null => {
+const getSpecialConvertFunction = (
+  collectionName: string,
+  propertyName: string,
+): OverwriteFunction | null => {
   return SPECIAL_CONVERT_FUNCTIONS[collectionName]?.[propertyName] ?? null;
 };
 

+ 12 - 8
apps/app/src/server/service/import/get-model-from-collection-name.ts

@@ -2,15 +2,19 @@ import type { Model } from 'mongoose';
 import mongoose from 'mongoose';
 
 /**
-   * get a model from collection name
-   *
-   * @memberOf GrowiBridgeService
-   * @param collectionName collection name
-   * @return instance of mongoose model
-   */
+ * get a model from collection name
+ *
+ * @memberOf GrowiBridgeService
+ * @param collectionName collection name
+ * @return instance of mongoose model
+ */
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
-export const getModelFromCollectionName = (collectionName: string): Model<any, unknown, unknown, unknown, any> | undefined => {
-  const models = mongoose.modelNames().map(modelName => mongoose.model(modelName));
+export const getModelFromCollectionName = (
+  collectionName: string,
+): Model<any, unknown, unknown, unknown, any> | undefined => {
+  const models = mongoose
+    .modelNames()
+    .map((modelName) => mongoose.model(modelName));
 
   const Model = Object.values(models).find((m) => {
     return m.collection != null && m.collection.name === collectionName;

+ 7 - 5
apps/app/src/server/service/import/import-settings.ts

@@ -2,10 +2,12 @@ import type { ImportMode } from '~/models/admin/import-mode';
 
 import type { OverwriteFunction } from './overwrite-function';
 
-export type OverwriteParams = { [propertyName: string]: OverwriteFunction | unknown }
+export type OverwriteParams = {
+  [propertyName: string]: OverwriteFunction | unknown;
+};
 
 export type ImportSettings = {
-  mode: ImportMode,
-  jsonFileName: string,
-  overwriteParams: OverwriteParams,
-}
+  mode: ImportMode;
+  jsonFileName: string;
+  overwriteParams: OverwriteParams;
+};

+ 9 - 8
apps/app/src/server/service/import/import.spec.ts

@@ -4,7 +4,6 @@ import type Crowi from '~/server/crowi';
 
 import { ImportService } from './import';
 
-
 const mocks = vi.hoisted(() => {
   return {
     constructConvertMapMock: vi.fn(),
@@ -19,20 +18,20 @@ vi.mock('./construct-convert-map', () => ({
   constructConvertMap: mocks.constructConvertMapMock,
 }));
 
-
 /**
  * Get private property from ImportService
  */
-const getPrivateProperty = <T>(importService: ImportService, propertyName: string): T => {
+const getPrivateProperty = <T>(
+  importService: ImportService,
+  propertyName: string,
+): T => {
   return importService[propertyName];
 };
 
-
 describe('ImportService', () => {
-
   let importService: ImportService;
 
-  beforeAll(async() => {
+  beforeAll(async () => {
     const crowiMock = mock<Crowi>({
       growiBridgeService: {
         getFile: vi.fn(),
@@ -44,7 +43,7 @@ describe('ImportService', () => {
   });
 
   describe('preImport', () => {
-    test('should call setupIndependentModels', async() => {
+    test('should call setupIndependentModels', async () => {
       // arrange
       const convertMapMock = mock();
       mocks.constructConvertMapMock.mockImplementation(() => convertMapMock);
@@ -55,7 +54,9 @@ describe('ImportService', () => {
       // assert
       expect(mocks.setupIndependentModelsMock).toHaveBeenCalledOnce();
       expect(mocks.constructConvertMapMock).toHaveBeenCalledOnce();
-      expect(getPrivateProperty(importService, 'convertMap')).toStrictEqual(convertMapMock);
+      expect(getPrivateProperty(importService, 'convertMap')).toStrictEqual(
+        convertMapMock,
+      );
     });
   });
 });

+ 189 - 103
apps/app/src/server/service/import/import.ts

@@ -1,16 +1,18 @@
-import fs from 'fs';
-import path from 'path';
-import type { EventEmitter } from 'stream';
-import { Writable, Transform } from 'stream';
-import { pipeline } from 'stream/promises';
-
-import JSONStream from 'JSONStream';
 import gc from 'expose-gc/function';
+import fs from 'fs';
 import type {
-  BulkWriteResult, MongoBulkWriteError, UnorderedBulkOperation, WriteError, BulkOperationBase,
+  BulkOperationBase,
+  BulkWriteResult,
+  MongoBulkWriteError,
+  UnorderedBulkOperation,
+  WriteError,
 } from 'mongodb';
 import type { Document } from 'mongoose';
 import mongoose from 'mongoose';
+import path from 'path';
+import type { EventEmitter } from 'stream';
+import { Transform, Writable } from 'stream';
+import { pipeline } from 'stream/promises';
 import unzipStream from 'unzip-stream';
 
 import { ImportMode } from '~/models/admin/import-mode';
@@ -23,35 +25,29 @@ import loggerFactory from '~/utils/logger';
 import CollectionProgressingStatus from '../../models/vo/collection-progressing-status';
 import { createBatchStream } from '../../util/batch-stream';
 import { configManager } from '../config-manager';
-
 import type { ConvertMap } from './construct-convert-map';
 import { constructConvertMap } from './construct-convert-map';
 import { getModelFromCollectionName } from './get-model-from-collection-name';
 import type { ImportSettings, OverwriteParams } from './import-settings';
 import { keepOriginal } from './overwrite-function';
 
+import JSONStream from 'JSONStream';
 
 const logger = loggerFactory('growi:services:ImportService'); // eslint-disable-line no-unused-vars
 
-
 const BULK_IMPORT_SIZE = 100;
 
-
 class ImportingCollectionError extends Error {
-
   collectionProgress: CollectionProgress;
 
   constructor(collectionProgress, error) {
     super(error);
     this.collectionProgress = collectionProgress;
   }
-
 }
 
-
 export class ImportService {
-
-  private modelCache: Map<string, { Model: any, schema: any }> = new Map();
+  private modelCache: Map<string, { Model: any; schema: any }> = new Map();
 
   private crowi: Crowi;
 
@@ -87,7 +83,9 @@ export class ImportService {
    * @return {object} info for zip files and whether currentProgressingStatus exists
    */
   async getStatus() {
-    const zipFiles = fs.readdirSync(this.baseDir).filter(file => path.extname(file) === '.zip');
+    const zipFiles = fs
+      .readdirSync(this.baseDir)
+      .filter((file) => path.extname(file) === '.zip');
 
     // process serially so as not to waste memory
     const zipFileStats: any[] = [];
@@ -100,10 +98,11 @@ export class ImportService {
     }
 
     // filter null object (broken zip)
-    const filtered = zipFileStats
-      .filter(zipFileStat => zipFileStat != null);
+    const filtered = zipFileStats.filter((zipFileStat) => zipFileStat != null);
     // sort with ctime("Change Time" - Time when file status was last changed (inode data modification).)
-    filtered.sort((a, b) => { return a.fileStat.ctime - b.fileStat.ctime });
+    filtered.sort((a, b) => {
+      return a.fileStat.ctime - b.fileStat.ctime;
+    });
 
     const zipFileStat = filtered.pop();
     let isTheSameVersion = false;
@@ -112,14 +111,12 @@ export class ImportService {
       try {
         this.validate(zipFileStat.meta);
         isTheSameVersion = true;
-      }
-      catch (err) {
+      } catch (err) {
         isTheSameVersion = false;
         logger.error('the versions are not met', err);
       }
     }
 
-
     return {
       isTheSameVersion,
       zipFileStat,
@@ -128,7 +125,6 @@ export class ImportService {
     };
   }
 
-
   async preImport() {
     await setupIndependentModels();
 
@@ -141,11 +137,16 @@ export class ImportService {
    * @param collections MongoDB collection name
    * @param importSettingsMap
    */
-  async import(collections: string[], importSettingsMap: Map<string, ImportSettings>): Promise<void> {
+  async import(
+    collections: string[],
+    importSettingsMap: Map<string, ImportSettings>,
+  ): Promise<void> {
     await this.preImport();
 
     // init status object
-    this.currentProgressingStatus = new CollectionProgressingStatus(collections);
+    this.currentProgressingStatus = new CollectionProgressingStatus(
+      collections,
+    );
 
     // process serially so as not to waste memory
     const promises = collections.map((collectionName) => {
@@ -158,11 +159,13 @@ export class ImportService {
     for await (const promise of promises) {
       try {
         await promise;
-      }
-      // catch ImportingCollectionError
-      catch (err) {
+      } catch (err) {
+        // catch ImportingCollectionError
         const { collectionProgress } = err;
-        logger.error(`failed to import to ${collectionProgress.collectionName}`, err);
+        logger.error(
+          `failed to import to ${collectionProgress.collectionName}`,
+          err,
+        );
         this.emitProgressEvent(collectionProgress, { message: err.message });
       }
     }
@@ -174,9 +177,11 @@ export class ImportService {
 
     const currentIsV5Compatible = configManager.getConfig('app:isV5Compatible');
     const isImportPagesCollection = collections.includes('pages');
-    const shouldNormalizePages = currentIsV5Compatible && isImportPagesCollection;
+    const shouldNormalizePages =
+      currentIsV5Compatible && isImportPagesCollection;
 
-    if (shouldNormalizePages) await this.crowi.pageService.normalizeAllPublicPages();
+    if (shouldNormalizePages)
+      await this.crowi.pageService.normalizeAllPublicPages();
 
     // Release caches after import process
     this.modelCache.clear();
@@ -188,15 +193,21 @@ export class ImportService {
    *
    * @memberOf ImportService
    */
-  protected async importCollection(collectionName: string, importSettings: ImportSettings): Promise<void> {
+  protected async importCollection(
+    collectionName: string,
+    importSettings: ImportSettings,
+  ): Promise<void> {
     if (this.currentProgressingStatus == null) {
-      throw new Error('Something went wrong: currentProgressingStatus is not initialized');
+      throw new Error(
+        'Something went wrong: currentProgressingStatus is not initialized',
+      );
     }
     // Avoid closure references by passing direct method references
     const collection = mongoose.connection.collection(collectionName);
 
     const { mode, jsonFileName, overwriteParams } = importSettings;
-    const collectionProgress = this.currentProgressingStatus.progressMap[collectionName];
+    const collectionProgress =
+      this.currentProgressingStatus.progressMap[collectionName];
 
     try {
       const jsonFile = this.getFile(jsonFileName);
@@ -210,7 +221,9 @@ export class ImportService {
       }
 
       // stream 1
-      const readStream = fs.createReadStream(jsonFile, { encoding: this.growiBridgeService.getEncoding() });
+      const readStream = fs.createReadStream(jsonFile, {
+        encoding: this.growiBridgeService.getEncoding(),
+      });
 
       // stream 2
       const jsonStream = JSONStream.parse('*');
@@ -220,12 +233,15 @@ export class ImportService {
         objectMode: true,
         transform(this: Transform, doc, encoding, callback) {
           try {
-          // Direct reference to convertDocuments
-            const converted = (importSettings as any).service.convertDocuments(collectionName, doc, overwriteParams);
+            // Direct reference to convertDocuments
+            const converted = (importSettings as any).service.convertDocuments(
+              collectionName,
+              doc,
+              overwriteParams,
+            );
             this.push(converted);
             callback();
-          }
-          catch (error) {
+          } catch (error) {
             callback(error);
           }
         },
@@ -237,33 +253,47 @@ export class ImportService {
       const batchStream = createBatchStream(BULK_IMPORT_SIZE);
       const writeStream = new Writable({
         objectMode: true,
-        write: async(batch, encoding, callback) => {
+        write: async (batch, encoding, callback) => {
           try {
             const unorderedBulkOp = collection.initializeUnorderedBulkOp();
             // documents are not persisted until unorderedBulkOp.execute()
             batch.forEach((document) => {
-              this.bulkOperate(unorderedBulkOp, collectionName, document, importSettings);
+              this.bulkOperate(
+                unorderedBulkOp,
+                collectionName,
+                document,
+                importSettings,
+              );
             });
 
             // exec
-            const { result, errors } = await this.execUnorderedBulkOpSafely(unorderedBulkOp);
+            const { result, errors } =
+              await this.execUnorderedBulkOpSafely(unorderedBulkOp);
             const {
-              insertedCount, modifiedCount, upsertedCount, matchedCount,
+              insertedCount,
+              modifiedCount,
+              upsertedCount,
+              matchedCount,
             } = result;
             const errorCount = errors?.length ?? 0;
 
             // For upsert operations, count matched documents as modified
-            const actualModifiedCount = importSettings.mode === ImportMode.upsert
-              ? (matchedCount || 0) // In upsert mode, matchedCount indicates documents that were found and potentially updated
-              : modifiedCount;
-
-            const actualInsertedCount = importSettings.mode === ImportMode.upsert
-              ? (upsertedCount || 0) // In upsert mode, upsertedCount indicates newly created documents
-              : insertedCount;
-
-            logger.debug(`Importing ${collectionName}. Inserted: ${actualInsertedCount}. Modified: ${actualModifiedCount}. Failed: ${errorCount}.`
-              + ` (Raw: inserted=${insertedCount}, modified=${modifiedCount}, upserted=${upsertedCount}, matched=${matchedCount})`);
-            const increment = actualInsertedCount + actualModifiedCount + errorCount;
+            const actualModifiedCount =
+              importSettings.mode === ImportMode.upsert
+                ? matchedCount || 0 // In upsert mode, matchedCount indicates documents that were found and potentially updated
+                : modifiedCount;
+
+            const actualInsertedCount =
+              importSettings.mode === ImportMode.upsert
+                ? upsertedCount || 0 // In upsert mode, upsertedCount indicates newly created documents
+                : insertedCount;
+
+            logger.debug(
+              `Importing ${collectionName}. Inserted: ${actualInsertedCount}. Modified: ${actualModifiedCount}. Failed: ${errorCount}.` +
+                ` (Raw: inserted=${insertedCount}, modified=${modifiedCount}, upserted=${upsertedCount}, matched=${matchedCount})`,
+            );
+            const increment =
+              actualInsertedCount + actualModifiedCount + errorCount;
             collectionProgress.currentCount += increment;
             collectionProgress.totalCount += increment;
             collectionProgress.insertedCount += actualInsertedCount;
@@ -273,13 +303,11 @@ export class ImportService {
             try {
               logger.info('global.gc() invoked.');
               gc();
-            }
-            catch (err) {
+            } catch (err) {
               logger.error('fail garbage collection: ', err);
             }
             callback();
-          }
-          catch (err) {
+          } catch (err) {
             logger.error('Error in writeStream:', err);
             callback(err);
           }
@@ -290,29 +318,41 @@ export class ImportService {
         },
       });
 
-      await pipeline(readStream, jsonStream, convertStream, batchStream, writeStream);
+      await pipeline(
+        readStream,
+        jsonStream,
+        convertStream,
+        batchStream,
+        writeStream,
+      );
 
       // Ensure final progress event is emitted even when no data was processed
       if (collectionProgress.currentCount === 0) {
-        logger.info(`No data processed for collection ${collectionName}. Emitting final progress event.`);
+        logger.info(
+          `No data processed for collection ${collectionName}. Emitting final progress event.`,
+        );
         this.emitProgressEvent(collectionProgress, null);
       }
 
       // clean up tmp directory
       fs.unlinkSync(jsonFile);
-    }
-    catch (err) {
+    } catch (err) {
       throw new ImportingCollectionError(collectionProgress, err);
     }
   }
 
-  validateImportSettings(collectionName: string, importSettings: ImportSettings): void {
+  validateImportSettings(
+    collectionName: string,
+    importSettings: ImportSettings,
+  ): void {
     const { mode } = importSettings;
 
     switch (collectionName) {
       case 'configs':
         if (mode !== ImportMode.flushAndInsert) {
-          throw new Error(`The specified mode '${mode}' is not allowed when importing to 'configs' collection.`);
+          throw new Error(
+            `The specified mode '${mode}' is not allowed when importing to 'configs' collection.`,
+          );
         }
         break;
     }
@@ -322,10 +362,10 @@ export class ImportService {
    * process bulk operation
    */
   bulkOperate(
-      bulk: UnorderedBulkOperation,
-      collectionName: string,
-      document: Record<string, unknown>,
-      importSettings: ImportSettings,
+    bulk: UnorderedBulkOperation,
+    collectionName: string,
+    document: Record<string, unknown>,
+    importSettings: ImportSettings,
   ): BulkOperationBase | void {
     // insert
     if (importSettings.mode !== ImportMode.upsert) {
@@ -346,11 +386,18 @@ export class ImportService {
    * @param {CollectionProgress} collectionProgress
    * @param {object} appendedErrors key: collection name, value: array of error object
    */
-  emitProgressEvent(collectionProgress: CollectionProgress, appendedErrors: any): void {
+  emitProgressEvent(
+    collectionProgress: CollectionProgress,
+    appendedErrors: any,
+  ): void {
     const { collectionName } = collectionProgress;
 
     // send event (in progress in global)
-    this.adminEvent.emit('onProgressForImport', { collectionName, collectionProgress, appendedErrors });
+    this.adminEvent.emit('onProgressForImport', {
+      collectionName,
+      collectionProgress,
+      appendedErrors,
+    });
   }
 
   /**
@@ -387,11 +434,12 @@ export class ImportService {
       if (fileName === this.growiBridgeService.getMetaFileName()) {
         // skip meta.json
         entry.autodrain();
-      }
-      else {
+      } else {
         const entryPromise = new Promise<string | null>((resolve) => {
           const jsonFile = path.join(this.baseDir, fileName);
-          const writeStream = fs.createWriteStream(jsonFile, { encoding: this.growiBridgeService.getEncoding() });
+          const writeStream = fs.createWriteStream(jsonFile, {
+            encoding: this.growiBridgeService.getEncoding(),
+          });
 
           pipeline(entry, writeStream)
             .then(() => resolve(jsonFile))
@@ -409,8 +457,11 @@ export class ImportService {
     const results = await Promise.allSettled(entryPromises);
 
     return results
-      .filter((result): result is PromiseFulfilledResult<string> => result.status === 'fulfilled' && result.value !== null)
-      .map(result => result.value);
+      .filter(
+        (result): result is PromiseFulfilledResult<string> =>
+          result.status === 'fulfilled' && result.value !== null,
+      )
+      .map((result) => result.value);
   }
 
   /**
@@ -418,13 +469,14 @@ export class ImportService {
    *
    * @memberOf ImportService
    */
-  async execUnorderedBulkOpSafely(unorderedBulkOp: UnorderedBulkOperation): Promise<{ result: BulkWriteResult, errors?: WriteError[] }> {
+  async execUnorderedBulkOpSafely(
+    unorderedBulkOp: UnorderedBulkOperation,
+  ): Promise<{ result: BulkWriteResult; errors?: WriteError[] }> {
     try {
       return {
         result: await unorderedBulkOp.execute(),
       };
-    }
-    catch (err) {
+    } catch (err) {
       const errTypeGuard = (err): err is MongoBulkWriteError => {
         return 'result' in err && 'writeErrors' in err;
       };
@@ -432,14 +484,21 @@ export class ImportService {
       if (errTypeGuard(err)) {
         return {
           result: err.result,
-          errors: Array.isArray(err.writeErrors) ? err.writeErrors : [err.writeErrors],
+          errors: Array.isArray(err.writeErrors)
+            ? err.writeErrors
+            : [err.writeErrors],
         };
       }
 
-      logger.error('Failed to execute unorderedBulkOp and the error could not handled.', err);
-      throw new Error('Failed to execute unorderedBulkOp and the error could not handled.', err);
+      logger.error(
+        'Failed to execute unorderedBulkOp and the error could not be handled.',
+        err,
+      );
+      throw new Error(
+        'Failed to execute unorderedBulkOp and the error could not be handled.',
+        { cause: err },
+      );
     }
-
   }
 
   /**
@@ -450,8 +509,12 @@ export class ImportService {
    * @param document document being imported
    * @returns document to be persisted
    */
-  convertDocuments<D extends Document>(collectionName: string, document: D, overwriteParams: OverwriteParams): D {
-  // Model and schema cache (optimization)
+  convertDocuments<D extends Document>(
+    collectionName: string,
+    document: D,
+    overwriteParams: OverwriteParams,
+  ): D {
+    // Model and schema cache (optimization)
     if (!this.modelCache) {
       this.modelCache = new Map();
     }
@@ -459,7 +522,7 @@ export class ImportService {
     let modelInfo = this.modelCache.get(collectionName);
     if (!modelInfo) {
       const Model = getModelFromCollectionName(collectionName);
-      const schema = (Model != null) ? Model.schema : undefined;
+      const schema = Model != null ? Model.schema : undefined;
       modelInfo = { Model, schema };
       this.modelCache.set(collectionName, modelInfo);
     }
@@ -468,26 +531,48 @@ export class ImportService {
     const convertMap = this.convertMap?.[collectionName];
 
     // Use shallow copy instead of structuredClone() when sufficient
-    const _document: D = (typeof document === 'object' && document !== null && !Array.isArray(document)) ? { ...document } : structuredClone(document);
+    const _document: D =
+      typeof document === 'object' &&
+      document !== null &&
+      !Array.isArray(document)
+        ? { ...document }
+        : structuredClone(document);
 
     Object.entries(document).forEach(([propertyName, value]) => {
       // Check if there's a custom convert function for this property, otherwise use keepOriginal
       const convertedValue = convertMap?.[propertyName];
-      const convertFunc = (convertedValue != null && typeof convertedValue === 'function') ? convertedValue : keepOriginal;
-
-      _document[propertyName] = convertFunc(value, { document, propertyName, schema });
+      const convertFunc =
+        convertedValue != null && typeof convertedValue === 'function'
+          ? convertedValue
+          : keepOriginal;
+
+      _document[propertyName] = convertFunc(value, {
+        document,
+        propertyName,
+        schema,
+      });
     });
 
     // overwrite documents with custom values
-    Object.entries(overwriteParams).forEach(([propertyName, overwriteValue]) => {
-      const value = document[propertyName];
-
-      // distinguish between null and undefined
-      if (value !== undefined) {
-        const overwriteFunc = (typeof overwriteValue === 'function') ? overwriteValue : null;
-        _document[propertyName] = (overwriteFunc != null) ? overwriteFunc(value, { document: _document, propertyName, schema }) : overwriteValue;
-      }
-    });
+    Object.entries(overwriteParams).forEach(
+      ([propertyName, overwriteValue]) => {
+        const value = document[propertyName];
+
+        // distinguish between null and undefined
+        if (value !== undefined) {
+          const overwriteFunc =
+            typeof overwriteValue === 'function' ? overwriteValue : null;
+          _document[propertyName] =
+            overwriteFunc != null
+              ? overwriteFunc(value, {
+                  document: _document,
+                  propertyName,
+                  schema,
+                })
+              : overwriteValue;
+        }
+      },
+    );
     return _document;
   }
 
@@ -501,7 +586,9 @@ export class ImportService {
    */
   validate(meta: any): void {
     if (meta.version !== getGrowiVersion()) {
-      throw new Error('The version of this GROWI and the uploaded GROWI data are not the same');
+      throw new Error(
+        'The version of this GROWI and the uploaded GROWI data are not the same',
+      );
     }
 
     // TODO: check if all migrations are completed
@@ -514,8 +601,7 @@ export class ImportService {
    */
   deleteAllZipFiles(): void {
     fs.readdirSync(this.baseDir)
-      .filter(file => path.extname(file) === '.zip')
-      .forEach(file => fs.unlinkSync(path.join(this.baseDir, file)));
+      .filter((file) => path.extname(file) === '.zip')
+      .forEach((file) => fs.unlinkSync(path.join(this.baseDir, file)));
   }
-
 }

+ 0 - 1
apps/app/src/server/service/import/index.ts

@@ -2,7 +2,6 @@ import type Crowi from '~/server/crowi';
 
 import { ImportService } from './import';
 
-
 let instance: ImportService;
 
 export const initializeImportService = (crowi: Crowi): void => {

+ 10 - 8
apps/app/src/server/service/import/overwrite-function.ts

@@ -1,18 +1,18 @@
 import { parseISO } from 'date-fns/parseISO';
 import isIsoDate from 'is-iso-date';
 import type { Schema } from 'mongoose';
-import {
-  Types, type Document,
-} from 'mongoose';
+import { type Document, Types } from 'mongoose';
 
 import loggerFactory from '~/utils/logger';
 
 const logger = loggerFactory('growi:service:import:overwrite-function');
 
-
 const { ObjectId } = Types;
 
-export type OverwriteFunction = (value: unknown, ctx: { document: Document, propertyName: string, schema?: Schema }) => unknown;
+export type OverwriteFunction = (
+  value: unknown,
+  ctx: { document: Document; propertyName: string; schema?: Schema },
+) => unknown;
 
 /**
  * keep original value
@@ -24,7 +24,10 @@ export type OverwriteFunction = (value: unknown, ctx: { document: Document, prop
  *
  * @see https://mongoosejs.com/docs/api/schematype.html#schematype_SchemaType-cast
  */
-export const keepOriginal: OverwriteFunction = (value, { document, schema, propertyName }) => {
+export const keepOriginal: OverwriteFunction = (
+  value,
+  { document, schema, propertyName },
+) => {
   if (value == null) {
     return value;
   }
@@ -40,8 +43,7 @@ export const keepOriginal: OverwriteFunction = (value, { document, schema, prope
 
     try {
       return schemaType.cast(value, document, true);
-    }
-    catch (e) {
+    } catch (e) {
       logger.warn(`Failed to cast value for ${propertyName}`, e);
       // return original value
       return value;

+ 0 - 1
apps/app/src/server/service/import/overwrite-params/attachmentFiles.chunks.ts

@@ -3,7 +3,6 @@ import { Types } from 'mongoose';
 
 import type { OverwriteParams } from '../import-settings';
 
-
 const { ObjectId } = Types;
 
 export const overwriteParams: OverwriteParams = {

+ 3 - 3
apps/app/src/server/service/import/overwrite-params/index.ts

@@ -2,7 +2,6 @@ import type { GrowiArchiveImportOption } from '~/models/admin/growi-archive-impo
 import { isImportOptionForPages } from '~/models/admin/import-option-for-pages';
 
 import type { OverwriteParams } from '../import-settings';
-
 import { overwriteParams as overwriteParamsForAttachmentFilesChunks } from './attachmentFiles.chunks';
 import { generateOverwriteParams as generateForPages } from './pages';
 import { generateOverwriteParams as generateForRevisions } from './revisions';
@@ -11,9 +10,10 @@ import { generateOverwriteParams as generateForRevisions } from './revisions';
  * generate overwrite params with overwrite-params/* modules
  */
 export const generateOverwriteParams = <OPT extends GrowiArchiveImportOption>(
-  collectionName: string, operatorUserId: string, option: OPT,
+  collectionName: string,
+  operatorUserId: string,
+  option: OPT,
 ): OverwriteParams => {
-
   switch (collectionName) {
     case 'pages':
       if (!isImportOptionForPages(option)) {

+ 4 - 1
apps/app/src/server/service/import/overwrite-params/pages.ts

@@ -7,7 +7,10 @@ import type { OverwriteParams } from '../import-settings';
 
 const { ObjectId } = Types;
 
-export const generateOverwriteParams = (operatorUserId: string, option: ImportOptionForPages): OverwriteParams => {
+export const generateOverwriteParams = (
+  operatorUserId: string,
+  option: ImportOptionForPages,
+): OverwriteParams => {
   const params: OverwriteParams = {};
 
   if (option.isOverwriteAuthorWithCurrentUser) {

+ 4 - 1
apps/app/src/server/service/import/overwrite-params/revisions.ts

@@ -6,7 +6,10 @@ import type { OverwriteParams } from '../import-settings';
 
 const { ObjectId } = Types;
 
-export const generateOverwriteParams = (operatorUserId: string, option: ImportOptionForPages): OverwriteParams => {
+export const generateOverwriteParams = (
+  operatorUserId: string,
+  option: ImportOptionForPages,
+): OverwriteParams => {
   const params: OverwriteParams = {};
 
   if (option.isOverwriteAuthorWithCurrentUser) {

+ 14 - 7
apps/app/src/server/service/in-app-notification/in-app-notification-utils.ts

@@ -1,27 +1,34 @@
-import type { IUser, IPage } from '@growi/core';
+import type { IPage, IUser } from '@growi/core';
 
 import type { IPageBulkExportJob } from '~/features/page-bulk-export/interfaces/page-bulk-export';
 import { SupportedTargetModel } from '~/interfaces/activity';
 import * as pageSerializers from '~/models/serializers/in-app-notification-snapshot/page';
 import * as pageBulkExportJobSerializers from '~/models/serializers/in-app-notification-snapshot/page-bulk-export-job';
 
-const isIPage = (targetModel: string, target: IUser | IPage | IPageBulkExportJob): target is IPage => {
+const isIPage = (
+  targetModel: string,
+  target: IUser | IPage | IPageBulkExportJob,
+): target is IPage => {
   return targetModel === SupportedTargetModel.MODEL_PAGE;
 };
 
-const isIPageBulkExportJob = (targetModel: string, target: IUser | IPage | IPageBulkExportJob): target is IPageBulkExportJob => {
+const isIPageBulkExportJob = (
+  targetModel: string,
+  target: IUser | IPage | IPageBulkExportJob,
+): target is IPageBulkExportJob => {
   return targetModel === SupportedTargetModel.MODEL_PAGE_BULK_EXPORT_JOB;
 };
 
 // snapshots are infos about the target that are displayed in the notification, which should not change on target update/deletion
-export const generateSnapshot = async(targetModel: string, target: IUser | IPage | IPageBulkExportJob): Promise<string | undefined> => {
-
+export const generateSnapshot = async (
+  targetModel: string,
+  target: IUser | IPage | IPageBulkExportJob,
+): Promise<string | undefined> => {
   let snapshot: string | undefined;
 
   if (isIPage(targetModel, target)) {
     snapshot = pageSerializers.stringifySnapshot(target);
-  }
-  else if (isIPageBulkExportJob(targetModel, target)) {
+  } else if (isIPageBulkExportJob(targetModel, target)) {
     snapshot = await pageBulkExportJobSerializers.stringifySnapshot(target);
   }
 

+ 4 - 4
apps/app/src/server/service/interfaces/export.ts

@@ -6,8 +6,8 @@ export type ZipFileStat = {
   zipFilePath: string;
   fileStat: Stats;
   innerFileStats: {
-      fileName: string;
-      collectionName: string;
-      size: number;
+    fileName: string;
+    collectionName: string;
+    size: number;
   }[];
-}
+};

+ 3 - 3
apps/app/src/server/service/interfaces/search.ts

@@ -1,4 +1,4 @@
 export type UpdateOrInsertPagesOpts = {
-  shouldEmitProgress?: boolean
-  invokeGarbageCollection?: boolean
-}
+  shouldEmitProgress?: boolean;
+  invokeGarbageCollection?: boolean;
+};

+ 2 - 2
apps/app/src/server/service/normalize-data/convert-null-to-empty-granted-arrays.ts

@@ -1,9 +1,9 @@
 import type { IPageHasId } from '@growi/core';
 import mongoose from 'mongoose';
 
-import { type PageModel } from '~/server/models/page';
+import type { PageModel } from '~/server/models/page';
 
-export const convertNullToEmptyGrantedArrays = async(): Promise<void> => {
+export const convertNullToEmptyGrantedArrays = async (): Promise<void> => {
   const Page = mongoose.model<IPageHasId, PageModel>('Page');
 
   const requests = [

+ 15 - 7
apps/app/src/server/service/normalize-data/convert-revision-page-id-to-objectid.ts

@@ -1,19 +1,25 @@
 // see: https://redmine.weseek.co.jp/issues/150649
 
-import { type IRevisionHasId } from '@growi/core';
+import type { IRevisionHasId } from '@growi/core';
 import type { FilterQuery, UpdateQuery } from 'mongoose';
 import mongoose from 'mongoose';
 
-import type { IRevisionDocument } from '~/server/models/revision';
-import { type IRevisionModel } from '~/server/models/revision';
+import type {
+  IRevisionDocument,
+  IRevisionModel,
+} from '~/server/models/revision';
 import loggerFactory from '~/utils/logger';
 
-const logger = loggerFactory('growi:service:NormalizeData:convert-revision-page-id-to-string');
+const logger = loggerFactory(
+  'growi:service:NormalizeData:convert-revision-page-id-to-string',
+);
 
-export const convertRevisionPageIdToObjectId = async(): Promise<void> => {
+export const convertRevisionPageIdToObjectId = async (): Promise<void> => {
   const Revision = mongoose.model<IRevisionHasId, IRevisionModel>('Revision');
 
-  const filter: FilterQuery<IRevisionDocument> = { pageId: { $type: 'string' } };
+  const filter: FilterQuery<IRevisionDocument> = {
+    pageId: { $type: 'string' },
+  };
 
   const update: UpdateQuery<IRevisionDocument> = [
     {
@@ -31,6 +37,8 @@ export const convertRevisionPageIdToObjectId = async(): Promise<void> => {
 
   await Revision.updateMany(filter, update);
 
-  const explain = await Revision.updateMany(filter, update).explain('queryPlanner');
+  const explain = await Revision.updateMany(filter, update).explain(
+    'queryPlanner',
+  );
   logger.debug(explain);
 };

+ 28 - 24
apps/app/src/server/service/normalize-data/delete-vector-stores-orphaned-from-ai-assistant.ts

@@ -4,31 +4,35 @@ import { isAiEnabled } from '~/features/openai/server/services/is-ai-enabled';
 import { getOpenaiService } from '~/features/openai/server/services/openai';
 import loggerFactory from '~/utils/logger';
 
-const logger = loggerFactory('growi:service:normalize-data:delete-vector-stores-orphaned-from-ai-assistant');
+const logger = loggerFactory(
+  'growi:service:normalize-data:delete-vector-stores-orphaned-from-ai-assistant',
+);
 
-export const deleteVectorStoresOrphanedFromAiAssistant = async(): Promise<void> => {
-  if (!isAiEnabled()) {
-    return;
-  }
+export const deleteVectorStoresOrphanedFromAiAssistant =
+  async (): Promise<void> => {
+    if (!isAiEnabled()) {
+      return;
+    }
 
-  // Identify VectorStoreRelation documents not related to existing aiAssistant documents as those used by old knowledge assistant
-  // Retrieve these VectorStoreRelation documents used by old knowledge assistant
-  // Only one active ({isDeleted: false}) VectorStoreRelation document should exist for old knowledge assistant, so only one should be returned
-  const aiAssistantVectorStoreIds = await AiAssistantModel.distinct('vectorStore');
-  const nonDeletedLegacyKnowledgeAssistantVectorStoreRelations = await VectorStoreRelationModel.find({
-    _id: { $nin: aiAssistantVectorStoreIds },
-    isDeleted: false,
-  });
+    // Identify VectorStoreRelation documents not related to existing aiAssistant documents as those used by old knowledge assistant
+    // Retrieve these VectorStoreRelation documents used by old knowledge assistant
+    // Only one active ({isDeleted: false}) VectorStoreRelation document should exist for old knowledge assistant, so only one should be returned
+    const aiAssistantVectorStoreIds =
+      await AiAssistantModel.distinct('vectorStore');
+    const nonDeletedLegacyKnowledgeAssistantVectorStoreRelations =
+      await VectorStoreRelationModel.find({
+        _id: { $nin: aiAssistantVectorStoreIds },
+        isDeleted: false,
+      });
 
-  // Logically delete only the VectorStore entities, leaving related documents to be automatically deleted by cron job
-  const openaiService = getOpenaiService();
-  for await (const vectorStoreRelation of nonDeletedLegacyKnowledgeAssistantVectorStoreRelations) {
-    try {
-      const vectorStoreFileRelationId = vectorStoreRelation._id;
-      await openaiService?.deleteVectorStore(vectorStoreFileRelationId);
-    }
-    catch (err) {
-      logger.error(err);
+    // Logically delete only the VectorStore entities, leaving related documents to be automatically deleted by cron job
+    const openaiService = getOpenaiService();
+    for await (const vectorStoreRelation of nonDeletedLegacyKnowledgeAssistantVectorStoreRelations) {
+      try {
+        const vectorStoreFileRelationId = vectorStoreRelation._id;
+        await openaiService?.deleteVectorStore(vectorStoreFileRelationId);
+      } catch (err) {
+        logger.error(err);
+      }
     }
-  }
-};
+  };

+ 1 - 1
apps/app/src/server/service/normalize-data/index.ts

@@ -8,7 +8,7 @@ import { renameDuplicateRootPages } from './rename-duplicate-root-pages';
 
 const logger = loggerFactory('growi:service:NormalizeData');
 
-export const normalizeData = async(): Promise<void> => {
+export const normalizeData = async (): Promise<void> => {
   await renameDuplicateRootPages();
   await convertRevisionPageIdToObjectId();
   await normalizeExpiredAtForThreadRelations();

+ 3 - 3
apps/app/src/server/service/normalize-data/rename-duplicate-root-pages.ts

@@ -1,11 +1,11 @@
 // see: https://github.com/growilabs/growi/issues/8337
 
-import { type IPageHasId } from '@growi/core';
+import type { IPageHasId } from '@growi/core';
 import mongoose from 'mongoose';
 
-import { type PageModel } from '~/server/models/page';
+import type { PageModel } from '~/server/models/page';
 
-export const renameDuplicateRootPages = async(): Promise<void> => {
+export const renameDuplicateRootPages = async (): Promise<void> => {
   const Page = mongoose.model<IPageHasId, PageModel>('Page');
   const rootPages = await Page.find({ path: '/' }).sort({ createdAt: 1 });
 

+ 0 - 8
biome.json

@@ -31,14 +31,6 @@
       "!apps/app/src/client",
       "!apps/app/src/server/middlewares",
       "!apps/app/src/server/routes/apiv3/*.js",
-      "!apps/app/src/server/service/file-uploader",
-      "!apps/app/src/server/service/global-notification",
-      "!apps/app/src/server/service/growi-bridge",
-      "!apps/app/src/server/service/growi-info",
-      "!apps/app/src/server/service/import",
-      "!apps/app/src/server/service/in-app-notification",
-      "!apps/app/src/server/service/interfaces",
-      "!apps/app/src/server/service/normalize-data",
       "!apps/app/src/server/service/page"
     ]
   },