Просмотр исходного кода

Merge pull request #10329 from growilabs/support/investigate-memory-leak-by-yuki

support: Improve memory leak
mergify[bot] 6 месяцев назад
Родитель
Commit
cadbf18e52

+ 1 - 1
apps/app/package.json

@@ -166,7 +166,7 @@
     "mkdirp": "^1.0.3",
     "mongodb": "^4.17.2",
     "mongoose": "^6.13.6",
-    "mongoose-gridfs": "^1.2.42",
+    "mongoose-gridfs": "^1.3.0",
     "mongoose-paginate-v2": "^1.3.9",
     "mongoose-unique-validator": "^2.0.3",
     "multer": "~1.4.0",

+ 22 - 31
apps/app/src/client/components/Admin/ImportData/GrowiArchive/ImportCollectionItem.jsx

@@ -1,7 +1,9 @@
 import React from 'react';
 
 import PropTypes from 'prop-types';
-import { Progress } from 'reactstrap';
+import {
+  Progress, UncontrolledDropdown, DropdownToggle, DropdownMenu, DropdownItem,
+} from 'reactstrap';
 
 import { GrowiArchiveImportOption } from '~/models/admin/growi-archive-import-option';
 
@@ -49,6 +51,8 @@ export default class ImportCollectionItem extends React.Component {
     onOptionChange(collectionName, { mode });
   }
 
+  // No toggle state needed when using UncontrolledDropdown
+
   configButtonClickedHandler() {
     const { collectionName, onConfigButtonClicked } = this.props;
 
@@ -103,40 +107,28 @@ export default class ImportCollectionItem extends React.Component {
     const {
       collectionName, option, isImporting,
     } = this.props;
-
-    const attrMap = MODE_ATTR_MAP[option.mode];
-    const btnColor = `btn-${attrMap.color}`;
-
+    const currentMode = option?.mode || 'insert';
+    const attrMap = MODE_ATTR_MAP[currentMode];
     const modes = MODE_RESTRICTED_COLLECTION[collectionName] || Object.keys(MODE_ATTR_MAP);
 
     return (
       <span className="d-inline-flex align-items-center">
         Mode:&nbsp;
-        <div className="dropdown d-inline-block">
-          <button
-            className={`btn ${btnColor} btn-sm dropdown-toggle`}
-            type="button"
-            id="ddmMode"
-            disabled={isImporting}
-            data-bs-toggle="dropdown"
-            aria-haspopup="true"
-            aria-expanded="true"
-          >
-            {this.renderModeLabel(option.mode)}
-            <span className="caret ms-2"></span>
-          </button>
-          <ul className="dropdown-menu" aria-labelledby="ddmMode">
-            { modes.map((mode) => {
-              return (
-                <li key={`buttonMode_${mode}`}>
-                  <button type="button" className="dropdown-item" role="button" onClick={() => this.modeSelectedHandler(mode)}>
-                    {this.renderModeLabel(mode, true)}
-                  </button>
-                </li>
-              );
-            }) }
-          </ul>
-        </div>
+        <UncontrolledDropdown size="sm" className="d-inline-block">
+          <DropdownToggle color={attrMap.color} caret disabled={isImporting} id={`ddmMode-${collectionName}`}>
+            {this.renderModeLabel(currentMode)}
+          </DropdownToggle>
+          <DropdownMenu>
+            {modes.map(mode => (
+              <DropdownItem
+                key={`buttonMode_${mode}`}
+                onClick={() => this.modeSelectedHandler(mode)}
+              >
+                {this.renderModeLabel(mode, true)}
+              </DropdownItem>
+            ))}
+          </DropdownMenu>
+        </UncontrolledDropdown>
       </span>
     );
   }
@@ -190,7 +182,6 @@ export default class ImportCollectionItem extends React.Component {
         }
       </div>
     );
-
   }
 
   render() {

+ 4 - 2
apps/app/src/server/crowi/index.js

@@ -32,7 +32,7 @@ import instanciateExportService from '../service/export';
 import instanciateExternalAccountService from '../service/external-account';
 import { FileUploader, getUploader } from '../service/file-uploader'; // eslint-disable-line no-unused-vars
 import { G2GTransferPusherService, G2GTransferReceiverService } from '../service/g2g-transfer';
-import GrowiBridgeService from '../service/growi-bridge';
+import { GrowiBridgeService } from '../service/growi-bridge';
 import { initializeImportService } from '../service/import';
 import { InstallerService } from '../service/installer';
 import { normalizeData } from '../service/normalize-data';
@@ -82,6 +82,9 @@ class Crowi {
   /** @type {import('../service/growi-info').GrowiInfoService} */
   growiInfoService;
 
+  /** @type {import('../service/growi-bridge').GrowiBridgeService} */
+  growiBridgeService;
+
   /** @type {import('../service/page').IPageService} */
   pageService;
 
@@ -134,7 +137,6 @@ class Crowi {
     this.aclService = null;
     this.appService = null;
     this.fileUploadService = null;
-    this.growiBridgeService = null;
     this.pluginService = null;
     this.searchService = null;
     this.socketIoService = null;

+ 3 - 2
apps/app/src/server/routes/apiv3/g2g-transfer.ts

@@ -1,19 +1,20 @@
 import { createReadStream } from 'fs';
 import path from 'path';
 
+import { SCOPE } from '@growi/core/dist/interfaces';
 import { ErrorV3 } from '@growi/core/dist/models';
 import type { NextFunction, Request, Router } from 'express';
 import express from 'express';
 import { body } from 'express-validator';
 import multer from 'multer';
 
-import { SCOPE } from '@growi/core/dist/interfaces';
 import { accessTokenParser } from '~/server/middlewares/access-token-parser';
 import { isG2GTransferError } from '~/server/models/vo/g2g-transfer-error';
 import { configManager } from '~/server/service/config-manager';
 import { exportService } from '~/server/service/export';
 import type { IDataGROWIInfo } from '~/server/service/g2g-transfer';
 import { X_GROWI_TRANSFER_KEY_HEADER_NAME } from '~/server/service/g2g-transfer';
+import type { ImportSettings } from '~/server/service/import';
 import { getImportService } from '~/server/service/import';
 import loggerFactory from '~/utils/logger';
 import { TransferKey } from '~/utils/vo/transfer-key';
@@ -313,7 +314,7 @@ module.exports = (crowi: Crowi): Router => {
     /*
      * generate maps of ImportSettings to import
      */
-    let importSettingsMap;
+    let importSettingsMap: Map<string, ImportSettings>;
     try {
       importSettingsMap = g2gTransferReceiverService.getImportSettingMap(innerFileStats, optionsMap, operatorUserId);
     }

+ 23 - 17
apps/app/src/server/routes/apiv3/import.js → apps/app/src/server/routes/apiv3/import.ts

@@ -1,14 +1,18 @@
+import { SCOPE } from '@growi/core/dist/interfaces';
 import { ErrorV3 } from '@growi/core/dist/models';
 
 import { SupportedAction } from '~/interfaces/activity';
-import { SCOPE } from '@growi/core/dist/interfaces';
+import type { GrowiArchiveImportOption } from '~/models/admin/growi-archive-import-option';
+import type Crowi from '~/server/crowi';
 import { accessTokenParser } from '~/server/middlewares/access-token-parser';
+import type { ImportSettings } from '~/server/service/import';
 import { getImportService } from '~/server/service/import';
 import { generateOverwriteParams } from '~/server/service/import/overwrite-params';
 import loggerFactory from '~/utils/logger';
 
 import { generateAddActivityMiddleware } from '../../middlewares/add-activity';
 
+
 const logger = loggerFactory('growi:routes:apiv3:import'); // eslint-disable-line no-unused-vars
 
 const path = require('path');
@@ -122,14 +126,13 @@ const router = express.Router();
  *                  type: integer
  *                  nullable: true
  */
-/** @param {import('~/server/crowi').default} crowi Crowi instance */
-export default function route(crowi) {
+export default function route(crowi: Crowi): void {
   const { growiBridgeService, socketIoService } = crowi;
-  const importService = getImportService(crowi);
+  const importService = getImportService();
 
   const loginRequired = require('../../middlewares/login-required')(crowi);
   const adminRequired = require('../../middlewares/admin-required')(crowi);
-  const addActivity = generateAddActivityMiddleware(crowi);
+  const addActivity = generateAddActivityMiddleware();
 
   const adminEvent = crowi.event('admin');
   const activityEvent = crowi.event('activity');
@@ -312,18 +315,22 @@ export default function route(crowi) {
     /*
      * unzip, parse
      */
-    let meta = null;
-    let fileStatsToImport = null;
+    let meta;
+    let fileStatsToImport;
     try {
       // unzip
       await importService.unzip(zipFile);
 
       // eslint-disable-next-line no-unused-vars
-      const { meta: parsedMeta, fileStats, innerFileStats } = await growiBridgeService.parseZipFile(zipFile);
-      meta = parsedMeta;
+      const parseZipResult = await growiBridgeService.parseZipFile(zipFile);
+      if (parseZipResult == null) {
+        throw new Error('parseZipFile returns null');
+      }
+
+      meta = parseZipResult.meta;
 
       // filter innerFileStats
-      fileStatsToImport = innerFileStats.filter(({ fileName, collectionName, size }) => {
+      fileStatsToImport = parseZipResult.innerFileStats.filter(({ collectionName }) => {
         return collections.includes(collectionName);
       });
     }
@@ -346,21 +353,20 @@ export default function route(crowi) {
     }
 
     // generate maps of ImportSettings to import
-    const importSettingsMap = {};
+    // Use the Map for a potential fix for the code scanning alert no. 895: Prototype-polluting assignment
+    const importSettingsMap = new Map<string, ImportSettings>();
     fileStatsToImport.forEach(({ fileName, collectionName }) => {
       // instanciate GrowiArchiveImportOption
-      /** @type {import('~/models/admin/growi-archive-import-option').GrowiArchiveImportOption} */
-      const option = options.find(opt => opt.collectionName === collectionName);
+      const option: GrowiArchiveImportOption = options.find(opt => opt.collectionName === collectionName);
 
       // generate options
-      /** @type {import('~/server/service/import').ImportSettings} */
       const importSettings = {
         mode: option.mode,
         jsonFileName: fileName,
         overwriteParams: generateOverwriteParams(collectionName, req.user._id, option),
-      };
+      } satisfies ImportSettings;
 
-      importSettingsMap[collectionName] = importSettings;
+      importSettingsMap.set(collectionName, importSettings);
     });
 
     /*
@@ -411,7 +417,7 @@ export default function route(crowi) {
     async(req, res) => {
       const { file } = req;
       const zipFile = importService.getFile(file.filename);
-      let data = null;
+      let data;
 
       try {
         data = await growiBridgeService.parseZipFile(zipFile);

+ 3 - 3
apps/app/src/server/routes/attachment/get.ts

@@ -11,7 +11,7 @@ import type { CrowiProperties, CrowiRequest } from '~/interfaces/crowi-request';
 import { ResponseMode, type ExpressHttpHeader, type RespondOptions } from '~/server/interfaces/attachment';
 import {
   type FileUploader,
-  toExpressHttpHeaders, ContentHeaders, applyHeaders,
+  toExpressHttpHeaders, applyHeaders, createContentHeaders,
 } from '~/server/service/file-uploader';
 import loggerFactory from '~/utils/logger';
 
@@ -110,8 +110,8 @@ const respondForRedirectMode = async(res: Response, fileUploadService: FileUploa
 const respondForRelayMode = async(res: Response, fileUploadService: FileUploader, attachment: IAttachmentDocument, opts?: RespondOptions): Promise<void> => {
   // apply content-* headers before response
   const isDownload = opts?.download ?? false;
-  const contentHeaders = new ContentHeaders(attachment, { inline: !isDownload });
-  applyHeaders(res, contentHeaders.toExpressHttpHeaders());
+  const contentHeaders = createContentHeaders(attachment, { inline: !isDownload });
+  applyHeaders(res, contentHeaders);
 
   try {
     const readable = await fileUploadService.findDeliveryFile(attachment);

+ 5 - 0
apps/app/src/server/service/config-manager/config-definition.ts

@@ -50,6 +50,7 @@ export const CONFIG_KEYS = [
   'app:aiEnabled',
   'app:publishOpenAPI',
   'app:maxFileSize',
+  'app:fileUploadTimeout',
   'app:fileUploadTotalLimit',
   'app:fileUploadDisabled',
   'app:elasticsearchVersion',
@@ -429,6 +430,10 @@ export const CONFIG_DEFINITIONS = {
     envVarName: 'MAX_FILE_SIZE',
     defaultValue: Infinity,
   }),
+  'app:fileUploadTimeout': defineConfig<number>({
+    envVarName: 'FILE_UPLOAD_TIMEOUT',
+    defaultValue: 10 * 60 * 1000, // 10 minutes
+  }),
   'app:fileUploadTotalLimit': defineConfig<number>({
     envVarName: 'FILE_UPLOAD_TOTAL_LIMIT',
     defaultValue: Infinity,

+ 1 - 1
apps/app/src/server/service/export.ts

@@ -13,7 +13,7 @@ import CollectionProgressingStatus from '../models/vo/collection-progressing-sta
 
 import type AppService from './app';
 import { configManager } from './config-manager';
-import type GrowiBridgeService from './growi-bridge';
+import type { GrowiBridgeService } from './growi-bridge';
 import { growiInfoService } from './growi-info';
 import type { ZipFileStat } from './interfaces/export';
 

+ 59 - 20
apps/app/src/server/service/file-uploader/aws/index.ts

@@ -28,7 +28,7 @@ import { configManager } from '../../config-manager';
 import {
   AbstractFileUploader, type TemporaryUrl, type SaveFileParam,
 } from '../file-uploader';
-import { ContentHeaders } from '../utils';
+import { createContentHeaders, getContentHeaderValue } from '../utils';
 
 import { AwsMultipartUploader } from './multipart-uploader';
 
@@ -44,6 +44,8 @@ interface FileMeta {
   size: number;
 }
 
+// Cache holder to avoid repeated instantiation of S3 client
+let cachedS3Client: { configKey: string, client: S3Client } | null = null;
 const isFileExists = async(s3: S3Client, params: HeadObjectCommandInput) => {
   try {
     await s3.send(new HeadObjectCommand(params));
@@ -86,12 +88,21 @@ const getS3Bucket = (): NonBlankString | undefined => {
 };
 
 const S3Factory = (): S3Client => {
+  // Cache key based on configuration values to detect changes
   const accessKeyId = configManager.getConfig('aws:s3AccessKeyId');
   const secretAccessKey = configManager.getConfig('aws:s3SecretAccessKey');
   const s3Region = toNonBlankStringOrUndefined(configManager.getConfig('aws:s3Region')); // Blank strings may remain in the DB, so convert with toNonBlankStringOrUndefined for safety
   const s3CustomEndpoint = toNonBlankStringOrUndefined(configManager.getConfig('aws:s3CustomEndpoint'));
 
-  return new S3Client({
+  const configKey = `${accessKeyId ?? ''}|${secretAccessKey ?? ''}|${s3Region ?? ''}|${s3CustomEndpoint ?? ''}`;
+
+  // Return cached client if configuration hasn't changed
+  if (cachedS3Client != null && cachedS3Client.configKey === configKey) {
+    return cachedS3Client.client;
+  }
+
+  // Create new client instance with connection pooling optimizations
+  const client = new S3Client({
     credentials: accessKeyId != null && secretAccessKey != null
       ? {
         accessKeyId,
@@ -102,6 +113,10 @@ const S3Factory = (): S3Client => {
     endpoint: s3CustomEndpoint,
     forcePathStyle: s3CustomEndpoint != null, // s3ForcePathStyle renamed to forcePathStyle in v3
   });
+
+  // Cache the new client
+  cachedS3Client = { configKey, client };
+  return client;
 };
 
 const getFilePathOnStorage = (attachment: IAttachmentDocument) => {
@@ -177,17 +192,38 @@ class AwsFileUploader extends AbstractFileUploader {
     const s3 = S3Factory();
 
     const filePath = getFilePathOnStorage(attachment);
-    const contentHeaders = new ContentHeaders(attachment);
+    const contentHeaders = createContentHeaders(attachment);
 
-    await s3.send(new PutObjectCommand({
-      Bucket: getS3Bucket(),
-      Key: filePath,
-      Body: readable,
-      ACL: getS3PutObjectCannedAcl(),
-      // put type and the file name for reference information when uploading
-      ContentType: contentHeaders.contentType?.value.toString(),
-      ContentDisposition: contentHeaders.contentDisposition?.value.toString(),
-    }));
+    try {
+      const uploadTimeout = configManager.getConfig('app:fileUploadTimeout');
+
+      await s3.send(
+        new PutObjectCommand({
+          Bucket: getS3Bucket(),
+          Key: filePath,
+          Body: readable,
+          ACL: getS3PutObjectCannedAcl(),
+          // put type and the file name for reference information when uploading
+          ContentType: getContentHeaderValue(contentHeaders, 'Content-Type'),
+          ContentDisposition: getContentHeaderValue(contentHeaders, 'Content-Disposition'),
+        }),
+        { abortSignal: AbortSignal.timeout(uploadTimeout) },
+      );
+
+      logger.debug(`File upload completed successfully: fileName=${attachment.fileName}`);
+    }
+    catch (error) {
+      // Handle timeout error specifically
+      if (error.name === 'AbortError') {
+        logger.warn(`Upload timeout: fileName=${attachment.fileName}`, error);
+      }
+      else {
+        logger.error(`File upload failed: fileName=${attachment.fileName}`, error);
+      }
+      // Re-throw the error to be handled by the caller.
+      // The pipeline automatically handles stream cleanup on error.
+      throw error;
+    }
   }
 
   /**
@@ -205,7 +241,7 @@ class AwsFileUploader extends AbstractFileUploader {
       throw new Error('AWS is not configured.');
     }
 
-    const s3 = S3Factory();
+    const s3 = S3Factory(); // Use singleton client
     const filePath = getFilePathOnStorage(attachment);
 
     const params = {
@@ -220,20 +256,20 @@ class AwsFileUploader extends AbstractFileUploader {
     }
 
     try {
-      const body = (await s3.send(new GetObjectCommand(params))).Body;
+      const response = await s3.send(new GetObjectCommand(params));
+      const body = response.Body;
 
       if (body == null) {
         throw new Error(`S3 returned null for the Attachment (${filePath})`);
       }
 
-      // eslint-disable-next-line no-nested-ternary
       return 'stream' in body
         ? body.stream() as unknown as NodeJS.ReadableStream // get stream from Blob and cast force
         : body as unknown as NodeJS.ReadableStream; // cast force
     }
     catch (err) {
-      logger.error(err);
-      throw new Error(`Coudn't get file from AWS for the Attachment (${attachment._id.toString()})`);
+      logger.error(`Failed to get file from AWS S3 for attachment ${attachment._id.toString()}:`, err);
+      throw new Error(`Couldn't get file from AWS for the Attachment (${attachment._id.toString()})`);
     }
   }
 
@@ -252,12 +288,12 @@ class AwsFileUploader extends AbstractFileUploader {
     // issue signed url (default: expires 120 seconds)
     // https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#getSignedUrl-property
     const isDownload = opts?.download ?? false;
-    const contentHeaders = new ContentHeaders(attachment, { inline: !isDownload });
+    const contentHeaders = createContentHeaders(attachment, { inline: !isDownload });
     const params: GetObjectCommandInput = {
       Bucket: getS3Bucket(),
       Key: filePath,
-      ResponseContentType: contentHeaders.contentType?.value.toString(),
-      ResponseContentDisposition: contentHeaders.contentDisposition?.value.toString(),
+      ResponseContentType: getContentHeaderValue(contentHeaders, 'Content-Type'),
+      ResponseContentDisposition: getContentHeaderValue(contentHeaders, 'Content-Disposition'),
     };
     const signedUrl = await getSignedUrl(s3, new GetObjectCommand(params), {
       expiresIn: lifetimeSecForTemporaryUrl,
@@ -282,12 +318,15 @@ class AwsFileUploader extends AbstractFileUploader {
         Key: uploadKey,
         UploadId: uploadId,
       }));
+      logger.debug(`Successfully aborted multipart upload: uploadKey=${uploadKey}, uploadId=${uploadId}`);
     }
     catch (e) {
       // allow duplicate abort requests to ensure abortion
       if (e.response?.status !== 404) {
+        logger.error(`Failed to abort multipart upload: uploadKey=${uploadKey}, uploadId=${uploadId}`, e);
         throw e;
       }
+      logger.debug(`Multipart upload already aborted: uploadKey=${uploadKey}, uploadId=${uploadId}`);
     }
   }
 

+ 81 - 16
apps/app/src/server/service/file-uploader/azure.ts

@@ -29,7 +29,7 @@ import { configManager } from '../config-manager';
 import {
   AbstractFileUploader, type TemporaryUrl, type SaveFileParam,
 } from './file-uploader';
-import { ContentHeaders } from './utils';
+import { createContentHeaders, getContentHeaderValue } from './utils';
 
 const urljoin = require('url-join');
 
@@ -45,6 +45,11 @@ type AzureConfig = {
   containerName: string,
 }
 
+// Cache holders to avoid repeated instantiation of credential and clients
+let cachedCredential: { key: string, credential: TokenCredential } | null = null;
+let cachedBlobServiceClient: { key: string, client: BlobServiceClient } | null = null;
+let cachedContainerClient: { key: string, client: ContainerClient } | null = null;
+
 
 function getAzureConfig(): AzureConfig {
   const accountName = configManager.getConfig('azure:storageAccountName');
@@ -61,6 +66,7 @@ function getAzureConfig(): AzureConfig {
 }
 
 function getCredential(): TokenCredential {
+  // Build cache key from credential-related configs
   const tenantId = toNonBlankStringOrUndefined(configManager.getConfig('azure:tenantId'));
   const clientId = toNonBlankStringOrUndefined(configManager.getConfig('azure:clientId'));
   const clientSecret = toNonBlankStringOrUndefined(configManager.getConfig('azure:clientSecret'));
@@ -69,13 +75,52 @@ function getCredential(): TokenCredential {
     throw new Error(`Azure Blob Storage missing required configuration: tenantId=${tenantId}, clientId=${clientId}, clientSecret=${clientSecret}`);
   }
 
-  return new ClientSecretCredential(tenantId, clientId, clientSecret);
+  const key = `${tenantId}|${clientId}|${clientSecret}`;
+
+  // Reuse cached credential when config has not changed
+  if (cachedCredential != null && cachedCredential.key === key) {
+    return cachedCredential.credential;
+  }
+
+  const credential = new ClientSecretCredential(tenantId, clientId, clientSecret);
+  cachedCredential = { key, credential };
+  return credential;
+}
+
+function getBlobServiceClient(): BlobServiceClient {
+  const { accountName } = getAzureConfig();
+  // Include credential cache key to ensure we re-create if cred changed
+  const credential = getCredential();
+  const credentialKey = (cachedCredential?.key) ?? 'unknown-cred';
+  const key = `${accountName}|${credentialKey}`;
+
+  if (cachedBlobServiceClient != null && cachedBlobServiceClient.key === key) {
+    return cachedBlobServiceClient.client;
+  }
+
+  // Use keep-alive to minimize socket churn; reuse client across calls
+  const client = new BlobServiceClient(
+    `https://${accountName}.blob.core.windows.net`,
+    credential,
+    { keepAliveOptions: { enable: true } },
+  );
+  cachedBlobServiceClient = { key, client };
+  return client;
 }
 
 async function getContainerClient(): Promise<ContainerClient> {
   const { accountName, containerName } = getAzureConfig();
-  const blobServiceClient = new BlobServiceClient(`https://${accountName}.blob.core.windows.net`, getCredential());
-  return blobServiceClient.getContainerClient(containerName);
+  const credentialKey = (cachedCredential?.key) ?? 'unknown-cred';
+  const key = `${accountName}|${containerName}|${credentialKey}`;
+
+  if (cachedContainerClient != null && cachedContainerClient.key === key) {
+    return cachedContainerClient.client;
+  }
+
+  const blobServiceClient = getBlobServiceClient();
+  const client = blobServiceClient.getContainerClient(containerName);
+  cachedContainerClient = { key, client };
+  return client;
 }
 
 function getFilePathOnStorage(attachment: IAttachmentDocument) {
@@ -132,15 +177,34 @@ class AzureFileUploader extends AbstractFileUploader {
     const filePath = getFilePathOnStorage(attachment);
     const containerClient = await getContainerClient();
     const blockBlobClient: BlockBlobClient = containerClient.getBlockBlobClient(filePath);
-    const contentHeaders = new ContentHeaders(attachment);
+    const contentHeaders = createContentHeaders(attachment);
 
-    await blockBlobClient.uploadStream(readable, undefined, undefined, {
-      blobHTTPHeaders: {
-        // put type and the file name for reference information when uploading
-        blobContentType: contentHeaders.contentType?.value.toString(),
-        blobContentDisposition: contentHeaders.contentDisposition?.value.toString(),
-      },
-    });
+    try {
+      const uploadTimeout = configManager.getConfig('app:fileUploadTimeout');
+
+      await blockBlobClient.uploadStream(readable, undefined, undefined, {
+        blobHTTPHeaders: {
+          // put type and the file name for reference information when uploading
+          blobContentType: getContentHeaderValue(contentHeaders, 'Content-Type'),
+          blobContentDisposition: getContentHeaderValue(contentHeaders, 'Content-Disposition'),
+        },
+        abortSignal: AbortSignal.timeout(uploadTimeout),
+      });
+
+      logger.debug(`File upload completed successfully: fileName=${attachment.fileName}`);
+    }
+    catch (error) {
+      // Handle timeout error specifically
+      if (error.name === 'AbortError') {
+        logger.warn(`Upload timeout: fileName=${attachment.fileName}`, error);
+      }
+      else {
+        logger.error(`File upload failed: fileName=${attachment.fileName}`, error);
+      }
+      // Re-throw the error to be handled by the caller.
+      // The pipeline automatically handles stream cleanup on error.
+      throw error;
+    }
   }
 
   /**
@@ -202,7 +266,8 @@ class AzureFileUploader extends AbstractFileUploader {
 
     const sasToken = await (async() => {
       const { accountName, containerName } = getAzureConfig();
-      const blobServiceClient = new BlobServiceClient(`https://${accountName}.blob.core.windows.net`, getCredential());
+      // Reuse the same BlobServiceClient (singleton)
+      const blobServiceClient = getBlobServiceClient();
 
       const now = Date.now();
       const startsOn = new Date(now - 30 * 1000);
@@ -210,7 +275,7 @@ class AzureFileUploader extends AbstractFileUploader {
       const userDelegationKey = await blobServiceClient.getUserDelegationKey(startsOn, expiresOn);
 
       const isDownload = opts?.download ?? false;
-      const contentHeaders = new ContentHeaders(attachment, { inline: !isDownload });
+      const contentHeaders = createContentHeaders(attachment, { inline: !isDownload });
 
       // https://github.com/Azure/azure-sdk-for-js/blob/d4d55f73/sdk/storage/storage-blob/src/ContainerSASPermissions.ts#L24
       // r:read, a:add, c:create, w:write, d:delete, l:list
@@ -221,8 +286,8 @@ class AzureFileUploader extends AbstractFileUploader {
         protocol: SASProtocol.HttpsAndHttp,
         startsOn,
         expiresOn,
-        contentType: contentHeaders.contentType?.value.toString(),
-        contentDisposition: contentHeaders.contentDisposition?.value.toString(),
+        contentType: getContentHeaderValue(contentHeaders, 'Content-Type'),
+        contentDisposition: getContentHeaderValue(contentHeaders, 'Content-Disposition'),
       };
 
       return generateBlobSASQueryParameters(sasOptions, userDelegationKey, accountName).toString();

+ 34 - 10
apps/app/src/server/service/file-uploader/gcs/index.ts

@@ -17,7 +17,7 @@ import { configManager } from '../../config-manager';
 import {
   AbstractFileUploader, type TemporaryUrl, type SaveFileParam,
 } from '../file-uploader';
-import { ContentHeaders } from '../utils';
+import { createContentHeaders, getContentHeaderValue } from '../utils';
 
 import { GcsMultipartUploader } from './multipart-uploader';
 
@@ -131,14 +131,38 @@ class GcsFileUploader extends AbstractFileUploader {
     const gcs = getGcsInstance();
     const myBucket = gcs.bucket(getGcsBucket());
     const filePath = getFilePathOnStorage(attachment);
-    const contentHeaders = new ContentHeaders(attachment);
+    const contentHeaders = createContentHeaders(attachment);
 
     const file = myBucket.file(filePath);
-
-    await pipeline(readable, file.createWriteStream({
+    const writeStream = file.createWriteStream({
       // put type and the file name for reference information when uploading
-      contentType: contentHeaders.contentType?.value.toString(),
-    }));
+      contentType: getContentHeaderValue(contentHeaders, 'Content-Type'),
+    });
+
+    try {
+      const uploadTimeout = configManager.getConfig('app:fileUploadTimeout');
+
+      // Use AbortSignal.timeout() for robust timeout handling (Node.js 16+)
+      await pipeline(
+        readable,
+        writeStream,
+        { signal: AbortSignal.timeout(uploadTimeout) },
+      );
+
+      logger.debug(`File upload completed successfully: fileName=${attachment.fileName}`);
+    }
+    catch (error) {
+      // Handle timeout error specifically
+      if (error.name === 'AbortError') {
+        logger.warn(`Upload timeout: fileName=${attachment.fileName}`, error);
+      }
+      else {
+        logger.error(`File upload failed: fileName=${attachment.fileName}`, error);
+      }
+      // Re-throw the error to be handled by the caller.
+      // The pipeline automatically handles stream cleanup on error.
+      throw error;
+    }
   }
 
   /**
@@ -172,7 +196,7 @@ class GcsFileUploader extends AbstractFileUploader {
     }
     catch (err) {
       logger.error(err);
-      throw new Error(`Coudn't get file from AWS for the Attachment (${attachment._id.toString()})`);
+      throw new Error(`Coudn't get file from GCS for the Attachment (${attachment._id.toString()})`);
     }
   }
 
@@ -193,12 +217,12 @@ class GcsFileUploader extends AbstractFileUploader {
     // issue signed url (default: expires 120 seconds)
     // https://cloud.google.com/storage/docs/access-control/signed-urls
     const isDownload = opts?.download ?? false;
-    const contentHeaders = new ContentHeaders(attachment, { inline: !isDownload });
+    const contentHeaders = createContentHeaders(attachment, { inline: !isDownload });
     const [signedUrl] = await file.getSignedUrl({
       action: 'read',
       expires: Date.now() + lifetimeSecForTemporaryUrl * 1000,
-      responseType: contentHeaders.contentType?.value.toString(),
-      responseDisposition: contentHeaders.contentDisposition?.value.toString(),
+      responseType: getContentHeaderValue(contentHeaders, 'Content-Type'),
+      responseDisposition: getContentHeaderValue(contentHeaders, 'Content-Disposition'),
     });
 
     return {

+ 114 - 52
apps/app/src/server/service/file-uploader/gridfs.ts

@@ -12,20 +12,69 @@ import loggerFactory from '~/utils/logger';
 import { configManager } from '../config-manager';
 
 import { AbstractFileUploader, type TemporaryUrl, type SaveFileParam } from './file-uploader';
-import { ContentHeaders } from './utils';
+import { createContentHeaders, getContentHeaderValue } from './utils';
 
 const logger = loggerFactory('growi:service:fileUploaderGridfs');
 
-
 const COLLECTION_NAME = 'attachmentFiles';
 const CHUNK_COLLECTION_NAME = `${COLLECTION_NAME}.chunks`;
 
-// instantiate mongoose-gridfs
-const AttachmentFile = createModel({
-  modelName: COLLECTION_NAME,
-  bucketName: COLLECTION_NAME,
-  connection: mongoose.connection,
-});
+type PromisifiedUtils = {
+  read: (options?: object) => Readable;
+  // eslint-disable-next-line @typescript-eslint/ban-types
+  write: (file: object, stream: Readable, done?: Function) => void;
+  // eslint-disable-next-line @typescript-eslint/ban-types
+  unlink: (file: object, done?: Function) => void;
+  promisifiedWrite: (file: object, readable: Readable) => Promise<any>;
+  promisifiedUnlink: (file: object) => Promise<any>;
+}
+
+type AttachmentFileModel = mongoose.Model<any> & PromisifiedUtils;
+
+// Cache holders to avoid repeated model creation and manage lifecycle
+let cachedAttachmentFileModel: AttachmentFileModel;
+let cachedChunkCollection: mongoose.Collection;
+let cachedConnection: mongoose.Connection; // Track the connection instance itself
+
+/**
+ * Initialize GridFS models with connection instance monitoring
+ * This prevents memory leaks from repeated model creation
+ */
+function initializeGridFSModels(): { attachmentFileModel: AttachmentFileModel, chunkCollection: mongoose.Collection } {
+  // Check if we can reuse cached models by comparing connection instance
+  if (cachedAttachmentFileModel != null && cachedChunkCollection != null && cachedConnection === mongoose.connection) {
+    return { attachmentFileModel: cachedAttachmentFileModel, chunkCollection: cachedChunkCollection };
+  }
+
+  // Check connection state
+  if (mongoose.connection.readyState !== 1) {
+    throw new Error('MongoDB connection is not ready for GridFS operations');
+  }
+
+  // Create new model instances
+  const attachmentFileModel: AttachmentFileModel = createModel({
+    modelName: COLLECTION_NAME,
+    bucketName: COLLECTION_NAME,
+    connection: mongoose.connection,
+  });
+
+  const chunkCollection = mongoose.connection.collection(CHUNK_COLLECTION_NAME);
+
+  // Setup promisified methods on the model instance (not globally)
+  if (!attachmentFileModel.promisifiedWrite) {
+    attachmentFileModel.promisifiedWrite = util.promisify(attachmentFileModel.write).bind(attachmentFileModel);
+    attachmentFileModel.promisifiedUnlink = util.promisify(attachmentFileModel.unlink).bind(attachmentFileModel);
+  }
+
+  // Cache the instances
+  cachedAttachmentFileModel = attachmentFileModel;
+  cachedChunkCollection = chunkCollection;
+  cachedConnection = mongoose.connection;
+
+  logger.debug('GridFS models initialized successfully');
+
+  return { attachmentFileModel, chunkCollection };
+}
 
 
 // TODO: rewrite this module to be a type-safe implementation
@@ -65,13 +114,14 @@ class GridfsFileUploader extends AbstractFileUploader {
   override async uploadAttachment(readable: Readable, attachment: IAttachmentDocument): Promise<void> {
     logger.debug(`File uploading: fileName=${attachment.fileName}`);
 
-    const contentHeaders = new ContentHeaders(attachment);
+    const { attachmentFileModel } = initializeGridFSModels();
+    const contentHeaders = createContentHeaders(attachment);
 
-    return AttachmentFile.promisifiedWrite(
+    return attachmentFileModel.promisifiedWrite(
       {
         // put type and the file name for reference information when uploading
         filename: attachment.fileName,
-        contentType: contentHeaders.contentType?.value.toString(),
+        contentType: getContentHeaderValue(contentHeaders, 'Content-Type'),
       },
       readable,
     );
@@ -104,60 +154,42 @@ class GridfsFileUploader extends AbstractFileUploader {
 module.exports = function(crowi: Crowi) {
   const lib = new GridfsFileUploader(crowi);
 
-  // get Collection instance of chunk
-  const chunkCollection = mongoose.connection.collection(CHUNK_COLLECTION_NAME);
-
-  // create promisified method
-  AttachmentFile.promisifiedWrite = util.promisify(AttachmentFile.write).bind(AttachmentFile);
-  AttachmentFile.promisifiedUnlink = util.promisify(AttachmentFile.unlink).bind(AttachmentFile);
-
   lib.isValidUploadSettings = function() {
     return true;
   };
 
   (lib as any).deleteFile = async function(attachment) {
+    const { attachmentFileModel } = initializeGridFSModels();
     const filenameValue = attachment.fileName;
 
-    const attachmentFile = await AttachmentFile.findOne({ filename: filenameValue });
+    const attachmentFile = await attachmentFileModel.findOne({ filename: filenameValue });
 
     if (attachmentFile == null) {
       logger.warn(`Any AttachmentFile that relate to the Attachment (${attachment._id.toString()}) does not exist in GridFS`);
       return;
     }
-    return AttachmentFile.promisifiedUnlink({ _id: attachmentFile._id });
+
+    return attachmentFileModel.promisifiedUnlink({ _id: attachmentFile._id });
   };
 
+  /**
+   * Bulk-delete files directly, since the unlink method of mongoose-gridfs does not support bulk operations
+   */
   (lib as any).deleteFiles = async function(attachments) {
+    const { attachmentFileModel, chunkCollection } = initializeGridFSModels();
+
     const filenameValues = attachments.map((attachment) => {
       return attachment.fileName;
     });
-    const fileIdObjects = await AttachmentFile.find({ filename: { $in: filenameValues } }, { _id: 1 });
+    const fileIdObjects = await attachmentFileModel.find({ filename: { $in: filenameValues } }, { _id: 1 });
     const idsRelatedFiles = fileIdObjects.map((obj) => { return obj._id });
 
     return Promise.all([
-      AttachmentFile.deleteMany({ filename: { $in: filenameValues } }),
+      attachmentFileModel.deleteMany({ filename: { $in: filenameValues } }),
       chunkCollection.deleteMany({ files_id: { $in: idsRelatedFiles } }),
     ]);
   };
 
-  /**
-   * get size of data uploaded files using (Promise wrapper)
-   */
-  // const getCollectionSize = () => {
-  //   return new Promise((resolve, reject) => {
-  //     chunkCollection.stats((err, data) => {
-  //       if (err) {
-  //         // return 0 if not exist
-  //         if (err.errmsg.includes('not found')) {
-  //           return resolve(0);
-  //         }
-  //         return reject(err);
-  //       }
-  //       return resolve(data.size);
-  //     });
-  //   });
-  // };
-
   /**
    * check the file size limit
    *
@@ -172,17 +204,44 @@ module.exports = function(crowi: Crowi) {
   };
 
   lib.saveFile = async function({ filePath, contentType, data }) {
-    const readable = new Readable();
-    readable.push(data);
-    readable.push(null); // EOF
+    const { attachmentFileModel } = initializeGridFSModels();
 
-    return AttachmentFile.promisifiedWrite(
-      {
-        filename: filePath,
-        contentType,
+    // Create a readable stream from the data
+    const readable = new Readable({
+      read() {
+        this.push(data);
+        this.push(null); // EOF
       },
-      readable,
-    );
+    });
+
+    try {
+      // NOTE(review): throwing inside an 'error' listener does not propagate to this try/catch —
+      // it surfaces as an uncaughtException; consider letting promisifiedWrite reject instead
+      readable.on('error', (err) => {
+        logger.error('Readable stream error:', err);
+        readable.destroy();
+        throw err;
+      });
+
+      // Await the write here so the finally block below can always destroy the stream
+      const result = await attachmentFileModel.promisifiedWrite(
+        {
+          filename: filePath,
+          contentType,
+        },
+        readable,
+      );
+
+      return result;
+    }
+    catch (error) {
+      throw error;
+    }
+    finally {
+      // Explicit cleanup to prevent memory leaks
+      if (typeof readable.destroy === 'function') {
+        readable.destroy();
+      }
+    }
   };
 
   /**
@@ -192,23 +251,26 @@ module.exports = function(crowi: Crowi) {
    * @return {stream.Readable} readable stream
    */
   lib.findDeliveryFile = async function(attachment) {
+    const { attachmentFileModel } = initializeGridFSModels();
     const filenameValue = attachment.fileName;
 
-    const attachmentFile = await AttachmentFile.findOne({ filename: filenameValue });
+    const attachmentFile = await attachmentFileModel.findOne({ filename: filenameValue });
 
     if (attachmentFile == null) {
       throw new Error(`Any AttachmentFile that relate to the Attachment (${attachment._id.toString()}) does not exist in GridFS`);
     }
 
     // return stream.Readable
-    return AttachmentFile.read({ _id: attachmentFile._id });
+    return attachmentFileModel.read({ _id: attachmentFile._id });
   };
 
   /**
    * List files in storage
    */
   (lib as any).listFiles = async function() {
-    const attachmentFiles = await AttachmentFile.find();
+    const { attachmentFileModel } = initializeGridFSModels();
+
+    const attachmentFiles = await attachmentFileModel.find();
     return attachmentFiles.map(({ filename: name, length: size }) => ({
       name, size,
     }));

+ 25 - 4
apps/app/src/server/service/file-uploader/local.ts

@@ -15,7 +15,7 @@ import {
   AbstractFileUploader, type TemporaryUrl, type SaveFileParam,
 } from './file-uploader';
 import {
-  ContentHeaders, applyHeaders,
+  applyHeaders, createContentHeaders, toExpressHttpHeaders,
 } from './utils';
 
 
@@ -166,7 +166,28 @@ module.exports = function(crowi: Crowi) {
 
     const writeStream: Writable = fs.createWriteStream(filePath);
 
-    return pipeline(fileStream, writeStream);
+    try {
+      const uploadTimeout = configManager.getConfig('app:fileUploadTimeout');
+      await pipeline(
+        fileStream,
+        writeStream,
+        { signal: AbortSignal.timeout(uploadTimeout) },
+      );
+
+      logger.debug(`File upload completed successfully: fileName=${attachment.fileName}`);
+    }
+    catch (error) {
+      // Handle timeout error specifically
+      if (error.name === 'AbortError') {
+        logger.warn(`Upload timeout: fileName=${attachment.fileName}`, error);
+      }
+      else {
+        logger.error(`File upload failed: fileName=${attachment.fileName}`, error);
+      }
+      // Re-throw the error to be handled by the caller.
+      // The pipeline automatically handles stream cleanup on error.
+      throw error;
+    }
   };
 
   lib.saveFile = async function({ filePath, contentType, data }) {
@@ -229,9 +250,9 @@ module.exports = function(crowi: Crowi) {
     const internalPath = urljoin(internalPathRoot, relativePath);
 
     const isDownload = opts?.download ?? false;
-    const contentHeaders = new ContentHeaders(attachment, { inline: !isDownload });
+    const contentHeaders = createContentHeaders(attachment, { inline: !isDownload });
     applyHeaders(res, [
-      ...contentHeaders.toExpressHttpHeaders(),
+      ...toExpressHttpHeaders(contentHeaders),
       { field: 'X-Accel-Redirect', value: internalPath },
       { field: 'X-Sendfile', value: storagePath },
     ]);

+ 47 - 47
apps/app/src/server/service/file-uploader/utils/headers.ts

@@ -1,68 +1,68 @@
 import type { Response } from 'express';
 
-import type { ExpressHttpHeader, IContentHeaders } from '~/server/interfaces/attachment';
+import type { ExpressHttpHeader } from '~/server/interfaces/attachment';
 import type { IAttachmentDocument } from '~/server/models/attachment';
 
+type ContentHeaderField = 'Content-Type' | 'Content-Security-Policy' | 'Content-Disposition' | 'Content-Length';
+type ContentHeader = ExpressHttpHeader<ContentHeaderField>;
 
-export class ContentHeaders implements IContentHeaders {
-
-  contentType?: ExpressHttpHeader<'Content-Type'>;
+/**
+ * Factory function to generate content headers.
+ * This approach avoids creating a class instance for each call, improving memory efficiency.
+ */
+export const createContentHeaders = (attachment: IAttachmentDocument, opts?: { inline?: boolean }): ContentHeader[] => {
+  const headers: ContentHeader[] = [];
 
-  contentLength?: ExpressHttpHeader<'Content-Length'>;
+  // Content-Type
+  headers.push({
+    field: 'Content-Type',
+    value: attachment.fileFormat,
+  });
 
-  contentSecurityPolicy?: ExpressHttpHeader<'Content-Security-Policy'>;
+  // Content-Security-Policy
+  headers.push({
+    field: 'Content-Security-Policy',
+    // eslint-disable-next-line max-len
+    value: "script-src 'unsafe-hashes'; style-src 'self' 'unsafe-inline'; object-src 'none'; require-trusted-types-for 'script'; media-src 'self'; default-src 'none';",
+  });
 
-  contentDisposition?: ExpressHttpHeader<'Content-Disposition'>;
+  // Content-Disposition
+  headers.push({
+    field: 'Content-Disposition',
+    value: `${opts?.inline ? 'inline' : 'attachment'};filename*=UTF-8''${encodeURIComponent(attachment.originalName)}`,
+  });
 
-  constructor(attachment: IAttachmentDocument, opts?: {
-    inline?: boolean,
-  }) {
+  // Content-Length
+  if (attachment.fileSize != null) {
+    headers.push({
+      field: 'Content-Length',
+      value: attachment.fileSize.toString(),
+    });
+  }
 
-    this.contentType = {
-      field: 'Content-Type',
-      value: attachment.fileFormat,
-    };
-    this.contentSecurityPolicy = {
-      field: 'Content-Security-Policy',
-      // eslint-disable-next-line max-len
-      value: "script-src 'unsafe-hashes'; style-src 'self' 'unsafe-inline'; object-src 'none'; require-trusted-types-for 'script'; media-src 'self'; default-src 'none';",
-    };
-    this.contentDisposition = {
-      field: 'Content-Disposition',
-      value: `${opts?.inline ? 'inline' : 'attachment'};filename*=UTF-8''${encodeURIComponent(attachment.originalName)}`,
-    };
+  return headers;
+};
 
-    if (attachment.fileSize) {
-      this.contentLength = {
-        field: 'Content-Length',
-        value: attachment.fileSize.toString(),
-      };
-    }
-  }
+export const getContentHeaderValue = (contentHeaders: ContentHeader[], field: ContentHeaderField): string | undefined => {
+  const header = contentHeaders.find(h => h.field === field);
+  return header?.value.toString();
+};
 
-  /**
-   * Convert to ExpressHttpHeader[]
-   */
-  toExpressHttpHeaders(): ExpressHttpHeader[] {
-    return [
-      this.contentType,
-      this.contentLength,
-      this.contentSecurityPolicy,
-      this.contentDisposition,
-    ]
+/**
+ * Convert to ExpressHttpHeader[]
+ */
+export function toExpressHttpHeaders(records: Record<string, string | string[]>): ExpressHttpHeader[];
+export function toExpressHttpHeaders(contentHeaders: ContentHeader[]): ExpressHttpHeader[];
+export function toExpressHttpHeaders(arg: Record<string, string | string[]> | ContentHeader[]): ExpressHttpHeader[] {
+  if (Array.isArray(arg)) {
+    return arg
       // exclude undefined
       .filter((member): member is NonNullable<typeof member> => member != null);
   }
 
+  return Object.entries(arg).map(([field, value]) => { return { field, value } });
 }
 
-/**
- * Convert Record to ExpressHttpHeader[]
- */
-export const toExpressHttpHeaders = (records: Record<string, string | string[]>): ExpressHttpHeader[] => {
-  return Object.entries(records).map(([field, value]) => { return { field, value } });
-};
-
 export const applyHeaders = (res: Response, headers: ExpressHttpHeader[]): void => {
   headers.forEach((header) => {
     res.header(header.field, header.value);

+ 6 - 6
apps/app/src/server/service/g2g-transfer.ts

@@ -189,7 +189,7 @@ interface Receiver {
     innerFileStats: any[],
     optionsMap: { [key: string]: GrowiArchiveImportOption; },
     operatorUserId: string,
-  ): { [key: string]: ImportSettings; }
+  ): Map<string, ImportSettings>
   /**
    * Import collections
    * @param {string} collections Array of collection name
@@ -198,7 +198,7 @@ interface Receiver {
    */
   importCollections(
     collections: string[],
-    importSettingsMap: { [key: string]: ImportSettings; },
+    importSettingsMap: Map<string, ImportSettings>,
     sourceGROWIUploadConfigs: FileUploadConfigs,
   ): Promise<void>
   /**
@@ -618,8 +618,8 @@ export class G2GTransferReceiverService implements Receiver {
       innerFileStats: any[],
       optionsMap: { [key: string]: GrowiArchiveImportOption; },
       operatorUserId: string,
-  ): { [key: string]: ImportSettings; } {
-    const importSettingsMap = {};
+  ): Map<string, ImportSettings> {
+    const importSettingsMap = new Map<string, ImportSettings>();
     innerFileStats.forEach(({ fileName, collectionName }) => {
       const options = new GrowiArchiveImportOption(collectionName, undefined, optionsMap[collectionName]);
 
@@ -641,7 +641,7 @@ export class G2GTransferReceiverService implements Receiver {
         jsonFileName: fileName,
         overwriteParams: generateOverwriteParams(collectionName, operatorUserId, options),
       };
-      importSettingsMap[collectionName] = importSettings;
+      importSettingsMap.set(collectionName, importSettings);
     });
 
     return importSettingsMap;
@@ -649,7 +649,7 @@ export class G2GTransferReceiverService implements Receiver {
 
   public async importCollections(
       collections: string[],
-      importSettingsMap: { [key: string]: ImportSettings; },
+      importSettingsMap: Map<string, ImportSettings>,
       sourceGROWIUploadConfigs: FileUploadConfigs,
   ): Promise<void> {
     const { appService } = this.crowi;

+ 1 - 3
apps/app/src/server/service/growi-bridge/index.ts

@@ -19,7 +19,7 @@ const logger = loggerFactory('growi:services:GrowiBridgeService'); // eslint-dis
  * the service class for bridging GROWIs (export and import)
  * common properties and methods between export service and import service are defined in this service
  */
-class GrowiBridgeService {
+export class GrowiBridgeService {
 
   crowi: Crowi;
 
@@ -123,5 +123,3 @@ class GrowiBridgeService {
   }
 
 }
-
-export default GrowiBridgeService;

+ 23 - 3
apps/app/src/server/service/import/construct-convert-map.ts

@@ -10,10 +10,29 @@ export type ConvertMap = {
   }
 }
 
+/**
+ * Special conversion functions for problematic fields
+ * Add entries here for fields that require custom handling during import
+ */
+const SPECIAL_CONVERT_FUNCTIONS: Record<string, Record<string, OverwriteFunction>> = {
+  activities: {
+    snapshot: (value: unknown) => value, // Skip SubdocumentPath casting to avoid Mongoose errors
+  },
+  // Add more collections and fields as needed:
+  // otherCollection: {
+  //   problematicField: (value: unknown) => customProcessing(value),
+  // },
+};
+
+/**
+ * Get special conversion function for a specific collection.field combination
+ */
+const getSpecialConvertFunction = (collectionName: string, propertyName: string): OverwriteFunction | null => {
+  return SPECIAL_CONVERT_FUNCTIONS[collectionName]?.[propertyName] ?? null;
+};
+
 /**
  * Initialize convert map. set keepOriginal as default
- *
- * @param {Crowi} crowi Crowi instance
  */
 export const constructConvertMap = (): ConvertMap => {
   const convertMap: ConvertMap = {};
@@ -30,7 +49,8 @@ export const constructConvertMap = (): ConvertMap => {
     convertMap[collectionName] = {};
 
     for (const key of Object.keys(model.schema.paths)) {
-      convertMap[collectionName][key] = keepOriginal;
+      const specialHandler = getSpecialConvertFunction(collectionName, key);
+      convertMap[collectionName][key] = specialHandler ?? keepOriginal;
     }
   });
 

+ 131 - 95
apps/app/src/server/service/import/import.ts

@@ -1,13 +1,13 @@
 import fs from 'fs';
 import path from 'path';
 import type { EventEmitter } from 'stream';
-import { Writable, Transform, pipeline } from 'stream';
-import { finished, pipeline as pipelinePromise } from 'stream/promises';
+import { Writable, Transform } from 'stream';
+import { pipeline } from 'stream/promises';
 
 import JSONStream from 'JSONStream';
 import gc from 'expose-gc/function';
 import type {
-  BulkWriteResult, MongoBulkWriteError, UnorderedBulkOperation, WriteError,
+  BulkWriteResult, MongoBulkWriteError, UnorderedBulkOperation, WriteError, BulkOperationBase,
 } from 'mongodb';
 import type { Document } from 'mongoose';
 import mongoose from 'mongoose';
@@ -51,6 +51,8 @@ class ImportingCollectionError extends Error {
 
 export class ImportService {
 
+  private modelCache: Map<string, { Model: any, schema: any }> = new Map();
+
   private crowi: Crowi;
 
   private growiBridgeService: any;
@@ -59,7 +61,7 @@ export class ImportService {
 
   private currentProgressingStatus: CollectionProgressingStatus | null;
 
-  private convertMap: ConvertMap;
+  private convertMap: ConvertMap | undefined;
 
   constructor(crowi: Crowi) {
     this.crowi = crowi;
@@ -139,7 +141,7 @@ export class ImportService {
    * @param collections MongoDB collection name
    * @param importSettingsMap
    */
-  async import(collections: string[], importSettingsMap: { [collectionName: string]: ImportSettings }): Promise<void> {
+  async import(collections: string[], importSettingsMap: Map<string, ImportSettings>): Promise<void> {
     await this.preImport();
 
     // init status object
@@ -147,7 +149,10 @@ export class ImportService {
 
     // process serially so as not to waste memory
     const promises = collections.map((collectionName) => {
-      const importSettings = importSettingsMap[collectionName];
+      const importSettings = importSettingsMap.get(collectionName);
+      if (importSettings == null) {
+        throw new Error(`ImportSettings for ${collectionName} is not found`);
+      }
       return this.importCollection(collectionName, importSettings);
     });
     for await (const promise of promises) {
@@ -172,6 +177,10 @@ export class ImportService {
     const shouldNormalizePages = currentIsV5Compatible && isImportPagesCollection;
 
     if (shouldNormalizePages) await this.crowi.pageService.normalizeAllPublicPages();
+
+    // Release caches after import process
+    this.modelCache.clear();
+    this.convertMap = undefined;
   }
 
   /**
@@ -183,13 +192,7 @@ export class ImportService {
     if (this.currentProgressingStatus == null) {
       throw new Error('Something went wrong: currentProgressingStatus is not initialized');
     }
-
-    // prepare functions invoked from custom streams
-    const convertDocuments = this.convertDocuments.bind(this);
-    const bulkOperate = this.bulkOperate.bind(this);
-    const execUnorderedBulkOpSafely = this.execUnorderedBulkOpSafely.bind(this);
-    const emitProgressEvent = this.emitProgressEvent.bind(this);
-
+    // NOTE(review): instead of binding closures, the service instance is stashed on importSettings
+    // (see the `(importSettings as any).service = this` assignment below) — this mutates the
+    // caller-provided settings object; confirm that is acceptable
     const collection = mongoose.connection.collection(collectionName);
 
     const { mode, jsonFileName, overwriteParams } = importSettings;
@@ -215,52 +218,71 @@ export class ImportService {
       // stream 3
       const convertStream = new Transform({
         objectMode: true,
-        transform(doc, encoding, callback) {
-          const converted = convertDocuments(collectionName, doc, overwriteParams);
-          this.push(converted);
-          callback();
+        transform(this: Transform, doc, encoding, callback) {
+          try {
+          // convertDocuments is reached via the service instance attached to importSettings below
+            const converted = (importSettings as any).service.convertDocuments(collectionName, doc, overwriteParams);
+            this.push(converted);
+            callback();
+          }
+          catch (error) {
+            callback(error);
+          }
         },
       });
+      // Reference for importService within Transform
+      (importSettings as any).service = this;
 
       // stream 4
       const batchStream = createBatchStream(BULK_IMPORT_SIZE);
-
-      // stream 5
       const writeStream = new Writable({
         objectMode: true,
-        async write(batch, encoding, callback) {
-          const unorderedBulkOp = collection.initializeUnorderedBulkOp();
-
-          // documents are not persisted until unorderedBulkOp.execute()
-          batch.forEach((document) => {
-            bulkOperate(unorderedBulkOp, collectionName, document, importSettings);
-          });
-
-          // exec
-          const { result, errors } = await execUnorderedBulkOpSafely(unorderedBulkOp);
-          const { insertedCount, modifiedCount } = result;
-          const errorCount = errors?.length ?? 0;
-
-          logger.debug(`Importing ${collectionName}. Inserted: ${insertedCount}. Modified: ${modifiedCount}. Failed: ${errorCount}.`);
-
-          const increment = insertedCount + modifiedCount + errorCount;
-          collectionProgress.currentCount += increment;
-          collectionProgress.totalCount += increment;
-          collectionProgress.insertedCount += insertedCount;
-          collectionProgress.modifiedCount += modifiedCount;
-
-          emitProgressEvent(collectionProgress, errors);
-
+        write: async(batch, encoding, callback) => {
           try {
+            const unorderedBulkOp = collection.initializeUnorderedBulkOp();
+            // documents are not persisted until unorderedBulkOp.execute()
+            batch.forEach((document) => {
+              this.bulkOperate(unorderedBulkOp, collectionName, document, importSettings);
+            });
+
+            // exec
+            const { result, errors } = await this.execUnorderedBulkOpSafely(unorderedBulkOp);
+            const {
+              insertedCount, modifiedCount, upsertedCount, matchedCount,
+            } = result;
+            const errorCount = errors?.length ?? 0;
+
+            // For upsert operations, count matched documents as modified
+            const actualModifiedCount = importSettings.mode === ImportMode.upsert
+              ? (matchedCount || 0) // In upsert mode, matchedCount indicates documents that were found and potentially updated
+              : modifiedCount;
+
+            const actualInsertedCount = importSettings.mode === ImportMode.upsert
+              ? (upsertedCount || 0) // In upsert mode, upsertedCount indicates newly created documents
+              : insertedCount;
+
+            logger.debug(`Importing ${collectionName}. Inserted: ${actualInsertedCount}. Modified: ${actualModifiedCount}. Failed: ${errorCount}.`
+              + ` (Raw: inserted=${insertedCount}, modified=${modifiedCount}, upserted=${upsertedCount}, matched=${matchedCount})`);
+            const increment = actualInsertedCount + actualModifiedCount + errorCount;
+            collectionProgress.currentCount += increment;
+            collectionProgress.totalCount += increment;
+            collectionProgress.insertedCount += actualInsertedCount;
+            collectionProgress.modifiedCount += actualModifiedCount;
+            this.emitProgressEvent(collectionProgress, errors);
             // First aid to prevent unexplained memory leaks
-            logger.info('global.gc() invoked.');
-            gc();
+            try {
+              logger.info('global.gc() invoked.');
+              gc();
+            }
+            catch (err) {
+              logger.error('fail garbage collection: ', err);
+            }
+            callback();
           }
           catch (err) {
-            logger.error('fail garbage collection: ', err);
+            logger.error('Error in writeStream:', err);
+            callback(err);
           }
-
-          callback();
         },
         final(callback) {
           logger.info(`Importing ${collectionName} has completed.`);
@@ -268,7 +290,13 @@ export class ImportService {
         },
       });
 
-      await pipelinePromise(readStream, jsonStream, convertStream, batchStream, writeStream);
+      await pipeline(readStream, jsonStream, convertStream, batchStream, writeStream);
+
+      // Ensure final progress event is emitted even when no data was processed
+      if (collectionProgress.currentCount === 0) {
+        logger.info(`No data processed for collection ${collectionName}. Emitting final progress event.`);
+        this.emitProgressEvent(collectionProgress, null);
+      }
 
       // clean up tmp directory
       fs.unlinkSync(jsonFile);
@@ -276,15 +304,9 @@ export class ImportService {
     catch (err) {
       throw new ImportingCollectionError(collectionProgress, err);
     }
-
   }
 
-  /**
-   *
-   * @param {string} collectionName
-   * @param {importSettings} importSettings
-   */
-  validateImportSettings(collectionName, importSettings) {
+  validateImportSettings(collectionName: string, importSettings: ImportSettings): void {
     const { mode } = importSettings;
 
     switch (collectionName) {
@@ -298,15 +320,18 @@ export class ImportService {
 
   /**
    * process bulk operation
-   * @param bulk MongoDB Bulk instance
-   * @param collectionName collection name
    */
-  bulkOperate(bulk, collectionName: string, document, importSettings: ImportSettings) {
+  bulkOperate(
+      bulk: UnorderedBulkOperation,
+      collectionName: string,
+      document: Record<string, unknown>,
+      importSettings: ImportSettings,
+  ): BulkOperationBase | void {
     // insert
     if (importSettings.mode !== ImportMode.upsert) {
+      // NOTE: splitting very large documents before insertion could be considered as a future optimization
       return bulk.insert(document);
     }
-
     // upsert
     switch (collectionName) {
       case 'pages':
@@ -321,7 +346,7 @@ export class ImportService {
    * @param {CollectionProgress} collectionProgress
    * @param {object} appendedErrors key: collection name, value: array of error object
    */
-  emitProgressEvent(collectionProgress, appendedErrors) {
+  emitProgressEvent(collectionProgress: CollectionProgress, appendedErrors: any): void {
     const { collectionName } = collectionProgress;
 
     // send event (in progress in global)
@@ -331,7 +356,7 @@ export class ImportService {
   /**
    * emit terminate event
    */
-  emitTerminateEvent() {
+  emitTerminateEvent(): void {
     this.adminEvent.emit('onTerminateForImport');
   }
 
@@ -342,13 +367,12 @@ export class ImportService {
    * @param {string} zipFile absolute path to zip file
    * @return {Array.<string>} array of absolute paths to extracted files
    */
-  async unzip(zipFile) {
+  async unzip(zipFile: string): Promise<string[]> {
     const readStream = fs.createReadStream(zipFile);
     const parseStream = unzipStream.Parse();
-    const unzipEntryStream = pipeline(readStream, parseStream, () => {});
-    const files: string[] = [];
+    const entryPromises: Promise<string | null>[] = [];
 
-    unzipEntryStream.on('entry', (/** @type {Entry} */ entry) => {
+    parseStream.on('entry', (/** @type {Entry} */ entry) => {
       const fileName = entry.path;
       // https://regex101.com/r/mD4eZs/6
       // prevent from unexpecting attack doing unzip file (path traversal attack)
@@ -356,6 +380,7 @@ export class ImportService {
       // ../../src/server/example.html
       if (fileName.match(/(\.\.\/|\.\.\\)/)) {
         logger.error('File path is not appropriate.', fileName);
+        entry.autodrain();
         return;
       }
 
@@ -364,16 +389,28 @@ export class ImportService {
         entry.autodrain();
       }
       else {
-        const jsonFile = path.join(this.baseDir, fileName);
-        const writeStream = fs.createWriteStream(jsonFile, { encoding: this.growiBridgeService.getEncoding() });
-        pipeline(entry, writeStream, () => {});
-        files.push(jsonFile);
+        const entryPromise = new Promise<string | null>((resolve) => {
+          const jsonFile = path.join(this.baseDir, fileName);
+          const writeStream = fs.createWriteStream(jsonFile, { encoding: this.growiBridgeService.getEncoding() });
+
+          pipeline(entry, writeStream)
+            .then(() => resolve(jsonFile))
+            .catch((err) => {
+              logger.error('Failed to extract entry:', err);
+              resolve(null); // Continue processing other entries
+            });
+        });
+
+        entryPromises.push(entryPromise);
       }
     });
 
-    await finished(unzipEntryStream);
+    await pipeline(readStream, parseStream);
+    const results = await Promise.allSettled(entryPromises);
 
-    return files;
+    return results
+      .filter((result): result is PromiseFulfilledResult<string> => result.status === 'fulfilled' && result.value !== null)
+      .map(result => result.value);
   }
 
   /**
@@ -414,32 +451,32 @@ export class ImportService {
    * @returns document to be persisted
    */
   convertDocuments<D extends Document>(collectionName: string, document: D, overwriteParams: OverwriteParams): D {
-    const Model = getModelFromCollectionName(collectionName);
-    const schema = (Model != null) ? Model.schema : undefined;
-    const convertMap = this.convertMap[collectionName];
 +    // Cache the Model and schema per collection name (optimization)
+    if (!this.modelCache) {
+      this.modelCache = new Map();
+    }
 
-    const _document: D = structuredClone(document);
+    let modelInfo = this.modelCache.get(collectionName);
+    if (!modelInfo) {
+      const Model = getModelFromCollectionName(collectionName);
+      const schema = (Model != null) ? Model.schema : undefined;
+      modelInfo = { Model, schema };
+      this.modelCache.set(collectionName, modelInfo);
+    }
 
-    // apply keepOriginal to all of properties
-    Object.entries(document).forEach(([propertyName, value]) => {
-      _document[propertyName] = keepOriginal(value, { document, propertyName });
-    });
+    const { schema } = modelInfo;
+    const convertMap = this.convertMap?.[collectionName];
 
-    // Mongoose Model
-    if (convertMap != null) {
-      // assign value from documents being imported
-      Object.entries(convertMap).forEach(([propertyName, convertedValue]) => {
-        const value = document[propertyName];
+    // Use shallow copy instead of structuredClone() when sufficient
+    const _document: D = (typeof document === 'object' && document !== null && !Array.isArray(document)) ? { ...document } : structuredClone(document);
 
-        // distinguish between null and undefined
-        if (value === undefined) {
-          return; // next entry
-        }
+    Object.entries(document).forEach(([propertyName, value]) => {
+      // Check if there's a custom convert function for this property, otherwise use keepOriginal
+      const convertedValue = convertMap?.[propertyName];
+      const convertFunc = (convertedValue != null && typeof convertedValue === 'function') ? convertedValue : keepOriginal;
 
-        const convertFunc = (typeof convertedValue === 'function') ? convertedValue : null;
-        _document[propertyName] = (convertFunc != null) ? convertFunc(value, { document, propertyName, schema }) : convertedValue;
-      });
-    }
+      _document[propertyName] = convertFunc(value, { document, propertyName, schema });
+    });
 
     // overwrite documents with custom values
     Object.entries(overwriteParams).forEach(([propertyName, overwriteValue]) => {
@@ -451,7 +488,6 @@ export class ImportService {
         _document[propertyName] = (overwriteFunc != null) ? overwriteFunc(value, { document: _document, propertyName, schema }) : overwriteValue;
       }
     });
-
     return _document;
   }
 
@@ -463,7 +499,7 @@ export class ImportService {
    * @memberOf ImportService
    * @param {object} meta meta data from meta.json
    */
-  validate(meta) {
+  validate(meta: any): void {
     if (meta.version !== getGrowiVersion()) {
       throw new Error('The version of this GROWI and the uploaded GROWI data are not the same');
     }
@@ -476,7 +512,7 @@ export class ImportService {
   /**
    * Delete all uploaded files
    */
-  deleteAllZipFiles() {
+  deleteAllZipFiles(): void {
     fs.readdirSync(this.baseDir)
       .filter(file => path.extname(file) === '.zip')
       .forEach(file => fs.unlinkSync(path.join(this.baseDir, file)));

+ 30 - 6
apps/app/src/server/service/search-delegator/elasticsearch.ts

@@ -75,6 +75,10 @@ class ElasticsearchDelegator implements SearchDelegator<Data, ESTermsKey, ESQuer
 
   private indexName: string;
 
+  private pageModel?: PageModel;
+
+  private userModel?: typeof mongoose.Model;
+
   constructor(socketIoService: SocketIoService) {
     this.name = SearchDelegatorName.DEFAULT;
     this.socketIoService = socketIoService;
@@ -92,6 +96,26 @@ class ElasticsearchDelegator implements SearchDelegator<Data, ESTermsKey, ESQuer
     this.isElasticsearchReindexOnBoot = configManager.getConfig('app:elasticsearchReindexOnBoot');
   }
 
+  /**
+   * Get Page model with proper typing
+   */
+  private getPageModel(): PageModel {
+    if (!this.pageModel) {
+      this.pageModel = mongoose.model<IPage, PageModel>('Page');
+    }
+    return this.pageModel;
+  }
+
+  /**
+   * Get User model with proper typing
+   */
+  private getUserModel() {
+    if (!this.userModel) {
+      this.userModel = mongoose.model('User');
+    }
+    return this.userModel;
+  }
+
   get aliasName(): string {
     return `${this.indexName}-alias`;
   }
@@ -416,17 +440,17 @@ class ElasticsearchDelegator implements SearchDelegator<Data, ESTermsKey, ESQuer
   }
 
   addAllPages() {
-    const Page = mongoose.model('Page');
+    const Page = this.getPageModel();
     return this.updateOrInsertPages(() => Page.find(), { shouldEmitProgress: true, invokeGarbageCollection: true });
   }
 
   updateOrInsertPageById(pageId) {
-    const Page = mongoose.model('Page');
+    const Page = this.getPageModel();
     return this.updateOrInsertPages(() => Page.findById(pageId));
   }
 
   updateOrInsertDescendantsPagesById(page, user) {
-    const Page = mongoose.model('Page') as unknown as PageModel;
+    const Page = this.getPageModel();
     const { PageQueryBuilder } = Page;
     const builder = new PageQueryBuilder(Page.find());
     builder.addConditionToListWithDescendants(page.path);
@@ -439,7 +463,7 @@ class ElasticsearchDelegator implements SearchDelegator<Data, ESTermsKey, ESQuer
   async updateOrInsertPages(queryFactory, option: UpdateOrInsertPagesOpts = {}): Promise<void> {
     const { shouldEmitProgress = false, invokeGarbageCollection = false } = option;
 
-    const Page = mongoose.model<IPage, PageModel>('Page');
+    const Page = this.getPageModel();
     const { PageQueryBuilder } = Page;
 
     const socket = shouldEmitProgress ? this.socketIoService.getAdminSocket() : null;
@@ -827,7 +851,7 @@ class ElasticsearchDelegator implements SearchDelegator<Data, ESTermsKey, ESQuer
       throw new Error('query.body.query.bool is not initialized');
     }
 
-    const Page = mongoose.model('Page') as unknown as PageModel;
+    const Page = this.getPageModel();
     const {
       GRANT_PUBLIC, GRANT_SPECIFIED, GRANT_OWNER, GRANT_USER_GROUP,
     } = Page;
@@ -886,7 +910,7 @@ class ElasticsearchDelegator implements SearchDelegator<Data, ESTermsKey, ESQuer
   }
 
   async appendFunctionScore(query, queryString): Promise<void> {
-    const User = mongoose.model('User');
+    const User = this.getUserModel();
     const count = await User.count({}) || 1;
 
     const minScore = queryString.length * 0.1 - 1; // increase with length

+ 76 - 70
pnpm-lock.yaml

@@ -509,8 +509,8 @@ importers:
         specifier: ^6.13.6
         version: 6.13.8(@aws-sdk/client-sso-oidc@3.600.0)
       mongoose-gridfs:
-        specifier: ^1.2.42
-        version: 1.2.42(mongoose@6.13.8(@aws-sdk/client-sso-oidc@3.600.0))
+        specifier: ^1.3.0
+        version: 1.3.0(@aws-sdk/client-sso-oidc@3.600.0)(mongoose@6.13.8(@aws-sdk/client-sso-oidc@3.600.0))
       mongoose-paginate-v2:
         specifier: ^1.3.9
         version: 1.8.2
@@ -3400,23 +3400,26 @@ packages:
   '@lezer/yaml@1.0.3':
     resolution: {integrity: sha512-GuBLekbw9jDBDhGur82nuwkxKQ+a3W5H0GfaAthDXcAu+XdpS43VlnxA9E9hllkpSP5ellRDKjLLj7Lu9Wr6xA==}
 
-  '@lykmapipo/common@0.34.3':
-    resolution: {integrity: sha512-rdLJkeatlCWEZFXC142V/fLAuKHREJcfPSC7OAjpn4DEvANfmvCgwLl+gwLLwFsn8lwnBDGo+7Y6pwirw86FpA==}
+  '@lykmapipo/common@0.44.5':
+    resolution: {integrity: sha512-xkG/1aaOPMdyMKwJ4reNTlwN/s2drXmgfF7Siwwz/SHjKFOnonALmKURLyKuZ8FmSByP5ohaMjPCnzfO4YMM1A==}
     engines: {node: '>=8.11.1', npm: '>=5.6.0'}
 
-  '@lykmapipo/env@0.17.8':
-    resolution: {integrity: sha512-wtwBhTACxMZ342j1CSUUXtiNQOH+yl+8vyptBXcQtZCz5QCHMO7sInCgtKUezEduaY9evs/aNINwrcTVA485dQ==}
+  '@lykmapipo/env@0.17.39':
+    resolution: {integrity: sha512-0V2x4+Lao/7SzxQcO4LCYLjtrrUS06fNJXVLcyUBAN8JnuM3kbo0GR1mujaiV72Q9/1bIdZDxfJW2OnNyKJGdQ==}
     engines: {node: '>=8.11.1', npm: '>=5.6.0'}
 
-  '@lykmapipo/mongoose-common@0.35.0':
-    resolution: {integrity: sha512-XvbiTSkhI8bhfHw4slXpWxbRsDe27XhM0946JMySGcgG7T1Ohe+I+C8nTKzsORU5EcBdyyYgHgTIpdc55oXlcg==}
-    peerDependencies:
-      mongoose: '>=5.9.17'
+  '@lykmapipo/mongoose-common@0.40.0':
+    resolution: {integrity: sha512-dU32a3iq0nSSWkPTqr4LA+gcC2NfpyGZr1pd9YFn1jfpw9M2Y0qfGhugzaQJ3rP8w3zGJHt8k3+6WLOLLaDylQ==}
+    engines: {node: '>=8.11.1', npm: '>=5.6.0'}
 
-  '@lykmapipo/phone@0.6.5':
-    resolution: {integrity: sha512-b3x17Rn7E/20hf7RFbd2szwa05C/SIRCnjgcFoOi3YYLkIlKIWU/IB596EHmx8nYiX9XYb+RIdvvcx2WhgR/8A==}
+  '@lykmapipo/mongoose-connection@0.5.2':
+    resolution: {integrity: sha512-9ykz/IoraBBZmF3IndHM/QJO6VSb5GRO8jg3F+ZEzr0qoYYQpLRUgl4HzCYkgKXWES2ccjx4XWoruD2zWDCAbg==}
     engines: {node: '>=8.11.1', npm: '>=5.6.0'}
 
+  '@lykmapipo/phone@0.7.16':
+    resolution: {integrity: sha512-YkHyZav72pgXpa0oBqVxud8Mdw/T9LmopMMECzTUTQ6UqXeehd9UuDcQauBY3vC9+MjHL0I9Cd2yF9+24/AZQQ==}
+    engines: {node: '>=14.5.0', npm: '>=6.14.5'}
+
   '@manypkg/find-root@1.1.0':
     resolution: {integrity: sha512-mki5uBvhHzO8kYYix/WRy2WX8S3B5wdVSc9D6KcU5lQNglP2yt58/VfLuAK49glRXChosY8ap2oJ1qgma3GUVA==}
 
@@ -6625,8 +6628,8 @@ packages:
   browser-bunyan@1.8.0:
     resolution: {integrity: sha512-Et1TaRUm8m2oy4OTi69g0qAM8wqpofACUgkdBnj1Kq2aC8Wpl8w+lNevebPG6zKH2w0Aq+BHiAXWwjm0/QbkaQ==}
 
-  browser-or-node@1.2.1:
-    resolution: {integrity: sha512-sVIA0cysIED0nbmNOm7sZzKfgN1rpFmrqvLZaFWspaBAftfQcezlC81G6j6U2RJf4Lh66zFxrCeOsvkUXIcPWg==}
+  browser-or-node@3.0.0:
+    resolution: {integrity: sha512-iczIdVJzGEYhP5DqQxYM9Hh7Ztpqqi+CXZpSmX8ALFs9ecXkQIeqRyM6TfxEfMVpwhl3dSuDvxdzzo9sUOIVBQ==}
 
   browser-sync-client@3.0.4:
     resolution: {integrity: sha512-+ew5ubXzGRKVjquBL3u6najS40TG7GxCdyBll0qSRc/n+JRV9gb/yDdRL1IAgRHqjnJTdqeBKKIQabjvjRSYRQ==}
@@ -9300,8 +9303,8 @@ packages:
     resolution: {integrity: sha512-5Rk7iLNDFhFeBYc3s8l1CqzbEBcdhwR193RlD4vSNFajIcINKI8W8P0JLmBpwymHqqWbX34pJDQu39cSy/6RsA==}
     engines: {node: '>=10'}
 
-  google-libphonenumber@3.2.10:
-    resolution: {integrity: sha512-TsckE9O8QgqaIeaOXPjcJa4/kX3BzFdO1oCbMfmUpRZckml4xJhjJVxaT9Mdt/VrZZkT9lX44eHAEWfJK1tHtw==}
+  google-libphonenumber@3.2.42:
+    resolution: {integrity: sha512-60jm6Lu72WmlUJXUBJmmuZlHG2vDJ2gQ9pL5gcFsSe1Q4eigsm0Z1ayNHjMgqGUl0zey8JqKtO4QCHPV+5LCNQ==}
     engines: {node: '>=0.10'}
 
   google-p12-pem@3.1.4:
@@ -9700,9 +9703,9 @@ packages:
   infer-owner@1.0.4:
     resolution: {integrity: sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A==}
 
-  inflection@1.12.0:
-    resolution: {integrity: sha512-lRy4DxuIFWXlJU7ed8UiTJOSTqStqYdEb4CEbtXfNbkdj3nH1L+reUWiE10VWcJS2yR7tge8Z74pJjtBjNwj0w==}
-    engines: {'0': node >= 0.4.0}
+  inflection@3.0.2:
+    resolution: {integrity: sha512-+Bg3+kg+J6JUWn8J6bzFmOWkTQ6L/NHfDRSYU+EVvuKHDxUDHAXgqixHfVlzuBQaPOTac8hn43aPhMNk6rMe3g==}
+    engines: {node: '>=18.0.0'}
 
   inflight@1.0.6:
     resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==}
@@ -11335,30 +11338,26 @@ packages:
       socks:
         optional: true
 
-  mongoose-gridfs@1.2.42:
-    resolution: {integrity: sha512-n0yGLrWHeEW5PpR1xvB7bSSqcOnXdWSuwkQyEW8+u98eHfffD2kKT7Re2bxMvIBzOK76Q32uyYkOTzH+Y6MwZQ==}
-    engines: {node: '>=8.6.0', npm: '>=5.3.0'}
+  mongoose-gridfs@1.3.0:
+    resolution: {integrity: sha512-5Rrgb00LN5mRC1s+ddeQ032nGVyRUM6bbX5nqDgWsAJPcpYCjaarnKIlOf+rDxSSAhbn7GzZ3fUvddG+6OX88Q==}
+    engines: {node: '>=8.11.1', npm: '>=5.6.0'}
     peerDependencies:
-      mongoose: '>=5.9.15'
+      mongoose: '>=6.0.7'
 
   mongoose-paginate-v2@1.8.2:
     resolution: {integrity: sha512-T/Z3qKyKnPUa6UkH1IjHxdYnYApCAKk9zb2C0GF5hg3QETcI62AUAUQGCBE2tIw7fF4feUaDARMajj/bersyvg==}
     engines: {node: '>=4.0.0'}
 
-  mongoose-schema-jsonschema@2.0.1:
-    resolution: {integrity: sha512-OHXK/tSziSSuNXKxsjvDyYwnGVB+/c5Dn7p2sI6Vri0vTJm13Nime68YwK8m1j9jgkqh2ZXiO5TyVXTQHtxG8Q==}
-    peerDependencies:
-      mongoose: ^5.0.0 || ^6.0.0
-
   mongoose-unique-validator@2.0.3:
     resolution: {integrity: sha512-3/8pmvAC1acBZS6eWKAWQUiZBlARE1wyWtjga4iQ2wDJeOfRlIKmAvTNHSZXKaAf7RCRUd7wh7as6yWAOrjpQg==}
     peerDependencies:
       mongoose: ^5.2.1
 
-  mongoose-valid8@1.6.18:
-    resolution: {integrity: sha512-0MgK1sD9HXAK7I2lyFRlwNMfZ8+Ahx7rH0Hg6sJyXiXMCazK6Mw4lNcdX0ISjuKkI7joORz2T5Eyw6cJ3q5vQQ==}
+  mongoose-valid8@1.7.1:
+    resolution: {integrity: sha512-65Zf+md73TkMNMUQ3tJzOtEm3MxJW15bpy+lBomqep7FNtiMjNoMzbN0P3/1FCpbqIkXDpeWH7WtyE2+D0tmhg==}
+    engines: {node: '>=8.11.1', npm: '>=5.6.0'}
     peerDependencies:
-      mongoose: '>=5.9.15'
+      mongoose: '>=6.0.7'
 
   mongoose@6.13.8:
     resolution: {integrity: sha512-JHKco/533CyVrqCbyQsnqMpLn8ZCiKrPDTd2mvo2W7ygIvhygWjX2wj+RPjn6upZZgw0jC6U51RD7kUsyK8NBg==}
@@ -11983,10 +11982,6 @@ packages:
     resolution: {integrity: sha512-ybiGyvspI+fAoRQbIPRddCcSTV9/LsJbf0e/S85VLowVGzRmokfneg2kwVW/KU5rOXrPSbF1qAKPMgNTqqROQQ==}
     engines: {node: '>=18'}
 
-  parse-ms@2.1.0:
-    resolution: {integrity: sha512-kHt7kzLoS9VBZfUsiKjv43mr91ea+U05EyKkEtqp7vNbHxmaVuEqN7XxeEVnGrMtYOAxGrDElSi96K7EgO1zCA==}
-    engines: {node: '>=6'}
-
   parse5-htmlparser2-tree-adapter@6.0.1:
     resolution: {integrity: sha512-qPuWvbLgvDGilKc5BoicRovlT4MtYT6JfJyBOMDsKoiT+GiuP5qyrPCnR9HcPECIJJmZh5jRndyNThnhhb/vlA==}
 
@@ -12502,8 +12497,8 @@ packages:
   randombytes@2.1.0:
     resolution: {integrity: sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==}
 
-  randomcolor@0.5.4:
-    resolution: {integrity: sha512-nYd4nmTuuwMFzHL6W+UWR5fNERGZeVauho8mrJDUSXdNDbao4rbrUwhuLgKC/j8VCS5+34Ria8CsTDuBjrIrQA==}
+  randomcolor@0.6.2:
+    resolution: {integrity: sha512-Mn6TbyYpFgwFuQ8KJKqf3bqqY9O1y37/0jgSK/61PUxV4QfIMv0+K2ioq8DfOjkBslcjwSzRfIDEXfzA9aCx7A==}
 
   range-parser@1.2.1:
     resolution: {integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==}
@@ -17795,48 +17790,62 @@ snapshots:
       '@lezer/highlight': 1.2.1
       '@lezer/lr': 1.4.2
 
-  '@lykmapipo/common@0.34.3':
+  '@lykmapipo/common@0.44.5':
     dependencies:
       auto-parse: 1.8.0
-      browser-or-node: 1.2.1
+      browser-or-node: 3.0.0
       flat: 5.0.2
-      inflection: 1.12.0
+      inflection: 3.0.2
       lodash: 4.17.21
       mime: 3.0.0
       moment: 2.30.1
       object-hash: 2.2.0
-      parse-json: 5.2.0
-      parse-ms: 2.1.0
-      randomcolor: 0.5.4
+      randomcolor: 0.6.2
       statuses: 2.0.1
       string-template: 1.0.0
       striptags: 3.2.0
       uuid: 11.1.0
 
-  '@lykmapipo/env@0.17.8':
+  '@lykmapipo/env@0.17.39':
     dependencies:
-      '@lykmapipo/common': 0.34.3
+      '@lykmapipo/common': 0.44.5
       dotenv: 16.4.5
       dotenv-expand: 5.1.0
       lodash: 4.17.21
       rc: 1.2.8
       semver: 7.6.3
 
-  '@lykmapipo/mongoose-common@0.35.0(mongoose@6.13.8(@aws-sdk/client-sso-oidc@3.600.0))':
+  '@lykmapipo/mongoose-common@0.40.0(@aws-sdk/client-sso-oidc@3.600.0)':
     dependencies:
-      '@lykmapipo/common': 0.34.3
-      '@lykmapipo/env': 0.17.8
+      '@lykmapipo/common': 0.44.5
+      '@lykmapipo/env': 0.17.39
+      '@lykmapipo/mongoose-connection': 0.5.2(@aws-sdk/client-sso-oidc@3.600.0)
       async: 3.2.4
       lodash: 4.17.21
       mongoose: 6.13.8(@aws-sdk/client-sso-oidc@3.600.0)
-      mongoose-schema-jsonschema: 2.0.1(mongoose@6.13.8(@aws-sdk/client-sso-oidc@3.600.0))
-      mongoose-valid8: 1.6.18(mongoose@6.13.8(@aws-sdk/client-sso-oidc@3.600.0))
+      mongoose-valid8: 1.7.1(mongoose@6.13.8(@aws-sdk/client-sso-oidc@3.600.0))
+    transitivePeerDependencies:
+      - '@aws-sdk/client-sso-oidc'
+      - aws-crt
+      - supports-color
 
-  '@lykmapipo/phone@0.6.5':
+  '@lykmapipo/mongoose-connection@0.5.2(@aws-sdk/client-sso-oidc@3.600.0)':
     dependencies:
-      '@lykmapipo/common': 0.34.3
-      '@lykmapipo/env': 0.17.8
-      google-libphonenumber: 3.2.10
+      '@lykmapipo/common': 0.44.5
+      '@lykmapipo/env': 0.17.39
+      async: 3.2.4
+      lodash: 4.17.21
+      mongoose: 6.13.8(@aws-sdk/client-sso-oidc@3.600.0)
+    transitivePeerDependencies:
+      - '@aws-sdk/client-sso-oidc'
+      - aws-crt
+      - supports-color
+
+  '@lykmapipo/phone@0.7.16':
+    dependencies:
+      '@lykmapipo/common': 0.44.5
+      '@lykmapipo/env': 0.17.39
+      google-libphonenumber: 3.2.42
       lodash: 4.17.21
 
   '@manypkg/find-root@1.1.0':
@@ -22307,7 +22316,7 @@ snapshots:
       '@browser-bunyan/console-raw-stream': 1.8.0
       '@browser-bunyan/levels': 1.8.0
 
-  browser-or-node@1.2.1: {}
+  browser-or-node@3.0.0: {}
 
   browser-sync-client@3.0.4:
     dependencies:
@@ -25162,7 +25171,7 @@ snapshots:
       - encoding
       - supports-color
 
-  google-libphonenumber@3.2.10: {}
+  google-libphonenumber@3.2.42: {}
 
   google-p12-pem@3.1.4:
     dependencies:
@@ -25686,7 +25695,7 @@ snapshots:
 
   infer-owner@1.0.4: {}
 
-  inflection@1.12.0: {}
+  inflection@3.0.2: {}
 
   inflight@1.0.6:
     dependencies:
@@ -27750,30 +27759,29 @@ snapshots:
       '@aws-sdk/credential-providers': 3.600.0(@aws-sdk/client-sso-oidc@3.600.0)
       socks: 2.8.3
 
-  mongoose-gridfs@1.2.42(mongoose@6.13.8(@aws-sdk/client-sso-oidc@3.600.0)):
+  mongoose-gridfs@1.3.0(@aws-sdk/client-sso-oidc@3.600.0)(mongoose@6.13.8(@aws-sdk/client-sso-oidc@3.600.0)):
     dependencies:
-      '@lykmapipo/mongoose-common': 0.35.0(mongoose@6.13.8(@aws-sdk/client-sso-oidc@3.600.0))
+      '@lykmapipo/mongoose-common': 0.40.0(@aws-sdk/client-sso-oidc@3.600.0)
       lodash: 4.17.21
       mongoose: 6.13.8(@aws-sdk/client-sso-oidc@3.600.0)
       stream-read: 1.1.2
+    transitivePeerDependencies:
+      - '@aws-sdk/client-sso-oidc'
+      - aws-crt
+      - supports-color
 
   mongoose-paginate-v2@1.8.2: {}
 
-  mongoose-schema-jsonschema@2.0.1(mongoose@6.13.8(@aws-sdk/client-sso-oidc@3.600.0)):
-    dependencies:
-      mongoose: 6.13.8(@aws-sdk/client-sso-oidc@3.600.0)
-      pluralize: 8.0.0
-
   mongoose-unique-validator@2.0.3(mongoose@6.13.8(@aws-sdk/client-sso-oidc@3.600.0)):
     dependencies:
       lodash.foreach: 4.5.0
       lodash.get: 4.4.2
       mongoose: 6.13.8(@aws-sdk/client-sso-oidc@3.600.0)
 
-  mongoose-valid8@1.6.18(mongoose@6.13.8(@aws-sdk/client-sso-oidc@3.600.0)):
+  mongoose-valid8@1.7.1(mongoose@6.13.8(@aws-sdk/client-sso-oidc@3.600.0)):
     dependencies:
-      '@lykmapipo/env': 0.17.8
-      '@lykmapipo/phone': 0.6.5
+      '@lykmapipo/env': 0.17.39
+      '@lykmapipo/phone': 0.7.16
       lodash: 4.17.21
       mongoose: 6.13.8(@aws-sdk/client-sso-oidc@3.600.0)
       validator: 13.12.0
@@ -28573,8 +28581,6 @@ snapshots:
       index-to-position: 1.1.0
       type-fest: 4.41.0
 
-  parse-ms@2.1.0: {}
-
   parse5-htmlparser2-tree-adapter@6.0.1:
     dependencies:
       parse5: 6.0.1
@@ -29060,7 +29066,7 @@ snapshots:
     dependencies:
       safe-buffer: 5.2.1
 
-  randomcolor@0.5.4: {}
+  randomcolor@0.6.2: {}
 
   range-parser@1.2.1: {}