Просмотр исходного кода

Merge branch 'feat/page-bulk-export' into feat/135772-pdf-page-bulk-export

Futa Arai 1 год назад
Родитель
Commit
ec73d5281c
53 измененных файлов с 1564 добавлено и 1106 удалено
  1. 1 1
      .devcontainer/app/devcontainer.json
  2. 2 1
      .vscode/settings.json
  3. 24 1
      CHANGELOG.md
  4. 1 0
      apps/app/bin/swagger-jsdoc/definition-apiv3.js
  5. 2 4
      apps/app/package.json
  6. 1 1
      apps/app/public/static/locales/en_US/admin.json
  7. 1 1
      apps/app/public/static/locales/fr_FR/admin.json
  8. 1 1
      apps/app/public/static/locales/ja_JP/admin.json
  9. 1 1
      apps/app/public/static/locales/zh_CN/admin.json
  10. 1 0
      apps/app/src/client/components/StaffCredit/StaffCredit.tsx
  11. 1 10
      apps/app/src/client/components/TreeItem/TreeItemLayout.tsx
  12. 1 0
      apps/app/src/components/FontFamily/use-material-symbols-outlined.tsx
  13. 4 2
      apps/app/src/components/FontFamily/use-source-han-code-jp.tsx
  14. 4 1
      apps/app/src/features/openai/client/components/AiIntegration/AiIntegrationDisableMode.tsx
  15. 2 0
      apps/app/src/features/page-bulk-export/interfaces/page-bulk-export.ts
  16. 4 0
      apps/app/src/features/page-bulk-export/server/models/page-bulk-export-job.ts
  17. 2 7
      apps/app/src/features/page-bulk-export/server/routes/apiv3/page-bulk-export.ts
  18. 38 0
      apps/app/src/features/page-bulk-export/server/service/check-page-bulk-export-job-in-progress-cron.ts
  19. 11 11
      apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-clean-up-cron.integ.ts
  20. 16 16
      apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-clean-up-cron.ts
  21. 15 0
      apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-cron/errors.ts
  22. 271 0
      apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-cron/index.ts
  23. 117 0
      apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-cron/steps/compress-and-upload-async.ts
  24. 103 0
      apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-cron/steps/create-page-snapshots-async.ts
  25. 79 0
      apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-cron/steps/export-pages-to-fs-async.ts
  26. 82 0
      apps/app/src/features/page-bulk-export/server/service/page-bulk-export.ts
  27. 0 30
      apps/app/src/features/page-bulk-export/server/service/page-bulk-export/errors.ts
  28. 0 462
      apps/app/src/features/page-bulk-export/server/service/page-bulk-export/index.ts
  29. 0 231
      apps/app/src/features/page-bulk-export/server/service/page-bulk-export/page-bulk-export-job-manager.spec.ts
  30. 0 125
      apps/app/src/features/page-bulk-export/server/service/page-bulk-export/page-bulk-export-job-manager.ts
  31. 5 0
      apps/app/src/features/rate-limiter/config/index.ts
  32. 60 0
      apps/app/src/features/rate-limiter/middleware/consume-points.integ.ts
  33. 31 0
      apps/app/src/features/rate-limiter/middleware/consume-points.ts
  34. 11 43
      apps/app/src/features/rate-limiter/middleware/factory.ts
  35. 30 0
      apps/app/src/features/rate-limiter/middleware/rate-limiter-factory.ts
  36. 9 36
      apps/app/src/server/crowi/index.js
  37. 6 3
      apps/app/src/server/routes/apiv3/admin-home.ts
  38. 89 37
      apps/app/src/server/routes/apiv3/attachment.js
  39. 286 3
      apps/app/src/server/routes/apiv3/bookmark-folder.ts
  40. 3 2
      apps/app/src/server/routes/apiv3/page/index.ts
  41. 6 1
      apps/app/src/server/routes/login.js
  42. 12 0
      apps/app/src/server/service/config-loader.ts
  43. 7 2
      apps/app/src/server/service/cron.ts
  44. 61 0
      apps/app/src/server/util/runtime-versions.ts
  45. 1 2
      apps/slackbot-proxy/package.json
  46. 2 1
      package.json
  47. 0 1
      packages/core/package.json
  48. 0 1
      packages/presentation/package.json
  49. 0 1
      packages/remark-attachment-refs/package.json
  50. 0 1
      packages/remark-drawio/package.json
  51. 0 1
      packages/remark-lsx/package.json
  52. 1 2
      packages/slack/package.json
  53. 159 63
      pnpm-lock.yaml

+ 1 - 1
.devcontainer/app/devcontainer.json

@@ -40,7 +40,7 @@
       "settings": {
         "terminal.integrated.defaultProfile.linux": "bash"
       }
-    }
+    },
   },
 
   // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.

+ 2 - 1
.vscode/settings.json

@@ -25,6 +25,7 @@
   "typescript.enablePromptUseWorkspaceTsdk": true,
   "typescript.preferences.autoImportFileExcludePatterns": ["node_modules/*"],
   "typescript.validate.enable": true,
-  "typescript.surveys.enabled": false
+  "typescript.surveys.enabled": false,
 
+  "vitest.filesWatcherInclude": "**/*"
 }

+ 24 - 1
CHANGELOG.md

@@ -1,9 +1,32 @@
 # Changelog
 
-## [Unreleased](https://github.com/weseek/growi/compare/v7.1.2...HEAD)
+## [Unreleased](https://github.com/weseek/growi/compare/v7.1.4...HEAD)
 
 *Please do not manually update this file. We've automated the process.*
 
+## [v7.1.4](https://github.com/weseek/growi/compare/v7.1.3...v7.1.4) - 2024-11-26
+
+### 🐛 Bug Fixes
+
+* fix: Failed to export the page markdown (#9444) @miya
+
+## [v7.1.3](https://github.com/weseek/growi/compare/v7.1.2...v7.1.3) - 2024-11-26
+
+### 💎 Features
+
+* feat(ai): Set a rate limit for vector store rebuild (#9404) @miya
+
+### 🚀 Improvement
+
+* imprv: Fonts preload settings (#9432) @yuki-takei
+* imprv: Use stream.pipeline (#9361) @reiji-h
+
+### 🐛 Bug Fixes
+
+* fix: Retrieving runtime versions (#9438) @yuki-takei
+* fix: Notification for new user creation (#9434) @yuki-takei
+* fix:  Deleted pages appear in the page tree (#9337) @reiji-h
+
 ## [v7.1.2](https://github.com/weseek/growi/compare/v7.1.1...v7.1.2) - 2024-11-18
 
 ### 🚀 Improvement

+ 1 - 0
apps/app/bin/swagger-jsdoc/definition-apiv3.js

@@ -36,6 +36,7 @@ module.exports = {
       tags: [
         'Attachment',
         'Bookmarks',
+        'BookmarkFolders',
         'Page',
         'Pages',
         'Revisions',

+ 2 - 4
apps/app/package.json

@@ -1,6 +1,6 @@
 {
   "name": "@growi/app",
-  "version": "7.1.3-RC.0",
+  "version": "7.1.5-RC.0",
   "license": "MIT",
   "private": "true",
   "scripts": {
@@ -115,7 +115,6 @@
     "ejs": "^3.1.10",
     "esa-node": "^0.2.2",
     "escape-string-regexp": "^4.0.0",
-    "eslint-plugin-regex": "^1.8.0",
     "expose-gc": "^1.0.0",
     "express": "^4.20.0",
     "express-bunyan-logger": "^1.3.3",
@@ -161,7 +160,7 @@
     "next": "^14.2.13",
     "next-dynamic-loading-props": "^0.1.1",
     "next-i18next": "^15.3.1",
-    "next-superjson": "^0.0.4",
+    "next-superjson": "^1.0.7",
     "next-themes": "^0.2.1",
     "nocache": "^4.0.0",
     "node-cron": "^3.0.2",
@@ -283,7 +282,6 @@
     "downshift": "^8.2.3",
     "eazy-logger": "^3.1.0",
     "eslint-plugin-jest": "^26.5.3",
-    "eslint-plugin-regex": "^1.8.0",
     "fslightbox-react": "^1.7.6",
     "handsontable": "=6.2.2",
     "happy-dom": "^15.7.4",

+ 1 - 1
apps/app/public/static/locales/en_US/admin.json

@@ -1141,7 +1141,7 @@
   },
   "ai_integration": {
     "ai_integration": "AI Integration",
-    "disable_mode_explanation": "Currently, AI integration is disabled. To enable it, please set the environment variable <code>AI_ENABLED</code> to true.",
+    "disable_mode_explanation": "Currently, AI integration is disabled. To enable it, configure the <code>AI_ENABLED</code> environment variable along with the required additional variables.<br><br>For details, please refer to the <a target='blank' rel='noopener noreferrer' href={{documentationUrl}}en/guide/features/ai-knowledge-assistant.html>documentation</a>.",
     "ai_search_management": "AI search management",
     "rebuild_vector_store": "Rebuild Vector Store",
     "rebuild_vector_store_label": "Rebuild",

+ 1 - 1
apps/app/public/static/locales/fr_FR/admin.json

@@ -1140,7 +1140,7 @@
   },
   "ai_integration": {
     "ai_integration": "Intégration de l'IA",
-    "disable_mode_explanation": "Actuellement, l'intégration de l'IA est désactivée. Pour l'activer, veuillez définir la variable d'environnement <code>AI_ENABLED</code> sur true",
+    "disable_mode_explanation": "Actuellement, l'intégration AI est désactivée. Pour l'activer, configurez la variable d'environnement <code>AI_ENABLED</code> ainsi que les autres variables nécessaires.<br><br>Pour plus de détails, veuillez consulter la <a target='blank' rel='noopener noreferrer' href={{documentationUrl}}en/guide/features/ai-knowledge-assistant.html>documentation</a>.",
     "ai_search_management": "Gestion de la recherche par l'IA",
     "rebuild_vector_store": "Reconstruire le magasin Vector",
     "rebuild_vector_store_label": "Reconstruire",

+ 1 - 1
apps/app/public/static/locales/ja_JP/admin.json

@@ -1151,7 +1151,7 @@
   },
   "ai_integration": {
     "ai_integration": "AI 連携",
-    "disable_mode_explanation": "現在、AI 連携は無効になっています。有効にする場合は環境変数 <code>AI_ENABLED</code> を true に設定してください。",
+    "disable_mode_explanation": "現在、AI 連携は無効になっています。有効にする場合は環境変数 <code>AI_ENABLED</code> の他、必要な環境変数を設定してください。<br><br>詳細は<a target='blank' rel='noopener noreferrer' href={{documentationUrl}}ja/guide/features/ai-knowledge-assistant.html>ドキュメント</a>を参照してください。",
     "ai_search_management": "AI 検索管理",
     "rebuild_vector_store": "Vector Store のリビルド",
     "rebuild_vector_store_label": "リビルド",

+ 1 - 1
apps/app/public/static/locales/zh_CN/admin.json

@@ -1150,7 +1150,7 @@
   },
   "ai_integration": {
     "ai_integration": "AI 集成",
-    "disable_mode_explanation": "目前,AI 集成已禁用。要启用它,请将环境变量 <code>AI_ENABLED</code> 设置为 true",
+    "disable_mode_explanation": "目前,AI 集成已被禁用。若要启用,请配置 <code>AI_ENABLED</code> 环境变量以及其他必要的变量。<br><br>详细信息请参考<a target='blank' rel='noopener noreferrer' href={{documentationUrl}}en/guide/features/ai-knowledge-assistant.html>文档</a>。",
     "ai_search_management": "AI 搜索管理",
     "rebuild_vector_store": "重建矢量商店",
     "rebuild_vector_store_label": "重建",

+ 1 - 0
apps/app/src/client/components/StaffCredit/StaffCredit.tsx

@@ -21,6 +21,7 @@ const logger = loggerFactory('growi:cli:StaffCredit');
 const pressStart2P = localFont({
   src: '../../../../resource/fonts/PressStart2P-latin.woff2',
   display: 'block',
+  preload: false,
 });
 
 

+ 1 - 10
apps/app/src/client/components/TreeItem/TreeItemLayout.tsx

@@ -36,7 +36,7 @@ export const TreeItemLayout: FC<TreeItemLayoutProps> = (props) => {
 
   const { page, children } = itemNode;
 
-  const [currentChildren, setCurrentChildren] = useState(children);
+  const [currentChildren, setCurrentChildren] = useState<ItemNode[]>(children);
   const [isOpen, setIsOpen] = useState(_isOpen);
 
   const { data } = useSWRxPageChildren(isOpen ? page._id : null);
@@ -87,15 +87,6 @@ export const TreeItemLayout: FC<TreeItemLayoutProps> = (props) => {
     if (hasChildren()) setIsOpen(true);
   }, [hasChildren]);
 
-  /*
-   * Make sure itemNode.children and currentChildren are synced
-   */
-  useEffect(() => {
-    if (children.length > currentChildren.length) {
-      setCurrentChildren(children);
-    }
-  }, [children, currentChildren.length, targetPathOrId]);
-
   /*
    * When swr fetch succeeded
    */

+ 1 - 0
apps/app/src/components/FontFamily/use-material-symbols-outlined.tsx

@@ -6,6 +6,7 @@ const materialSymbolsOutlined = localFont({
   src: '../../../resource/fonts/MaterialSymbolsOutlined-opsz,wght,FILL@20..48,300,0..1.woff2',
   adjustFontFallback: false,
   display: 'block',
+  preload: false,
 });
 
 export const useMaterialSymbolsOutlined: DefineStyle = () => (

+ 4 - 2
apps/app/src/components/FontFamily/use-source-han-code-jp.tsx

@@ -4,11 +4,13 @@ import type { DefineStyle } from './types';
 
 const sourceHanCodeJPSubsetMain = localFont({
   src: '../../../resource/fonts/SourceHanCodeJP-Regular-subset-main.woff2',
-  display: 'optional',
+  display: 'swap',
+  preload: false,
 });
 const sourceHanCodeJPSubsetJis2 = localFont({
   src: '../../../resource/fonts/SourceHanCodeJP-Regular-subset-jis2.woff2',
-  display: 'optional',
+  display: 'swap',
+  preload: false,
 });
 
 export const useSourceHanCodeJP: DefineStyle = () => (

+ 4 - 1
apps/app/src/features/openai/client/components/AiIntegration/AiIntegrationDisableMode.tsx

@@ -3,8 +3,11 @@ import React from 'react';
 
 import { useTranslation } from 'react-i18next';
 
+import { useGrowiDocumentationUrl } from '~/stores-universal/context';
+
 export const AiIntegrationDisableMode: FC = () => {
   const { t } = useTranslation('admin');
+  const { data: documentationUrl } = useGrowiDocumentationUrl();
 
   return (
     <div className="ccontainer-lg">
@@ -17,7 +20,7 @@ export const AiIntegrationDisableMode: FC = () => {
               <h1 className="text-center">{t('ai_integration.ai_integration')}</h1>
               <h3
                 // eslint-disable-next-line react/no-danger
-                dangerouslySetInnerHTML={{ __html: t('ai_integration.disable_mode_explanation') }}
+                dangerouslySetInnerHTML={{ __html: t('ai_integration.disable_mode_explanation', { documentationUrl }) }}
               />
             </div>
           </div>

+ 2 - 0
apps/app/src/features/page-bulk-export/interfaces/page-bulk-export.ts

@@ -36,7 +36,9 @@ export interface IPageBulkExportJob {
   completedAt?: Date, // the date at which job was completed
   attachment?: Ref<IAttachment>,
   status: PageBulkExportJobStatus,
+  statusOnPreviousCronExec?: PageBulkExportJobStatus, // status on previous cron execution
   revisionListHash?: string, // Hash created from the list of revision IDs. Used to detect existing duplicate uploads.
+  restartFlag: boolean, // flag to restart the job
   createdAt?: Date,
   updatedAt?: Date
 }

+ 4 - 0
apps/app/src/features/page-bulk-export/server/models/page-bulk-export-job.ts

@@ -21,6 +21,10 @@ const pageBulkExportJobSchema = new Schema<PageBulkExportJobDocument>({
   status: {
     type: String, enum: Object.values(PageBulkExportJobStatus), required: true, default: PageBulkExportJobStatus.initializing,
   },
+  statusOnPreviousCronExec: {
+    type: String, enum: Object.values(PageBulkExportJobStatus),
+  },
+  restartFlag: { type: Boolean, required: true, default: false },
   revisionListHash: { type: String },
 }, { timestamps: true });
 

+ 2 - 7
apps/app/src/features/page-bulk-export/server/routes/apiv3/page-bulk-export.ts

@@ -7,8 +7,7 @@ import type Crowi from '~/server/crowi';
 import type { ApiV3Response } from '~/server/routes/apiv3/interfaces/apiv3-response';
 import loggerFactory from '~/utils/logger';
 
-import { pageBulkExportService } from '../../service/page-bulk-export';
-import { DuplicateBulkExportJobError } from '../../service/page-bulk-export/errors';
+import { pageBulkExportService, DuplicateBulkExportJobError } from '../../service/page-bulk-export';
 
 const logger = loggerFactory('growi:routes:apiv3:page-bulk-export');
 
@@ -36,13 +35,9 @@ module.exports = (crowi: Crowi): Router => {
     }
 
     const { path, format, restartJob } = req.body;
-    const activityParameters = {
-      ip: req.ip,
-      endpoint: req.originalUrl,
-    };
 
     try {
-      await pageBulkExportService?.createAndExecuteOrRestartBulkExportJob(path, req.user, activityParameters, restartJob);
+      await pageBulkExportService?.createOrResetBulkExportJob(path, req.user, restartJob);
       return res.apiv3({}, 204);
     }
     catch (err) {

+ 38 - 0
apps/app/src/features/page-bulk-export/server/service/check-page-bulk-export-job-in-progress-cron.ts

@@ -0,0 +1,38 @@
+import { configManager } from '~/server/service/config-manager';
+import CronService from '~/server/service/cron';
+import loggerFactory from '~/utils/logger';
+
+import { PageBulkExportJobInProgressStatus } from '../../interfaces/page-bulk-export';
+import PageBulkExportJob from '../models/page-bulk-export-job';
+
+import { pageBulkExportJobCronService } from './page-bulk-export-job-cron';
+
+const logger = loggerFactory('growi:service:check-page-bulk-export-job-in-progress-cron');
+
+/**
+ * Manages cronjob which checks if PageBulkExportJob in progress exists.
+ * If it does, and PageBulkExportJobCronService is not running, start PageBulkExportJobCronService
+ */
+class CheckPageBulkExportJobInProgressCronService extends CronService {
+
+  override getCronSchedule(): string {
+    return configManager.getConfig('crowi', 'app:checkPageBulkExportJobInProgressCronSchedule');
+  }
+
+  override async executeJob(): Promise<void> {
+    const pageBulkExportJobInProgress = await PageBulkExportJob.findOne({
+      $or: Object.values(PageBulkExportJobInProgressStatus).map(status => ({ status })),
+    });
+    const pageBulkExportInProgressExists = pageBulkExportJobInProgress != null;
+
+    if (pageBulkExportInProgressExists && !pageBulkExportJobCronService?.isJobRunning()) {
+      pageBulkExportJobCronService?.startCron();
+    }
+    else if (!pageBulkExportInProgressExists) {
+      pageBulkExportJobCronService?.stopCron();
+    }
+  }
+
+}
+
+export const checkPageBulkExportJobInProgressCronService = new CheckPageBulkExportJobInProgressCronService(); // singleton instance

+ 11 - 11
apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-cron.integ.ts → apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-clean-up-cron.integ.ts

@@ -5,7 +5,7 @@ import { configManager } from '~/server/service/config-manager';
 import { PageBulkExportFormat, PageBulkExportJobStatus } from '../../interfaces/page-bulk-export';
 import PageBulkExportJob from '../models/page-bulk-export-job';
 
-import instanciatePageBulkExportJobCronService, { pageBulkExportJobCronService } from './page-bulk-export-job-cron';
+import instanciatePageBulkExportJobCleanUpCronService, { pageBulkExportJobCleanUpCronService } from './page-bulk-export-job-clean-up-cron';
 
 // TODO: use actual user model after ~/server/models/user.js becomes importable in vitest
 // ref: https://github.com/vitest-dev/vitest/issues/846
@@ -18,25 +18,25 @@ const userSchema = new mongoose.Schema({
 });
 const User = mongoose.model('User', userSchema);
 
-vi.mock('./page-bulk-export', () => {
+vi.mock('./page-bulk-export-job-cron', () => {
   return {
-    pageBulkExportService: {
+    pageBulkExportJobCronService: {
       cleanUpExportJobResources: vi.fn(() => Promise.resolve()),
     },
   };
 });
 
-describe('PageBulkExportJobCronService', () => {
+describe('PageBulkExportJobCleanUpCronService', () => {
   const crowi = { event: () => {} };
   let user;
 
   beforeAll(async() => {
     user = await User.create({
-      name: 'Example for PageBulkExportJobCronService Test',
-      username: 'page bulk export job cron test user',
-      email: 'bulkExportCronTestUser@example.com',
+      name: 'Example for PageBulkExportJobCleanUpCronService Test',
+      username: 'page bulk export job cleanup cron test user',
+      email: 'bulkExportCleanUpCronTestUser@example.com',
     });
-    instanciatePageBulkExportJobCronService(crowi);
+    instanciatePageBulkExportJobCleanUpCronService(crowi);
   });
 
   beforeEach(async() => {
@@ -87,7 +87,7 @@ describe('PageBulkExportJobCronService', () => {
       expect(await PageBulkExportJob.find()).toHaveLength(4);
 
       // act
-      await pageBulkExportJobCronService?.deleteExpiredExportJobs();
+      await pageBulkExportJobCleanUpCronService?.deleteExpiredExportJobs();
       const jobs = await PageBulkExportJob.find();
 
       // assert
@@ -135,7 +135,7 @@ describe('PageBulkExportJobCronService', () => {
       expect(await PageBulkExportJob.find()).toHaveLength(4);
 
       // act
-      await pageBulkExportJobCronService?.deleteDownloadExpiredExportJobs();
+      await pageBulkExportJobCleanUpCronService?.deleteDownloadExpiredExportJobs();
       const jobs = await PageBulkExportJob.find();
 
       // assert
@@ -167,7 +167,7 @@ describe('PageBulkExportJobCronService', () => {
       expect(await PageBulkExportJob.find()).toHaveLength(3);
 
       // act
-      await pageBulkExportJobCronService?.deleteFailedExportJobs();
+      await pageBulkExportJobCleanUpCronService?.deleteFailedExportJobs();
       const jobs = await PageBulkExportJob.find();
 
       // assert

+ 16 - 16
apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-cron.ts → apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-clean-up-cron.ts

@@ -8,14 +8,14 @@ import { PageBulkExportEnabledFileUploadTypes, PageBulkExportJobInProgressStatus
 import type { PageBulkExportJobDocument } from '../models/page-bulk-export-job';
 import PageBulkExportJob from '../models/page-bulk-export-job';
 
-import { pageBulkExportService } from './page-bulk-export';
+import { pageBulkExportJobCronService } from './page-bulk-export-job-cron';
 
-const logger = loggerFactory('growi:service:cron');
+const logger = loggerFactory('growi:service:page-bulk-export-job-clean-up-cron');
 
 /**
  * Manages cronjob which deletes unnecessary bulk export jobs
  */
-class PageBulkExportJobCronService extends CronService {
+class PageBulkExportJobCleanUpCronService extends CronService {
 
   crowi: any;
 
@@ -25,7 +25,7 @@ class PageBulkExportJobCronService extends CronService {
   }
 
   override getCronSchedule(): string {
-    return configManager.getConfig('crowi', 'app:pageBulkExportJobCronSchedule');
+    return configManager.getConfig('crowi', 'app:pageBulkExportJobCleanUpCronSchedule');
   }
 
   override async executeJob(): Promise<void> {
@@ -47,8 +47,8 @@ class PageBulkExportJobCronService extends CronService {
       createdAt: { $lt: new Date(Date.now() - exportJobExpirationSeconds * 1000) },
     });
 
-    if (pageBulkExportService != null) {
-      await this.cleanUpAndDeleteBulkExportJobs(expiredExportJobs, pageBulkExportService.cleanUpExportJobResources.bind(pageBulkExportService));
+    if (pageBulkExportJobCronService != null) {
+      await this.cleanUpAndDeleteBulkExportJobs(expiredExportJobs, pageBulkExportJobCronService.cleanUpExportJobResources.bind(pageBulkExportJobCronService));
     }
   }
 
@@ -63,8 +63,8 @@ class PageBulkExportJobCronService extends CronService {
       completedAt: { $lt: thresholdDate },
     });
 
-    const cleanup = async(job: PageBulkExportJobDocument) => {
-      await pageBulkExportService?.cleanUpExportJobResources(job);
+    const cleanUp = async(job: PageBulkExportJobDocument) => {
+      await pageBulkExportJobCronService?.cleanUpExportJobResources(job);
 
       const hasSameAttachmentAndDownloadNotExpired = await PageBulkExportJob.findOne({
         attachment: job.attachment,
@@ -77,7 +77,7 @@ class PageBulkExportJobCronService extends CronService {
       }
     };
 
-    await this.cleanUpAndDeleteBulkExportJobs(downloadExpiredExportJobs, cleanup);
+    await this.cleanUpAndDeleteBulkExportJobs(downloadExpiredExportJobs, cleanUp);
   }
 
   /**
@@ -86,22 +86,22 @@ class PageBulkExportJobCronService extends CronService {
   async deleteFailedExportJobs() {
     const failedExportJobs = await PageBulkExportJob.find({ status: PageBulkExportJobStatus.failed });
 
-    if (pageBulkExportService != null) {
-      await this.cleanUpAndDeleteBulkExportJobs(failedExportJobs, pageBulkExportService.cleanUpExportJobResources.bind(pageBulkExportService));
+    if (pageBulkExportJobCronService != null) {
+      await this.cleanUpAndDeleteBulkExportJobs(failedExportJobs, pageBulkExportJobCronService.cleanUpExportJobResources.bind(pageBulkExportJobCronService));
     }
   }
 
   async cleanUpAndDeleteBulkExportJobs(
       pageBulkExportJobs: HydratedDocument<PageBulkExportJobDocument>[],
-      cleanup: (job: PageBulkExportJobDocument) => Promise<void>,
+      cleanUp: (job: PageBulkExportJobDocument) => Promise<void>,
   ): Promise<void> {
-    const results = await Promise.allSettled(pageBulkExportJobs.map(job => cleanup(job)));
+    const results = await Promise.allSettled(pageBulkExportJobs.map(job => cleanUp(job)));
     results.forEach((result) => {
       if (result.status === 'rejected') logger.error(result.reason);
     });
 
     // Only batch delete jobs which have been successfully cleaned up
-    // Cleanup failed jobs will be retried in the next cron execution
+    // Clean up failed jobs will be retried in the next cron execution
     const cleanedUpJobs = pageBulkExportJobs.filter((_, index) => results[index].status === 'fulfilled');
     if (cleanedUpJobs.length > 0) {
       const cleanedUpJobIds = cleanedUpJobs.map(job => job._id);
@@ -112,7 +112,7 @@ class PageBulkExportJobCronService extends CronService {
 }
 
 // eslint-disable-next-line import/no-mutable-exports
-export let pageBulkExportJobCronService: PageBulkExportJobCronService | undefined; // singleton instance
+export let pageBulkExportJobCleanUpCronService: PageBulkExportJobCleanUpCronService | undefined; // singleton instance
 export default function instanciate(crowi): void {
-  pageBulkExportJobCronService = new PageBulkExportJobCronService(crowi);
+  pageBulkExportJobCleanUpCronService = new PageBulkExportJobCleanUpCronService(crowi);
 }

+ 15 - 0
apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-cron/errors.ts

@@ -0,0 +1,15 @@
+export class BulkExportJobExpiredError extends Error {
+
+  constructor() {
+    super('Bulk export job has expired');
+  }
+
+}
+
+export class BulkExportJobRestartedError extends Error {
+
+  constructor() {
+    super('Bulk export job has restarted');
+  }
+
+}

+ 271 - 0
apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-cron/index.ts

@@ -0,0 +1,271 @@
+import fs from 'fs';
+import type { Readable } from 'stream';
+
+import type { IUser } from '@growi/core';
+import { isPopulated, getIdForRef } from '@growi/core';
+import mongoose from 'mongoose';
+
+
+import type { SupportedActionType } from '~/interfaces/activity';
+import { SupportedAction, SupportedTargetModel } from '~/interfaces/activity';
+import type Crowi from '~/server/crowi';
+import type { ObjectIdLike } from '~/server/interfaces/mongoose-utils';
+import type { ActivityDocument } from '~/server/models/activity';
+import { configManager } from '~/server/service/config-manager';
+import CronService from '~/server/service/cron';
+import type { FileUploader } from '~/server/service/file-uploader';
+import { preNotifyService } from '~/server/service/pre-notify';
+import loggerFactory from '~/utils/logger';
+
+import { PageBulkExportJobInProgressStatus, PageBulkExportJobStatus } from '../../../interfaces/page-bulk-export';
+import type { PageBulkExportJobDocument } from '../../models/page-bulk-export-job';
+import PageBulkExportJob from '../../models/page-bulk-export-job';
+import PageBulkExportPageSnapshot from '../../models/page-bulk-export-page-snapshot';
+
+
+import { BulkExportJobExpiredError, BulkExportJobRestartedError } from './errors';
+import { compressAndUploadAsync } from './steps/compress-and-upload-async';
+import { createPageSnapshotsAsync } from './steps/create-page-snapshots-async';
+import { exportPagesToFsAsync } from './steps/export-pages-to-fs-async';
+
+
+const logger = loggerFactory('growi:service:page-bulk-export-job-cron');
+
+export interface IPageBulkExportJobCronService {
+  crowi: Crowi;
+  pageBatchSize: number;
+  maxPartSize: number;
+  compressExtension: string;
+  setStreamInExecution(jobId: ObjectIdLike, stream: Readable): void;
+  handlePipelineError(err: Error | null, pageBulkExportJob: PageBulkExportJobDocument): void;
+  notifyExportResultAndCleanUp(action: SupportedActionType, pageBulkExportJob: PageBulkExportJobDocument): Promise<void>;
+  getTmpOutputDir(pageBulkExportJob: PageBulkExportJobDocument): string;
+}
+
+/**
+ * Manages cronjob which proceeds PageBulkExportJobs in progress.
+ * If PageBulkExportJob finishes the current step, the next step will be started on the next cron execution.
+ */
+class PageBulkExportJobCronService extends CronService implements IPageBulkExportJobCronService {
+
+  crowi: Crowi;
+
+  activityEvent: any;
+
+  // multipart upload max part size
+  maxPartSize = 5 * 1024 * 1024; // 5MB
+
+  pageBatchSize = 100;
+
+  compressExtension = 'tar.gz';
+
+  // temporal path of local fs to output page files before upload
+  // TODO: If necessary, change to a proper path in https://redmine.weseek.co.jp/issues/149512
+  tmpOutputRootDir = '/tmp/page-bulk-export';
+
+  // Keep track of the stream executed for PageBulkExportJob to destroy it on job failure.
+  // The key is the id of a PageBulkExportJob.
+  private streamInExecutionMemo: {
+    [key: string]: Readable;
+  } = {};
+
+  private parallelExecLimit: number;
+
+  constructor(crowi: Crowi) {
+    super();
+    this.crowi = crowi;
+    this.activityEvent = crowi.event('activity');
+    this.parallelExecLimit = configManager.getConfig('crowi', 'app:pageBulkExportParallelExecLimit');
+  }
+
+  override getCronSchedule(): string {
+    return configManager.getConfig('crowi', 'app:pageBulkExportJobCronSchedule');
+  }
+
+  override async executeJob(): Promise<void> {
+    const pageBulkExportJobsInProgress = await PageBulkExportJob.find({
+      $or: Object.values(PageBulkExportJobInProgressStatus).map(status => ({ status })),
+    }).sort({ createdAt: 1 }).limit(this.parallelExecLimit);
+
+    pageBulkExportJobsInProgress.forEach((pageBulkExportJob) => {
+      this.proceedBulkExportJob(pageBulkExportJob);
+    });
+
+    if (pageBulkExportJobsInProgress.length === 0) {
+      this.stopCron();
+    }
+  }
+
+  /**
+   * Get the output directory on the fs to temporarily store page files before compressing and uploading
+   */
+  getTmpOutputDir(pageBulkExportJob: PageBulkExportJobDocument): string {
+    return `${this.tmpOutputRootDir}/${pageBulkExportJob._id}`;
+  }
+
+  /**
+   * Get the stream in execution for a job.
+   * A getter method that includes "undefined" in the return type
+   */
+  getStreamInExecution(jobId: ObjectIdLike): Readable | undefined {
+    return this.streamInExecutionMemo[jobId.toString()];
+  }
+
+  /**
+   * Set the stream in execution for a job
+   */
+  setStreamInExecution(jobId: ObjectIdLike, stream: Readable) {
+    this.streamInExecutionMemo[jobId.toString()] = stream;
+  }
+
+  /**
+   * Remove the stream in execution for a job
+   */
+  removeStreamInExecution(jobId: ObjectIdLike) {
+    delete this.streamInExecutionMemo[jobId.toString()];
+  }
+
+  /**
+   * Proceed the page bulk export job if the next step is executable
+   * @param pageBulkExportJob PageBulkExportJob in progress
+   */
+  async proceedBulkExportJob(pageBulkExportJob: PageBulkExportJobDocument) {
+    if (pageBulkExportJob.restartFlag) {
+      await this.cleanUpExportJobResources(pageBulkExportJob, true);
+      pageBulkExportJob.restartFlag = false;
+      pageBulkExportJob.status = PageBulkExportJobStatus.initializing;
+      pageBulkExportJob.statusOnPreviousCronExec = undefined;
+      await pageBulkExportJob.save();
+    }
+
+    // return if job is still the same status as the previous cron exec
+    if (pageBulkExportJob.status === pageBulkExportJob.statusOnPreviousCronExec) {
+      return;
+    }
+    const User = mongoose.model<IUser>('User');
+    try {
+      const user = await User.findById(getIdForRef(pageBulkExportJob.user));
+
+      // update statusOnPreviousCronExec before starting processes that updates status
+      pageBulkExportJob.statusOnPreviousCronExec = pageBulkExportJob.status;
+      await pageBulkExportJob.save();
+
+      if (pageBulkExportJob.status === PageBulkExportJobStatus.initializing) {
+        await createPageSnapshotsAsync.bind(this)(user, pageBulkExportJob);
+      }
+      else if (pageBulkExportJob.status === PageBulkExportJobStatus.exporting) {
+        exportPagesToFsAsync.bind(this)(pageBulkExportJob);
+      }
+      else if (pageBulkExportJob.status === PageBulkExportJobStatus.uploading) {
+        await compressAndUploadAsync.bind(this)(user, pageBulkExportJob);
+      }
+    }
+    catch (err) {
+      logger.error(err);
+      await this.notifyExportResultAndCleanUp(SupportedAction.ACTION_PAGE_BULK_EXPORT_FAILED, pageBulkExportJob);
+    }
+  }
+
+  /**
+   * Handle errors that occurred inside a stream pipeline
+   * @param err error
+   * @param pageBulkExportJob PageBulkExportJob executed in the pipeline
+   */
+  async handlePipelineError(err: Error | null, pageBulkExportJob: PageBulkExportJobDocument) {
+    if (err == null) return;
+
+    if (err instanceof BulkExportJobExpiredError) {
+      logger.error(err);
+      await this.notifyExportResultAndCleanUp(SupportedAction.ACTION_PAGE_BULK_EXPORT_JOB_EXPIRED, pageBulkExportJob);
+    }
+    else if (err instanceof BulkExportJobRestartedError) {
+      logger.info(err.message);
+      await this.cleanUpExportJobResources(pageBulkExportJob);
+    }
+    else {
+      logger.error(err);
+      await this.notifyExportResultAndCleanUp(SupportedAction.ACTION_PAGE_BULK_EXPORT_FAILED, pageBulkExportJob);
+    }
+  }
+
+  /**
+   * Notify the user of the export result, and cleanup the resources used in the export process
+   * @param action the activity action type indicating whether the export succeeded or failed
+   * @param pageBulkExportJob the page bulk export job
+   */
+  async notifyExportResultAndCleanUp(
+      action: SupportedActionType,
+      pageBulkExportJob: PageBulkExportJobDocument,
+  ): Promise<void> {
+    pageBulkExportJob.status = action === SupportedAction.ACTION_PAGE_BULK_EXPORT_COMPLETED
+      ? PageBulkExportJobStatus.completed : PageBulkExportJobStatus.failed;
+
+    try {
+      await pageBulkExportJob.save();
+      await this.notifyExportResult(pageBulkExportJob, action);
+    }
+    catch (err) {
+      logger.error(err);
+    }
+    // execute regardless of whether the notification process resolved or rejected
+    await this.cleanUpExportJobResources(pageBulkExportJob);
+  }
+
+  /**
+   * Do the following in parallel:
+   * - delete page snapshots
+   * - remove the temporal output directory
+   * - abort multipart upload
+   */
+  async cleanUpExportJobResources(pageBulkExportJob: PageBulkExportJobDocument, restarted = false) {
+    const streamInExecution = this.getStreamInExecution(pageBulkExportJob._id);
+    if (streamInExecution != null) {
+      if (restarted) {
+        streamInExecution.destroy(new BulkExportJobRestartedError());
+      }
+      else {
+        streamInExecution.destroy(new BulkExportJobExpiredError());
+      }
+    }
+    this.removeStreamInExecution(pageBulkExportJob._id);
+
+    const promises = [
+      PageBulkExportPageSnapshot.deleteMany({ pageBulkExportJob }),
+      fs.promises.rm(this.getTmpOutputDir(pageBulkExportJob), { recursive: true, force: true }),
+    ];
+
+    const fileUploadService: FileUploader = this.crowi.fileUploadService;
+    if (pageBulkExportJob.uploadKey != null && pageBulkExportJob.uploadId != null) {
+      promises.push(fileUploadService.abortPreviousMultipartUpload(pageBulkExportJob.uploadKey, pageBulkExportJob.uploadId));
+    }
+
+    const results = await Promise.allSettled(promises);
+    results.forEach((result) => {
+      if (result.status === 'rejected') logger.error(result.reason);
+    });
+  }
+
+  private async notifyExportResult(
+      pageBulkExportJob: PageBulkExportJobDocument, action: SupportedActionType,
+  ) {
+    const activity = await this.crowi.activityService.createActivity({
+      action,
+      targetModel: SupportedTargetModel.MODEL_PAGE_BULK_EXPORT_JOB,
+      target: pageBulkExportJob,
+      user: pageBulkExportJob.user,
+      snapshot: {
+        username: isPopulated(pageBulkExportJob.user) ? pageBulkExportJob.user.username : '',
+      },
+    });
+    const getAdditionalTargetUsers = async(activity: ActivityDocument) => [activity.user];
+    const preNotify = preNotifyService.generatePreNotify(activity, getAdditionalTargetUsers);
+    this.activityEvent.emit('updated', activity, pageBulkExportJob, preNotify);
+  }
+
+}
+
+// eslint-disable-next-line import/no-mutable-exports
+export let pageBulkExportJobCronService: PageBulkExportJobCronService | undefined; // singleton instance
+export default function instanciate(crowi: Crowi): void {
+  pageBulkExportJobCronService = new PageBulkExportJobCronService(crowi);
+}

+ 117 - 0
apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-cron/steps/compress-and-upload-async.ts

@@ -0,0 +1,117 @@
+import { Writable, pipeline } from 'stream';
+
+import type { Archiver } from 'archiver';
+import archiver from 'archiver';
+import gc from 'expose-gc/function';
+
+import { PageBulkExportJobStatus } from '~/features/page-bulk-export/interfaces/page-bulk-export';
+import { SupportedAction } from '~/interfaces/activity';
+import { AttachmentType, FilePathOnStoragePrefix } from '~/server/interfaces/attachment';
+import type { IAttachmentDocument } from '~/server/models/attachment';
+import { Attachment } from '~/server/models/attachment';
+import type { FileUploader } from '~/server/service/file-uploader';
+import type { IMultipartUploader } from '~/server/service/file-uploader/multipart-uploader';
+import { getBufferToFixedSizeTransform } from '~/server/util/stream';
+import loggerFactory from '~/utils/logger';
+
+import type { IPageBulkExportJobCronService } from '..';
+import type { PageBulkExportJobDocument } from '../../../models/page-bulk-export-job';
+
+const logger = loggerFactory('growi:service:page-bulk-export-job-cron:compress-and-upload-async');
+
+function setUpPageArchiver(): Archiver {
+  const pageArchiver = archiver('tar', {
+    gzip: true,
+  });
+
+  // good practice to catch warnings (ie stat failures and other non-blocking errors)
+  pageArchiver.on('warning', (err) => {
+    if (err.code === 'ENOENT') logger.error(err);
+    else throw err;
+  });
+
+  return pageArchiver;
+}
+
+function getMultipartUploadWritable(
+    this: IPageBulkExportJobCronService,
+    multipartUploader: IMultipartUploader,
+    pageBulkExportJob: PageBulkExportJobDocument,
+    attachment: IAttachmentDocument,
+): Writable {
+  let partNumber = 1;
+
+  return new Writable({
+    write: async(part: Buffer, encoding, callback) => {
+      try {
+        await multipartUploader.uploadPart(part, partNumber);
+        partNumber += 1;
+        // First aid to prevent unexplained memory leaks
+        logger.info('global.gc() invoked.');
+        gc();
+      }
+      catch (err) {
+        await multipartUploader.abortUpload();
+        callback(err);
+        return;
+      }
+      callback();
+    },
+    final: async(callback) => {
+      try {
+        await multipartUploader.completeUpload();
+
+        const fileSize = await multipartUploader.getUploadedFileSize();
+        attachment.fileSize = fileSize;
+        await attachment.save();
+
+        pageBulkExportJob.completedAt = new Date();
+        pageBulkExportJob.attachment = attachment._id;
+        pageBulkExportJob.status = PageBulkExportJobStatus.completed;
+        await pageBulkExportJob.save();
+
+        await this.notifyExportResultAndCleanUp(SupportedAction.ACTION_PAGE_BULK_EXPORT_COMPLETED, pageBulkExportJob);
+      }
+      catch (err) {
+        callback(err);
+        return;
+      }
+      callback();
+    },
+  });
+}
+
+
+/**
+ * Execute a pipeline that reads the page files from the temporary fs directory, compresses them, and uploads to the cloud storage
+ */
+export async function compressAndUploadAsync(this: IPageBulkExportJobCronService, user, pageBulkExportJob: PageBulkExportJobDocument): Promise<void> {
+  const pageArchiver = setUpPageArchiver();
+  const bufferToPartSizeTransform = getBufferToFixedSizeTransform(this.maxPartSize);
+
+  if (pageBulkExportJob.revisionListHash == null) throw new Error('revisionListHash is not set');
+  const originalName = `${pageBulkExportJob.revisionListHash}.${this.compressExtension}`;
+  const attachment = Attachment.createWithoutSave(null, user, originalName, this.compressExtension, 0, AttachmentType.PAGE_BULK_EXPORT);
+  const uploadKey = `${FilePathOnStoragePrefix.pageBulkExport}/${attachment.fileName}`;
+
+  const fileUploadService: FileUploader = this.crowi.fileUploadService;
+  // if the process of uploading was interrupted, delete and start from the start
+  if (pageBulkExportJob.uploadKey != null && pageBulkExportJob.uploadId != null) {
+    await fileUploadService.abortPreviousMultipartUpload(pageBulkExportJob.uploadKey, pageBulkExportJob.uploadId);
+  }
+
+  // init multipart upload
+  const multipartUploader: IMultipartUploader = fileUploadService.createMultipartUploader(uploadKey, this.maxPartSize);
+  await multipartUploader.initUpload();
+  pageBulkExportJob.uploadKey = uploadKey;
+  pageBulkExportJob.uploadId = multipartUploader.uploadId;
+  await pageBulkExportJob.save();
+
+  const multipartUploadWritable = getMultipartUploadWritable.bind(this)(multipartUploader, pageBulkExportJob, attachment);
+
+  pipeline(pageArchiver, bufferToPartSizeTransform, multipartUploadWritable, (err) => {
+    this.handlePipelineError(err, pageBulkExportJob);
+  });
+  pageArchiver.directory(this.getTmpOutputDir(pageBulkExportJob), false);
+  pageArchiver.finalize();
+}

+ 103 - 0
apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-cron/steps/create-page-snapshots-async.ts

@@ -0,0 +1,103 @@
+import { createHash } from 'crypto';
+import { Writable, pipeline } from 'stream';
+
+import { getIdForRef, getIdStringForRef } from '@growi/core';
+import type { IPage } from '@growi/core';
+import mongoose from 'mongoose';
+
+import { PageBulkExportJobStatus } from '~/features/page-bulk-export/interfaces/page-bulk-export';
+import { SupportedAction } from '~/interfaces/activity';
+import type { PageDocument, PageModel } from '~/server/models/page';
+
+import type { IPageBulkExportJobCronService } from '..';
+import type { PageBulkExportJobDocument } from '../../../models/page-bulk-export-job';
+import PageBulkExportJob from '../../../models/page-bulk-export-job';
+import PageBulkExportPageSnapshot from '../../../models/page-bulk-export-page-snapshot';
+
+async function reuseDuplicateExportIfExists(this: IPageBulkExportJobCronService, pageBulkExportJob: PageBulkExportJobDocument) {
+  const duplicateExportJob = await PageBulkExportJob.findOne({
+    user: pageBulkExportJob.user,
+    page: pageBulkExportJob.page,
+    format: pageBulkExportJob.format,
+    status: PageBulkExportJobStatus.completed,
+    revisionListHash: pageBulkExportJob.revisionListHash,
+  });
+  if (duplicateExportJob != null) {
+    // if an upload with the exact same contents exists, re-use the same attachment of that upload
+    pageBulkExportJob.attachment = duplicateExportJob.attachment;
+    pageBulkExportJob.status = PageBulkExportJobStatus.completed;
+    await pageBulkExportJob.save();
+
+    await this.notifyExportResultAndCleanUp(SupportedAction.ACTION_PAGE_BULK_EXPORT_COMPLETED, pageBulkExportJob);
+  }
+}
+
+/**
+ * Start a pipeline that creates a snapshot for each page that is to be exported in the pageBulkExportJob.
+ * 'revisionListHash' is calculated and saved to the pageBulkExportJob at the end of the pipeline.
+ */
+export async function createPageSnapshotsAsync(this: IPageBulkExportJobCronService, user, pageBulkExportJob: PageBulkExportJobDocument): Promise<void> {
+  const Page = mongoose.model<IPage, PageModel>('Page');
+
+  // if the process of creating snapshots was interrupted, delete the snapshots and create from the start
+  await PageBulkExportPageSnapshot.deleteMany({ pageBulkExportJob });
+
+  const basePage = await Page.findById(getIdForRef(pageBulkExportJob.page));
+  if (basePage == null) {
+    throw new Error('Base page not found');
+  }
+
+  const revisionListHash = createHash('sha256');
+
+  // create a Readable for pages to be exported
+  const { PageQueryBuilder } = Page;
+  const builder = await new PageQueryBuilder(Page.find())
+    .addConditionToListWithDescendants(basePage.path)
+    .addViewerCondition(user);
+  const pagesReadable = builder
+    .query
+    .lean()
+    .cursor({ batchSize: this.pageBatchSize });
+
+  // create a Writable that creates a snapshot for each page
+  const pageSnapshotsWritable = new Writable({
+    objectMode: true,
+    write: async(page: PageDocument, encoding, callback) => {
+      try {
+        if (page.revision != null) {
+          revisionListHash.update(getIdStringForRef(page.revision));
+        }
+        await PageBulkExportPageSnapshot.create({
+          pageBulkExportJob,
+          path: page.path,
+          revision: page.revision,
+        });
+      }
+      catch (err) {
+        callback(err);
+        return;
+      }
+      callback();
+    },
+    final: async(callback) => {
+      try {
+        pageBulkExportJob.revisionListHash = revisionListHash.digest('hex');
+        pageBulkExportJob.status = PageBulkExportJobStatus.exporting;
+        await pageBulkExportJob.save();
+
+        await reuseDuplicateExportIfExists.bind(this)(pageBulkExportJob);
+      }
+      catch (err) {
+        callback(err);
+        return;
+      }
+      callback();
+    },
+  });
+
+  this.setStreamInExecution(pageBulkExportJob._id, pagesReadable);
+
+  pipeline(pagesReadable, pageSnapshotsWritable, (err) => {
+    this.handlePipelineError(err, pageBulkExportJob);
+  });
+}

+ 79 - 0
apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-cron/steps/export-pages-to-fs-async.ts

@@ -0,0 +1,79 @@
+import fs from 'fs';
+import path from 'path';
+import { Writable, pipeline } from 'stream';
+
+import { isPopulated } from '@growi/core';
+import { getParentPath, normalizePath } from '@growi/core/dist/utils/path-utils';
+
+import { PageBulkExportFormat, PageBulkExportJobStatus } from '~/features/page-bulk-export/interfaces/page-bulk-export';
+
+import type { IPageBulkExportJobCronService } from '..';
+import type { PageBulkExportJobDocument } from '../../../models/page-bulk-export-job';
+import type { PageBulkExportPageSnapshotDocument } from '../../../models/page-bulk-export-page-snapshot';
+import PageBulkExportPageSnapshot from '../../../models/page-bulk-export-page-snapshot';
+
+/**
+ * Get a Writable that writes the page body temporarily to fs
+ */
+function getPageWritable(this: IPageBulkExportJobCronService, pageBulkExportJob: PageBulkExportJobDocument): Writable {
+  const outputDir = this.getTmpOutputDir(pageBulkExportJob);
+  return new Writable({
+    objectMode: true,
+    write: async(page: PageBulkExportPageSnapshotDocument, encoding, callback) => {
+      try {
+        const revision = page.revision;
+
+        if (revision != null && isPopulated(revision)) {
+          const markdownBody = revision.body;
+          const pathNormalized = `${normalizePath(page.path)}.${PageBulkExportFormat.md}`;
+          const fileOutputPath = path.join(outputDir, pathNormalized);
+          const fileOutputParentPath = getParentPath(fileOutputPath);
+
+          await fs.promises.mkdir(fileOutputParentPath, { recursive: true });
+          await fs.promises.writeFile(fileOutputPath, markdownBody);
+          pageBulkExportJob.lastExportedPagePath = page.path;
+          await pageBulkExportJob.save();
+        }
+      }
+      catch (err) {
+        callback(err);
+        return;
+      }
+      callback();
+    },
+    final: async(callback) => {
+      try {
+        pageBulkExportJob.status = PageBulkExportJobStatus.uploading;
+        await pageBulkExportJob.save();
+      }
+      catch (err) {
+        callback(err);
+        return;
+      }
+      callback();
+    },
+  });
+}
+
+/**
+ * Export pages to the file system before compressing and uploading to the cloud storage.
+ * The export will resume from the last exported page if the process was interrupted.
+ */
+export function exportPagesToFsAsync(this: IPageBulkExportJobCronService, pageBulkExportJob: PageBulkExportJobDocument): void {
+  const findQuery = pageBulkExportJob.lastExportedPagePath != null ? {
+    pageBulkExportJob,
+    path: { $gt: pageBulkExportJob.lastExportedPagePath },
+  } : { pageBulkExportJob };
+  const pageSnapshotsReadable = PageBulkExportPageSnapshot
+    .find(findQuery)
+    .populate('revision').sort({ path: 1 }).lean()
+    .cursor({ batchSize: this.pageBatchSize });
+
+  const pagesWritable = getPageWritable.bind(this)(pageBulkExportJob);
+
+  this.setStreamInExecution(pageBulkExportJob._id, pageSnapshotsReadable);
+
+  pipeline(pageSnapshotsReadable, pagesWritable, (err) => {
+    this.handlePipelineError(err, pageBulkExportJob);
+  });
+}

+ 82 - 0
apps/app/src/features/page-bulk-export/server/service/page-bulk-export.ts

@@ -0,0 +1,82 @@
+import {
+  type IPage, SubscriptionStatusType,
+} from '@growi/core';
+import type { HydratedDocument } from 'mongoose';
+import mongoose from 'mongoose';
+
+
+import { SupportedTargetModel } from '~/interfaces/activity';
+import type { PageModel } from '~/server/models/page';
+import Subscription from '~/server/models/subscription';
+import loggerFactory from '~/utils/logger';
+
+import { PageBulkExportFormat, PageBulkExportJobInProgressStatus, PageBulkExportJobStatus } from '../../interfaces/page-bulk-export';
+import type { PageBulkExportJobDocument } from '../models/page-bulk-export-job';
+import PageBulkExportJob from '../models/page-bulk-export-job';
+
+const logger = loggerFactory('growi:services:PageBulkExportService');
+
+export class DuplicateBulkExportJobError extends Error {
+
+  duplicateJob: HydratedDocument<PageBulkExportJobDocument>;
+
+  constructor(duplicateJob: HydratedDocument<PageBulkExportJobDocument>) {
+    super('Duplicate bulk export job is in progress');
+    this.duplicateJob = duplicateJob;
+  }
+
+}
+
+export interface IPageBulkExportService {
+  createOrResetBulkExportJob: (basePagePath: string, currentUser, restartJob?: boolean) => Promise<void>;
+}
+
+class PageBulkExportService implements IPageBulkExportService {
+
+  // temporary path on the local fs to output page files to before upload
+  // TODO: If necessary, change to a proper path in https://redmine.weseek.co.jp/issues/149512
+  tmpOutputRootDir = '/tmp/page-bulk-export';
+
+  /**
+   * Create a new page bulk export job or reset the existing one
+   */
+  async createOrResetBulkExportJob(basePagePath: string, currentUser, restartJob = false): Promise<void> {
+    const Page = mongoose.model<IPage, PageModel>('Page');
+    const basePage = await Page.findByPathAndViewer(basePagePath, currentUser, null, true);
+
+    if (basePage == null) {
+      throw new Error('Base page not found or not accessible');
+    }
+
+    const format = PageBulkExportFormat.md;
+    const duplicatePageBulkExportJobInProgress: HydratedDocument<PageBulkExportJobDocument> | null = await PageBulkExportJob.findOne({
+      user: currentUser,
+      page: basePage,
+      format,
+      $or: Object.values(PageBulkExportJobInProgressStatus).map(status => ({ status })),
+    });
+    if (duplicatePageBulkExportJobInProgress != null) {
+      if (restartJob) {
+        this.resetBulkExportJob(duplicatePageBulkExportJobInProgress);
+        return;
+      }
+      throw new DuplicateBulkExportJobError(duplicatePageBulkExportJobInProgress);
+    }
+    const pageBulkExportJob: HydratedDocument<PageBulkExportJobDocument> = await PageBulkExportJob.create({
+      user: currentUser, page: basePage, format, status: PageBulkExportJobStatus.initializing,
+    });
+
+    await Subscription.upsertSubscription(currentUser, SupportedTargetModel.MODEL_PAGE_BULK_EXPORT_JOB, pageBulkExportJob, SubscriptionStatusType.SUBSCRIBE);
+  }
+
+  /**
+   * Reset page bulk export job in progress
+   */
+  async resetBulkExportJob(pageBulkExportJob: HydratedDocument<PageBulkExportJobDocument>): Promise<void> {
+    pageBulkExportJob.restartFlag = true;
+    await pageBulkExportJob.save();
+  }
+
+}
+
+export const pageBulkExportService: PageBulkExportService = new PageBulkExportService(); // singleton instance

+ 0 - 30
apps/app/src/features/page-bulk-export/server/service/page-bulk-export/errors.ts

@@ -1,30 +0,0 @@
-import type { HydratedDocument } from 'mongoose';
-
-import type { PageBulkExportJobDocument } from '../../models/page-bulk-export-job';
-
-export class DuplicateBulkExportJobError extends Error {
-
-  duplicateJob: HydratedDocument<PageBulkExportJobDocument>;
-
-  constructor(duplicateJob: HydratedDocument<PageBulkExportJobDocument>) {
-    super('Duplicate bulk export job is in progress');
-    this.duplicateJob = duplicateJob;
-  }
-
-}
-
-export class BulkExportJobExpiredError extends Error {
-
-  constructor() {
-    super('Bulk export job has expired');
-  }
-
-}
-
-export class BulkExportJobRestartedError extends Error {
-
-  constructor() {
-    super('Bulk export job has restarted');
-  }
-
-}

+ 0 - 462
apps/app/src/features/page-bulk-export/server/service/page-bulk-export/index.ts

@@ -1,462 +0,0 @@
-import { createHash } from 'crypto';
-import fs from 'fs';
-import path from 'path';
-import { Writable } from 'stream';
-import { pipeline as pipelinePromise } from 'stream/promises';
-
-import type { IUser } from '@growi/core';
-import {
-  getIdForRef, getIdStringForRef, type IPage, isPopulated, SubscriptionStatusType,
-} from '@growi/core';
-import { getParentPath, normalizePath } from '@growi/core/dist/utils/path-utils';
-import type { Archiver } from 'archiver';
-import archiver from 'archiver';
-import gc from 'expose-gc/function';
-import type { HydratedDocument } from 'mongoose';
-import mongoose from 'mongoose';
-
-import type { SupportedActionType } from '~/interfaces/activity';
-import { SupportedAction, SupportedTargetModel } from '~/interfaces/activity';
-import { AttachmentType, FilePathOnStoragePrefix } from '~/server/interfaces/attachment';
-import type { ActivityDocument } from '~/server/models/activity';
-import type { IAttachmentDocument } from '~/server/models/attachment';
-import { Attachment } from '~/server/models/attachment';
-import type { PageModel, PageDocument } from '~/server/models/page';
-import Subscription from '~/server/models/subscription';
-import type { FileUploader } from '~/server/service/file-uploader';
-import type { IMultipartUploader } from '~/server/service/file-uploader/multipart-uploader';
-import { preNotifyService } from '~/server/service/pre-notify';
-import { getBufferToFixedSizeTransform } from '~/server/util/stream';
-import loggerFactory from '~/utils/logger';
-
-import { PageBulkExportFormat, PageBulkExportJobInProgressStatus, PageBulkExportJobStatus } from '../../../interfaces/page-bulk-export';
-import type { PageBulkExportJobDocument } from '../../models/page-bulk-export-job';
-import PageBulkExportJob from '../../models/page-bulk-export-job';
-import type { PageBulkExportPageSnapshotDocument } from '../../models/page-bulk-export-page-snapshot';
-import PageBulkExportPageSnapshot from '../../models/page-bulk-export-page-snapshot';
-
-import { BulkExportJobExpiredError, BulkExportJobRestartedError, DuplicateBulkExportJobError } from './errors';
-import { PageBulkExportJobManager } from './page-bulk-export-job-manager';
-
-
-const logger = loggerFactory('growi:services:PageBulkExportService');
-
-export type ActivityParameters ={
-  ip?: string;
-  endpoint: string;
-}
-
-export interface IPageBulkExportService {
-  executePageBulkExportJob: (pageBulkExportJob: HydratedDocument<PageBulkExportJobDocument>, activityParameters?: ActivityParameters) => Promise<void>
-}
-
-class PageBulkExportService implements IPageBulkExportService {
-
-  crowi: any;
-
-  activityEvent: any;
-
-  // multipart upload max part size
-  maxPartSize = 5 * 1024 * 1024; // 5MB
-
-  pageBatchSize = 100;
-
-  compressExtension = 'tar.gz';
-
-  pageBulkExportJobManager: PageBulkExportJobManager;
-
-  // temporal path of local fs to output page files before upload
-  // TODO: If necessary, change to a proper path in https://redmine.weseek.co.jp/issues/149512
-  tmpOutputRootDir = '/tmp/page-bulk-export';
-
-  pageModel: PageModel;
-
-  constructor(crowi) {
-    this.crowi = crowi;
-    this.activityEvent = crowi.event('activity');
-    this.pageModel = mongoose.model<IPage, PageModel>('Page');
-    this.pageBulkExportJobManager = new PageBulkExportJobManager(this);
-  }
-
-  /**
-   * Create a new page bulk export job and execute it
-   */
-  async createAndExecuteOrRestartBulkExportJob(basePagePath: string, currentUser, activityParameters: ActivityParameters, restartJob = false): Promise<void> {
-    const basePage = await this.pageModel.findByPathAndViewer(basePagePath, currentUser, null, true);
-
-    if (basePage == null) {
-      throw new Error('Base page not found or not accessible');
-    }
-
-    const format = PageBulkExportFormat.md;
-    const duplicatePageBulkExportJobInProgress: HydratedDocument<PageBulkExportJobDocument> | null = await PageBulkExportJob.findOne({
-      user: currentUser,
-      page: basePage,
-      format,
-      $or: Object.values(PageBulkExportJobInProgressStatus).map(status => ({ status })),
-    });
-    if (duplicatePageBulkExportJobInProgress != null) {
-      if (restartJob) {
-        this.restartBulkExportJob(duplicatePageBulkExportJobInProgress, activityParameters);
-        return;
-      }
-      throw new DuplicateBulkExportJobError(duplicatePageBulkExportJobInProgress);
-    }
-    const pageBulkExportJob: HydratedDocument<PageBulkExportJobDocument> = await PageBulkExportJob.create({
-      user: currentUser, page: basePage, format, status: PageBulkExportJobStatus.initializing,
-    });
-
-    await Subscription.upsertSubscription(currentUser, SupportedTargetModel.MODEL_PAGE_BULK_EXPORT_JOB, pageBulkExportJob, SubscriptionStatusType.SUBSCRIBE);
-
-    this.pageBulkExportJobManager.addJob(pageBulkExportJob, activityParameters);
-  }
-
-  /**
-   * Restart page bulk export job in progress from the beginning
-   */
-  async restartBulkExportJob(pageBulkExportJob: HydratedDocument<PageBulkExportJobDocument>, activityParameters: ActivityParameters): Promise<void> {
-    await this.cleanUpExportJobResources(pageBulkExportJob, true);
-
-    pageBulkExportJob.status = PageBulkExportJobStatus.initializing;
-    await pageBulkExportJob.save();
-    this.pageBulkExportJobManager.addJob(pageBulkExportJob, activityParameters);
-  }
-
-  /**
-   * Execute a page bulk export job. This method can also resume a previously inturrupted job.
-   */
-  async executePageBulkExportJob(pageBulkExportJob: HydratedDocument<PageBulkExportJobDocument>, activityParameters?: ActivityParameters): Promise<void> {
-    try {
-      const User = mongoose.model<IUser>('User');
-      const user = await User.findById(getIdForRef(pageBulkExportJob.user));
-
-      if (pageBulkExportJob.status === PageBulkExportJobStatus.initializing) {
-        await this.createPageSnapshots(user, pageBulkExportJob);
-
-        const duplicateExportJob = await PageBulkExportJob.findOne({
-          user: pageBulkExportJob.user,
-          page: pageBulkExportJob.page,
-          format: pageBulkExportJob.format,
-          status: PageBulkExportJobStatus.completed,
-          revisionListHash: pageBulkExportJob.revisionListHash,
-        });
-        if (duplicateExportJob != null) {
-          // if an upload with the exact same contents exists, re-use the same attachment of that upload
-          pageBulkExportJob.attachment = duplicateExportJob.attachment;
-          pageBulkExportJob.status = PageBulkExportJobStatus.completed;
-        }
-        else {
-          pageBulkExportJob.status = PageBulkExportJobStatus.exporting;
-        }
-        await pageBulkExportJob.save();
-      }
-      if (pageBulkExportJob.status === PageBulkExportJobStatus.exporting) {
-        await this.exportPagesToFS(pageBulkExportJob);
-        pageBulkExportJob.status = PageBulkExportJobStatus.uploading;
-        await pageBulkExportJob.save();
-      }
-      if (pageBulkExportJob.status === PageBulkExportJobStatus.uploading) {
-        await this.compressAndUpload(user, pageBulkExportJob);
-      }
-    }
-    catch (err) {
-      if (err instanceof BulkExportJobExpiredError) {
-        logger.error(err);
-        await this.notifyExportResultAndCleanUp(SupportedAction.ACTION_PAGE_BULK_EXPORT_JOB_EXPIRED, pageBulkExportJob, activityParameters);
-      }
-      else if (err instanceof BulkExportJobRestartedError) {
-        logger.info(err.message);
-        await this.cleanUpExportJobResources(pageBulkExportJob);
-      }
-      else {
-        logger.error(err);
-        await this.notifyExportResultAndCleanUp(SupportedAction.ACTION_PAGE_BULK_EXPORT_FAILED, pageBulkExportJob, activityParameters);
-      }
-      return;
-    }
-
-    await this.notifyExportResultAndCleanUp(SupportedAction.ACTION_PAGE_BULK_EXPORT_COMPLETED, pageBulkExportJob, activityParameters);
-  }
-
-  /**
-   * Notify the user of the export result, and cleanup the resources used in the export process
-   * @param action whether the export was successful
-   * @param pageBulkExportJob the page bulk export job
-   * @param activityParameters parameters to record user activity
-   */
-  private async notifyExportResultAndCleanUp(
-      action: SupportedActionType,
-      pageBulkExportJob: PageBulkExportJobDocument,
-      activityParameters?: ActivityParameters,
-  ): Promise<void> {
-    pageBulkExportJob.status = action === SupportedAction.ACTION_PAGE_BULK_EXPORT_COMPLETED
-      ? PageBulkExportJobStatus.completed : PageBulkExportJobStatus.failed;
-
-    try {
-      await pageBulkExportJob.save();
-      await this.notifyExportResult(pageBulkExportJob, action, activityParameters);
-    }
-    catch (err) {
-      logger.error(err);
-    }
-    // execute independently of notif process resolve/reject
-    await this.cleanUpExportJobResources(pageBulkExportJob);
-  }
-
-  /**
-   * Create a snapshot for each page that is to be exported in the pageBulkExportJob.
-   * Also calulate revisionListHash and save it to the pageBulkExportJob.
-   */
-  private async createPageSnapshots(user, pageBulkExportJob: PageBulkExportJobDocument): Promise<void> {
-    // if the process of creating snapshots was interrupted, delete the snapshots and create from the start
-    await PageBulkExportPageSnapshot.deleteMany({ pageBulkExportJob });
-
-    const basePage = await this.pageModel.findById(getIdForRef(pageBulkExportJob.page));
-    if (basePage == null) {
-      throw new Error('Base page not found');
-    }
-
-    const revisionListHash = createHash('sha256');
-
-    // create a Readable for pages to be exported
-    const { PageQueryBuilder } = this.pageModel;
-    const builder = await new PageQueryBuilder(this.pageModel.find())
-      .addConditionToListWithDescendants(basePage.path)
-      .addViewerCondition(user);
-    const pagesReadable = builder
-      .query
-      .lean()
-      .cursor({ batchSize: this.pageBatchSize });
-
-    // create a Writable that creates a snapshot for each page
-    const pageSnapshotsWritable = new Writable({
-      objectMode: true,
-      write: async(page: PageDocument, encoding, callback) => {
-        try {
-          if (page.revision != null) {
-            revisionListHash.update(getIdStringForRef(page.revision));
-          }
-          await PageBulkExportPageSnapshot.create({
-            pageBulkExportJob,
-            path: page.path,
-            revision: page.revision,
-          });
-        }
-        catch (err) {
-          callback(err);
-          return;
-        }
-        callback();
-      },
-    });
-
-    this.pageBulkExportJobManager.updateJobStream(pageBulkExportJob._id, pagesReadable);
-
-    await pipelinePromise(pagesReadable, pageSnapshotsWritable);
-
-    pageBulkExportJob.revisionListHash = revisionListHash.digest('hex');
-    await pageBulkExportJob.save();
-  }
-
-  /**
-   * Export pages to the file system before compressing and uploading to the cloud storage.
-   * The export will resume from the last exported page if the process was interrupted.
-   */
-  private async exportPagesToFS(pageBulkExportJob: PageBulkExportJobDocument): Promise<void> {
-    const findQuery = pageBulkExportJob.lastExportedPagePath != null ? {
-      pageBulkExportJob,
-      path: { $gt: pageBulkExportJob.lastExportedPagePath },
-    } : { pageBulkExportJob };
-    const pageSnapshotsReadable = PageBulkExportPageSnapshot
-      .find(findQuery)
-      .populate('revision').sort({ path: 1 }).lean()
-      .cursor({ batchSize: this.pageBatchSize });
-
-    const pagesWritable = this.getPageWritable(pageBulkExportJob);
-
-    this.pageBulkExportJobManager.updateJobStream(pageBulkExportJob._id, pageSnapshotsReadable);
-
-    return pipelinePromise(pageSnapshotsReadable, pagesWritable);
-  }
-
-  /**
-   * Get a Writable that writes the page body temporarily to fs
-   */
-  private getPageWritable(pageBulkExportJob: PageBulkExportJobDocument): Writable {
-    const outputDir = this.getTmpOutputDir(pageBulkExportJob);
-    return new Writable({
-      objectMode: true,
-      write: async(page: PageBulkExportPageSnapshotDocument, encoding, callback) => {
-        try {
-          const revision = page.revision;
-
-          if (revision != null && isPopulated(revision)) {
-            const markdownBody = revision.body;
-            const pathNormalized = `${normalizePath(page.path)}.${PageBulkExportFormat.md}`;
-            const fileOutputPath = path.join(outputDir, pathNormalized);
-            const fileOutputParentPath = getParentPath(fileOutputPath);
-
-            await fs.promises.mkdir(fileOutputParentPath, { recursive: true });
-            await fs.promises.writeFile(fileOutputPath, markdownBody);
-            pageBulkExportJob.lastExportedPagePath = page.path;
-            await pageBulkExportJob.save();
-          }
-        }
-        catch (err) {
-          callback(err);
-          return;
-        }
-        callback();
-      },
-    });
-  }
-
-  /**
-   * Execute a pipeline that reads the page files from the temporal fs directory, compresses them, and uploads to the cloud storage
-   */
-  private async compressAndUpload(user, pageBulkExportJob: PageBulkExportJobDocument): Promise<void> {
-    const pageArchiver = this.setUpPageArchiver();
-    const bufferToPartSizeTransform = getBufferToFixedSizeTransform(this.maxPartSize);
-
-    if (pageBulkExportJob.revisionListHash == null) throw new Error('revisionListHash is not set');
-    const originalName = `${pageBulkExportJob.revisionListHash}.${this.compressExtension}`;
-    const attachment = Attachment.createWithoutSave(null, user, originalName, this.compressExtension, 0, AttachmentType.PAGE_BULK_EXPORT);
-    const uploadKey = `${FilePathOnStoragePrefix.pageBulkExport}/${attachment.fileName}`;
-
-    const fileUploadService: FileUploader = this.crowi.fileUploadService;
-    // if the process of uploading was interrupted, delete and start from the start
-    if (pageBulkExportJob.uploadKey != null && pageBulkExportJob.uploadId != null) {
-      await fileUploadService.abortPreviousMultipartUpload(pageBulkExportJob.uploadKey, pageBulkExportJob.uploadId);
-    }
-
-    // init multipart upload
-    const multipartUploader: IMultipartUploader = fileUploadService.createMultipartUploader(uploadKey, this.maxPartSize);
-    await multipartUploader.initUpload();
-    pageBulkExportJob.uploadKey = uploadKey;
-    pageBulkExportJob.uploadId = multipartUploader.uploadId;
-    await pageBulkExportJob.save();
-
-    const multipartUploadWritable = this.getMultipartUploadWritable(multipartUploader, pageBulkExportJob, attachment);
-
-    const compressAndUploadPromise = pipelinePromise(pageArchiver, bufferToPartSizeTransform, multipartUploadWritable);
-    pageArchiver.directory(this.getTmpOutputDir(pageBulkExportJob), false);
-    pageArchiver.finalize();
-
-    await compressAndUploadPromise;
-  }
-
-  private setUpPageArchiver(): Archiver {
-    const pageArchiver = archiver('tar', {
-      gzip: true,
-    });
-
-    // good practice to catch warnings (ie stat failures and other non-blocking errors)
-    pageArchiver.on('warning', (err) => {
-      if (err.code === 'ENOENT') logger.error(err);
-      else throw err;
-    });
-
-    return pageArchiver;
-  }
-
-  private getMultipartUploadWritable(
-      multipartUploader: IMultipartUploader,
-      pageBulkExportJob: PageBulkExportJobDocument,
-      attachment: IAttachmentDocument,
-  ): Writable {
-    let partNumber = 1;
-
-    return new Writable({
-      write: async(part: Buffer, encoding, callback) => {
-        try {
-          await multipartUploader.uploadPart(part, partNumber);
-          partNumber += 1;
-          // First aid to prevent unexplained memory leaks
-          logger.info('global.gc() invoked.');
-          gc();
-        }
-        catch (err) {
-          await multipartUploader.abortUpload();
-          callback(err);
-          return;
-        }
-        callback();
-      },
-      final: async(callback) => {
-        try {
-          await multipartUploader.completeUpload();
-
-          const fileSize = await multipartUploader.getUploadedFileSize();
-          attachment.fileSize = fileSize;
-          await attachment.save();
-
-          pageBulkExportJob.completedAt = new Date();
-          pageBulkExportJob.attachment = attachment._id;
-          await pageBulkExportJob.save();
-        }
-        catch (err) {
-          callback(err);
-          return;
-        }
-        callback();
-      },
-    });
-  }
-
-  /**
-   * Get the output directory on the fs to temporarily store page files before compressing and uploading
-   */
-  private getTmpOutputDir(pageBulkExportJob: PageBulkExportJobDocument): string {
-    return `${this.tmpOutputRootDir}/${pageBulkExportJob._id}`;
-  }
-
-  async notifyExportResult(
-      pageBulkExportJob: PageBulkExportJobDocument, action: SupportedActionType, activityParameters?: ActivityParameters,
-  ) {
-    const activity = await this.crowi.activityService.createActivity({
-      ...activityParameters,
-      action,
-      targetModel: SupportedTargetModel.MODEL_PAGE_BULK_EXPORT_JOB,
-      target: pageBulkExportJob,
-      user: pageBulkExportJob.user,
-      snapshot: {
-        username: isPopulated(pageBulkExportJob.user) ? pageBulkExportJob.user.username : '',
-      },
-    });
-    const getAdditionalTargetUsers = async(activity: ActivityDocument) => [activity.user];
-    const preNotify = preNotifyService.generatePreNotify(activity, getAdditionalTargetUsers);
-    this.activityEvent.emit('updated', activity, pageBulkExportJob, preNotify);
-  }
-
-  /**
-   * Do the following in parallel:
-   * - delete page snapshots
-   * - remove the temporal output directory
-   * - abort multipart upload
-   */
-  async cleanUpExportJobResources(pageBulkExportJob: PageBulkExportJobDocument, restarted = false) {
-    this.pageBulkExportJobManager.removeJobInProgressAndQueueNextJob(pageBulkExportJob._id, restarted);
-
-    const promises = [
-      PageBulkExportPageSnapshot.deleteMany({ pageBulkExportJob }),
-      fs.promises.rm(this.getTmpOutputDir(pageBulkExportJob), { recursive: true, force: true }),
-    ];
-
-    const fileUploadService: FileUploader = this.crowi.fileUploadService;
-    if (pageBulkExportJob.uploadKey != null && pageBulkExportJob.uploadId != null) {
-      promises.push(fileUploadService.abortPreviousMultipartUpload(pageBulkExportJob.uploadKey, pageBulkExportJob.uploadId));
-    }
-
-    const results = await Promise.allSettled(promises);
-    results.forEach((result) => {
-      if (result.status === 'rejected') logger.error(result.reason);
-    });
-  }
-
-}
-
-// eslint-disable-next-line import/no-mutable-exports
-export let pageBulkExportService: PageBulkExportService | undefined; // singleton instance
-export default function instanciate(crowi): void {
-  pageBulkExportService = new PageBulkExportService(crowi);
-}

+ 0 - 231
apps/app/src/features/page-bulk-export/server/service/page-bulk-export/page-bulk-export-job-manager.spec.ts

@@ -1,231 +0,0 @@
-import { Readable } from 'stream';
-import { finished } from 'stream/promises';
-
-import type { HydratedDocument } from 'mongoose';
-
-import { configManager } from '~/server/service/config-manager';
-
-import type { PageBulkExportJobDocument } from '../../models/page-bulk-export-job';
-
-import { BulkExportJobExpiredError, BulkExportJobRestartedError } from './errors';
-import { PageBulkExportJobManager } from './page-bulk-export-job-manager';
-
-describe('PageBulkExportJobManager', () => {
-  let pageBulkExportServiceMock;
-  let jobManager: PageBulkExportJobManager;
-
-  beforeAll(() => {
-    vi.spyOn(configManager, 'getConfig').mockImplementation((namespace, key) => {
-      if (namespace === 'crowi' && key === 'app:pageBulkExportParallelExecLimit') {
-        return 3;
-      }
-      return undefined; // or whatever the default return value should be
-    });
-  });
-
-  beforeEach(() => {
-    pageBulkExportServiceMock = {
-      executePageBulkExportJob: vi.fn(),
-    };
-    jobManager = new PageBulkExportJobManager(pageBulkExportServiceMock);
-  });
-
-  describe('canExecuteNextJob', () => {
-    it('should return true if jobs in progress are less than the limit', () => {
-      // act, assert
-      expect(jobManager.canExecuteNextJob()).toBe(true);
-    });
-
-    it('should return false if jobs in progress exceed the limit', () => {
-      // arrange
-      jobManager.jobsInProgress = {
-        job1: { stream: undefined },
-        job2: { stream: undefined },
-        job3: { stream: undefined },
-      };
-
-      // act, assert
-      expect(jobManager.canExecuteNextJob()).toBe(false);
-    });
-  });
-
-  describe('getJobInProgress', () => {
-    it('should return the info of job in progress', () => {
-      // arrange
-      const jobId = 'job1';
-      jobManager.jobsInProgress[jobId] = { stream: undefined };
-
-      // act, assert
-      expect(jobManager.getJobInProgress(jobId)).toEqual({ stream: undefined });
-    });
-
-    it('should return undefined if job is not in progress', () => {
-      // arrange
-      const jobId = 'job1';
-
-      // act, assert
-      expect(jobManager.getJobInProgress(jobId)).toBeUndefined();
-    });
-  });
-
-  describe('addJob', () => {
-    it('should add the job to jobsInProgress if under the parallelExecLimit', () => {
-      // arrange
-      const job = { _id: 'job1' } as HydratedDocument<PageBulkExportJobDocument>;
-      expect(jobManager.jobQueue.length).toBe(0);
-
-      // act
-      jobManager.addJob(job, { endpoint: '/test/endpoint' });
-
-      // assert
-      expect(jobManager.jobQueue.length).toBe(0);
-      expect(jobManager.jobsInProgress[job._id.toString()]).toEqual({ stream: undefined });
-      expect(pageBulkExportServiceMock.executePageBulkExportJob).toHaveBeenCalledWith(job, { endpoint: '/test/endpoint' });
-    });
-
-    it('should queue the job if the parallelExecLimit is reached', () => {
-      // arrange
-      jobManager.jobsInProgress = {
-        job1: { stream: undefined },
-        job2: { stream: undefined },
-        job3: { stream: undefined },
-      };
-      const job = { _id: 'job2' } as HydratedDocument<PageBulkExportJobDocument>;
-      expect(jobManager.jobQueue.length).toBe(0);
-
-      // act
-      jobManager.addJob(job);
-
-      // assert
-      expect(jobManager.jobQueue.length).toBe(1);
-      expect(jobManager.jobQueue[0]).toEqual({ job });
-      expect(pageBulkExportServiceMock.executePageBulkExportJob).not.toHaveBeenCalled();
-    });
-  });
-
-  describe('updateJobStream', () => {
-    it('should set a new stream when there are no streams executing for the job', () => {
-      // arrange
-      const jobId = 'job1';
-      const mockStream = new Readable();
-      jobManager.jobsInProgress[jobId] = { stream: undefined };
-
-      // act
-      jobManager.updateJobStream(jobId, mockStream);
-
-      // assert
-      expect(jobManager.jobsInProgress[jobId].stream).toBe(mockStream);
-    });
-
-    it('should set a new stream when previous stream is finished', async() => {
-      // arrange
-      const jobId = 'job1';
-      const oldStream = new Readable({
-        read(size) {
-          // End the stream immediately
-          this.push(null);
-        },
-      });
-      oldStream.read();
-      await finished(oldStream);
-      const newStream = vi.fn().mockImplementation(() => {
-        const stream = new Readable();
-        stream.destroy = vi.fn();
-        return stream;
-      })() as unknown as Readable;
-      jobManager.addJob({ _id: jobId } as HydratedDocument<PageBulkExportJobDocument>);
-
-      // act
-      jobManager.updateJobStream(jobId, oldStream);
-
-      // assert
-      expect(oldStream.readableEnded).toBe(true);
-      jobManager.updateJobStream(jobId, newStream);
-      expect(jobManager.getJobInProgress(jobId)?.stream).toBe(newStream);
-    });
-
-    it('should destroy non-finished stream with an error before setting a new stream', () => {
-      // arrange
-      const jobId = 'job1';
-      const oldStream = vi.fn().mockImplementation(() => {
-        const stream = new Readable();
-        stream.destroy = vi.fn();
-        return stream;
-      })();
-      const newStream = new Readable();
-      const destroySpy = vi.spyOn(oldStream, 'destroy');
-      jobManager.addJob({ _id: jobId } as HydratedDocument<PageBulkExportJobDocument>);
-      jobManager.updateJobStream(jobId, oldStream);
-
-      // act
-      jobManager.updateJobStream(jobId, newStream);
-      expect(destroySpy).toHaveBeenCalledWith(expect.any(Error));
-
-      // assert
-      expect(jobManager.getJobInProgress(jobId)?.stream).toBe(newStream);
-    });
-
-    it('should destroy the new stream with BulkExportJobExpiredError if job is not in progress', () => {
-      // arrange
-      const jobId = 'job1';
-      const newStream = vi.fn().mockImplementation(() => {
-        const stream = new Readable();
-        stream.destroy = vi.fn();
-        return stream;
-      })();
-      const destroySpy = vi.spyOn(newStream, 'destroy');
-
-      // act
-      jobManager.updateJobStream(jobId, newStream);
-
-      // assert
-      expect(destroySpy).toHaveBeenCalledWith(expect.any(BulkExportJobExpiredError));
-    });
-  });
-
-  describe('removeJobInProgressAndQueueNextJob', () => {
-    it('should remove the job in progress and queue the next job', () => {
-      // arrange
-      const jobId = 'job1';
-      const mockStream = vi.fn().mockImplementation(() => {
-        const stream = new Readable();
-        stream.destroy = vi.fn();
-        return stream;
-      })();
-      vi.spyOn(mockStream, 'destroy');
-      const nextJob = { _id: 'job2' } as HydratedDocument<PageBulkExportJobDocument>;
-      jobManager.jobsInProgress[jobId] = { stream: mockStream };
-      jobManager.jobQueue.push({ job: nextJob });
-      expect(jobManager.jobQueue.length).toBe(1);
-
-      // act
-      jobManager.removeJobInProgressAndQueueNextJob(jobId);
-
-      // assert
-      expect(jobManager.jobQueue.length).toBe(0);
-      expect(mockStream.destroy).toHaveBeenCalledWith(expect.any(BulkExportJobExpiredError));
-      expect(jobManager.jobsInProgress[jobId]).toBeUndefined();
-      expect(jobManager.jobsInProgress[nextJob._id.toString()]).toEqual({ stream: undefined });
-      expect(pageBulkExportServiceMock.executePageBulkExportJob).toHaveBeenCalledWith(nextJob, undefined);
-    });
-
-    it('should destroy the stream with a BulkExportJobRestartedError if job was restarted', () => {
-      // arrange
-      const jobId = 'job1';
-      const mockStream = vi.fn().mockImplementation(() => {
-        const stream = new Readable();
-        stream.destroy = vi.fn();
-        return stream;
-      })();
-      vi.spyOn(mockStream, 'destroy');
-      jobManager.jobsInProgress[jobId] = { stream: mockStream };
-
-      // act
-      jobManager.removeJobInProgressAndQueueNextJob(jobId, true);
-
-      // assert
-      expect(mockStream.destroy).toHaveBeenCalledWith(expect.any(BulkExportJobRestartedError));
-      expect(jobManager.jobsInProgress[jobId]).toBeUndefined();
-    });
-  });
-});

+ 0 - 125
apps/app/src/features/page-bulk-export/server/service/page-bulk-export/page-bulk-export-job-manager.ts

@@ -1,125 +0,0 @@
-import type { Readable } from 'stream';
-
-import type { HydratedDocument } from 'mongoose';
-
-import type { ObjectIdLike } from '~/server/interfaces/mongoose-utils';
-import { configManager } from '~/server/service/config-manager';
-
-import type { PageBulkExportJobDocument } from '../../models/page-bulk-export-job';
-
-import { BulkExportJobExpiredError, BulkExportJobRestartedError } from './errors';
-
-import type { ActivityParameters, IPageBulkExportService } from '.';
-
-/**
- * Manage PageBulkExportJob execution.
- * - Keep track of jobs being executed and enable destroying the stream if the job is terminated
- * - Limit the number of jobs being executed in parallel
- * - Queue jobs to be executed in order
- */
-export class PageBulkExportJobManager {
-
-  pageBulkExportService: IPageBulkExportService;
-
-  private parallelExecLimit: number;
-
-  // contains jobs being executed and it's information
-  // the key is the _id of PageBulkExportJob and the value contains the stream of the job
-  jobsInProgress: {
-    [key: string]: { stream: Readable | undefined };
-  } = {};
-
-  // jobs waiting to be executed in order
-  jobQueue: { job: HydratedDocument<PageBulkExportJobDocument>, activityParameters?: ActivityParameters }[] = [];
-
-  constructor(pageBulkExportService: IPageBulkExportService) {
-    this.pageBulkExportService = pageBulkExportService;
-    this.parallelExecLimit = configManager.getConfig('crowi', 'app:pageBulkExportParallelExecLimit');
-  }
-
-  canExecuteNextJob(): boolean {
-    return Object.keys(this.jobsInProgress).length < this.parallelExecLimit;
-  }
-
-  /**
-   * Get the information of a job in progress.
-   * A getter method that includes "undefined" in the return type
-   */
-  getJobInProgress(jobId: ObjectIdLike): { stream: Readable | undefined } | undefined {
-    return this.jobsInProgress[jobId.toString()];
-  }
-
-  /**
-   * Add a job to the queue or execute it if the number of jobs in progress is less than the limit
-   * @param job job to add or execute
-   * @param activityParameters parameters to record user activity
-   */
-  addJob(job: HydratedDocument<PageBulkExportJobDocument>, activityParameters?: ActivityParameters): void {
-    if (this.canExecuteNextJob()) {
-      this.jobsInProgress[job._id.toString()] = { stream: undefined };
-      this.pageBulkExportService.executePageBulkExportJob(job, activityParameters);
-    }
-    else {
-      this.jobQueue.push({ job, activityParameters });
-    }
-  }
-
-  /**
-   * Update the info of which stream is being executed for a job
-   * @param jobId id of job to update
-   * @param stream the new stream being executed for the job
-   */
-  updateJobStream(jobId: ObjectIdLike, stream: Readable): void {
-    const jobInProgress = this.getJobInProgress(jobId);
-    if (jobInProgress != null) {
-      if (jobInProgress.stream != null && !jobInProgress.stream.readableEnded) {
-        jobInProgress.stream.destroy(new Error('Stream not finished before next stream started'));
-      }
-      jobInProgress.stream = stream;
-    }
-    else {
-      // job was terminated beforehand, so destroy the stream
-      stream.destroy(new BulkExportJobExpiredError());
-    }
-  }
-
-  /**
-   * Remove a job in execution and queue the next job if there are any
-   * @param jobId id of job to remove
-   * @param isJobRestarted whether or not the job was restarted
-   */
-  removeJobInProgressAndQueueNextJob(jobId: ObjectIdLike, isJobRestarted = false): void {
-    this.removeJobInProgress(jobId, isJobRestarted);
-
-    if (this.jobQueue.length > 0) {
-      while (this.canExecuteNextJob() && this.jobQueue.length > 0) {
-        const nextJob = this.jobQueue.shift();
-        if (nextJob != null) {
-          this.jobsInProgress[nextJob.job._id.toString()] = { stream: undefined };
-          this.pageBulkExportService.executePageBulkExportJob(nextJob.job, nextJob.activityParameters);
-        }
-      }
-    }
-  }
-
-  /**
-   * Remove a job in execution and destroy it's stream process
-   * @param jobId id of job to remove
-   * @param isJobRestarted whether or not the job was restarted
-   */
-  private removeJobInProgress(jobId: ObjectIdLike, isJobRestarted = false): void {
-    const jobInProgress = this.getJobInProgress(jobId);
-    if (jobInProgress == null) return;
-
-    if (jobInProgress.stream != null) {
-      if (isJobRestarted) {
-        jobInProgress.stream.destroy(new BulkExportJobRestartedError());
-      }
-      else {
-        jobInProgress.stream.destroy(new BulkExportJobExpiredError());
-      }
-    }
-    delete this.jobsInProgress[jobId.toString()];
-  }
-
-}

+ 5 - 0
apps/app/src/features/rate-limiter/config/index.ts

@@ -56,6 +56,11 @@ export const defaultConfig: IApiRateLimitEndpointMap = {
     method: 'GET',
     maxRequests: MAX_REQUESTS_TIER_3,
   },
+  '/_api/v3/openai/rebuild-vector-store': {
+    method: 'POST',
+    maxRequests: 1,
+    usersPerIpProspection: 1,
+  },
 };
 
 const isDev = process.env.NODE_ENV === 'development';

+ 60 - 0
apps/app/src/features/rate-limiter/middleware/consume-points.integ.ts

@@ -0,0 +1,60 @@
+import { faker } from '@faker-js/faker';
+
+const testRateLimitErrorWhenExceedingMaxRequests = async(method: string, key: string, maxRequests: number): Promise<void> => {
+  // dynamic import is used because rateLimiterMongo needs to be initialized after connecting to DB
+  // Issue: https://github.com/animir/node-rate-limiter-flexible/issues/216
+  const { consumePoints } = await import('./consume-points');
+  let count = 0;
+  try {
+    for (let i = 1; i <= maxRequests + 1; i++) {
+      count += 1;
+      // eslint-disable-next-line no-await-in-loop
+      const res = await consumePoints(method, key, { method, maxRequests });
+      if (count === maxRequests) {
+        // Expect consumedPoints to be equal to maxRequest when maxRequest is reached
+        expect(res?.consumedPoints).toBe(maxRequests);
+        // Expect remainingPoints to be 0 when maxRequest is reached
+        expect(res?.remainingPoints).toBe(0);
+      }
+      if (count > maxRequests) {
+        throw new Error('Exception occurred');
+      }
+    }
+  }
+  catch (err) {
+    // Expect rate limit error to be called
+    expect(err.message).not.toBe('Exception occurred');
+    // Expect rate limit error at maxRequest + 1
+    expect(count).toBe(maxRequests + 1);
+  }
+};
+
+
+describe('consume-points.ts', async() => {
+  it('Should trigger a rate limit error when maxRequest is exceeded (maxRequest: 1)', async() => {
+    // setup
+    const method = 'GET';
+    const key = 'test-key-1';
+    const maxRequests = 1;
+
+    await testRateLimitErrorWhenExceedingMaxRequests(method, key, maxRequests);
+  });
+
+  it('Should trigger a rate limit error when maxRequest is exceeded (maxRequest: 500)', async() => {
+    // setup
+    const method = 'GET';
+    const key = 'test-key-2';
+    const maxRequests = 500;
+
+    await testRateLimitErrorWhenExceedingMaxRequests(method, key, maxRequests);
+  });
+
+  it('Should trigger a rate limit error when maxRequest is exceeded (maxRequest: {random integer between 1 and 1000})', async() => {
+    // setup
+    const method = 'GET';
+    const key = 'test-key-3';
+    const maxRequests = faker.number.int({ min: 1, max: 1000 });
+
+    await testRateLimitErrorWhenExceedingMaxRequests(method, key, maxRequests);
+  });
+});

+ 31 - 0
apps/app/src/features/rate-limiter/middleware/consume-points.ts

@@ -0,0 +1,31 @@
+import { type RateLimiterRes } from 'rate-limiter-flexible';
+
+import { DEFAULT_MAX_REQUESTS, type IApiRateLimitConfig } from '../config';
+
+import { rateLimiterFactory } from './rate-limiter-factory';
+
+export const consumePoints = async(
+    method: string, key: string | null, customizedConfig?: IApiRateLimitConfig, maxRequestsMultiplier?: number,
+): Promise<RateLimiterRes | undefined> => {
+  if (key == null) {
+    return;
+  }
+
+  let maxRequests = DEFAULT_MAX_REQUESTS;
+
+  // use customizedConfig
+  if (customizedConfig != null && (customizedConfig.method.includes(method) || customizedConfig.method === 'ALL')) {
+    maxRequests = customizedConfig.maxRequests;
+  }
+
+  // multiply
+  if (maxRequestsMultiplier != null) {
+    maxRequests *= maxRequestsMultiplier;
+  }
+
+  const rateLimiter = rateLimiterFactory.getOrCreateRateLimiter(key, maxRequests);
+
+  const pointsToConsume = 1;
+  const rateLimiterRes = await rateLimiter.consume(key, pointsToConsume);
+  return rateLimiterRes;
+};

+ 11 - 43
apps/app/src/features/rate-limiter/middleware/factory.ts

@@ -1,16 +1,14 @@
 import type { IUserHasId } from '@growi/core';
 import type { Handler, Request } from 'express';
 import md5 from 'md5';
-import { connection } from 'mongoose';
-import { type IRateLimiterMongoOptions, RateLimiterMongo } from 'rate-limiter-flexible';
+import { type RateLimiterRes } from 'rate-limiter-flexible';
 
 import loggerFactory from '~/utils/logger';
 
-import {
-  DEFAULT_DURATION_SEC, DEFAULT_MAX_REQUESTS, DEFAULT_USERS_PER_IP_PROSPECTION, type IApiRateLimitConfig,
-} from '../config';
+import { DEFAULT_USERS_PER_IP_PROSPECTION, type IApiRateLimitConfig } from '../config';
 import { generateApiRateLimitConfig } from '../utils/config-generator';
 
+import { consumePoints } from './consume-points';
 
 const logger = loggerFactory('growi:middleware:api-rate-limit');
 
@@ -19,15 +17,6 @@ const logger = loggerFactory('growi:middleware:api-rate-limit');
 // API_RATE_LIMIT_010_FOO_METHODS=GET,POST
 // API_RATE_LIMIT_010_FOO_MAX_REQUESTS=10
 
-const POINTS_THRESHOLD = 100;
-
-const opts: IRateLimiterMongoOptions = {
-  storeClient: connection,
-  points: POINTS_THRESHOLD, // set default value
-  duration: DEFAULT_DURATION_SEC, // set default value
-};
-const rateLimiter = new RateLimiterMongo(opts);
-
 // generate ApiRateLimitConfig for api rate limiter
 const apiRateLimitConfig = generateApiRateLimitConfig();
 const configWithoutRegExp = apiRateLimitConfig.withoutRegExp;
@@ -37,31 +26,6 @@ const keysWithRegExp = Object.keys(configWithRegExp).map(key => new RegExp(`^${k
 const valuesWithRegExp = Object.values(configWithRegExp);
 
 
-const _consumePoints = async(
-    method: string, key: string | null, customizedConfig?: IApiRateLimitConfig, maxRequestsMultiplier?: number,
-) => {
-  if (key == null) {
-    return;
-  }
-
-  let maxRequests = DEFAULT_MAX_REQUESTS;
-
-  // use customizedConfig
-  if (customizedConfig != null && (customizedConfig.method.includes(method) || customizedConfig.method === 'ALL')) {
-    maxRequests = customizedConfig.maxRequests;
-  }
-
-  // multiply
-  if (maxRequestsMultiplier != null) {
-    maxRequests *= maxRequestsMultiplier;
-  }
-
-  // because the maximum request is reduced by 1 if it is divisible by
-  // https://github.com/weseek/growi/pull/6225
-  const consumePoints = (POINTS_THRESHOLD + 0.0001) / maxRequests;
-  await rateLimiter.consume(key, consumePoints);
-};
-
 /**
  * consume per user per endpoint
  * @param method
@@ -69,8 +33,10 @@ const _consumePoints = async(
  * @param customizedConfig
  * @returns
  */
-const consumePointsByUser = async(method: string, key: string | null, customizedConfig?: IApiRateLimitConfig) => {
-  return _consumePoints(method, key, customizedConfig);
+const consumePointsByUser = async(
+    method: string, key: string | null, customizedConfig?: IApiRateLimitConfig,
+): Promise<RateLimiterRes | undefined> => {
+  return consumePoints(method, key, customizedConfig);
 };
 
 /**
@@ -80,9 +46,11 @@ const consumePointsByUser = async(method: string, key: string | null, customized
  * @param customizedConfig
  * @returns
  */
-const consumePointsByIp = async(method: string, key: string | null, customizedConfig?: IApiRateLimitConfig) => {
+const consumePointsByIp = async(
+    method: string, key: string | null, customizedConfig?: IApiRateLimitConfig,
+): Promise<RateLimiterRes | undefined> => {
   const maxRequestsMultiplier = customizedConfig?.usersPerIpProspection ?? DEFAULT_USERS_PER_IP_PROSPECTION;
-  return _consumePoints(method, key, customizedConfig, maxRequestsMultiplier);
+  return consumePoints(method, key, customizedConfig, maxRequestsMultiplier);
 };
 
 

+ 30 - 0
apps/app/src/features/rate-limiter/middleware/rate-limiter-factory.ts

@@ -0,0 +1,30 @@
+import { connection } from 'mongoose';
+import { type IRateLimiterMongoOptions, RateLimiterMongo } from 'rate-limiter-flexible';
+
+import { DEFAULT_DURATION_SEC } from '../config';
+
+class RateLimiterFactory {
+
+  private rateLimiters: Map<string, RateLimiterMongo> = new Map();
+
+  getOrCreateRateLimiter(key: string, maxRequests: number): RateLimiterMongo {
+    const cachedRateLimiter = this.rateLimiters.get(key);
+    if (cachedRateLimiter != null) {
+      return cachedRateLimiter;
+    }
+
+    const opts: IRateLimiterMongoOptions = {
+      storeClient: connection,
+      duration: DEFAULT_DURATION_SEC,
+      points: maxRequests,
+    };
+
+    const rateLimiter = new RateLimiterMongo(opts);
+    this.rateLimiters.set(key, rateLimiter);
+
+    return rateLimiter;
+  }
+
+}
+
+export const rateLimiterFactory = new RateLimiterFactory();

+ 9 - 36
apps/app/src/server/crowi/index.js

@@ -12,11 +12,12 @@ import pkg from '^/package.json';
 
 import { KeycloakUserGroupSyncService } from '~/features/external-user-group/server/service/keycloak-user-group-sync';
 import { LdapUserGroupSyncService } from '~/features/external-user-group/server/service/ldap-user-group-sync';
-import { PageBulkExportJobInProgressStatus } from '~/features/page-bulk-export/interfaces/page-bulk-export';
-import PageBulkExportJob from '~/features/page-bulk-export/server/models/page-bulk-export-job';
-import instanciatePageBulkExportService, { pageBulkExportService } from '~/features/page-bulk-export/server/service/page-bulk-export';
-import instanciatePageBulkExportJobCronService, { pageBulkExportJobCronService } from '~/features/page-bulk-export/server/service/page-bulk-export-job-cron';
 import { startCronIfEnabled as startOpenaiCronIfEnabled } from '~/features/openai/server/services/cron';
+import { checkPageBulkExportJobInProgressCronService } from '~/features/page-bulk-export/server/service/check-page-bulk-export-job-in-progress-cron';
+import instanciatePageBulkExportJobCleanUpCronService, {
+  pageBulkExportJobCleanUpCronService,
+} from '~/features/page-bulk-export/server/service/page-bulk-export-job-clean-up-cron';
+import instanciatePageBulkExportJobCronService from '~/features/page-bulk-export/server/service/page-bulk-export-job-cron';
 import QuestionnaireService from '~/features/questionnaire/server/service/questionnaire';
 import questionnaireCronService from '~/features/questionnaire/server/service/questionnaire-cron';
 import loggerFactory from '~/utils/logger';
@@ -81,7 +82,6 @@ class Crowi {
 
   constructor() {
     this.version = pkg.version;
-    this.runtimeVersions = undefined; // initialized by scanRuntimeVersions()
 
     this.publicDir = path.join(projectRoot, 'public') + sep;
     this.resourceDir = path.join(projectRoot, 'resource') + sep;
@@ -161,7 +161,6 @@ Crowi.prototype.init = async function() {
   ]);
 
   await Promise.all([
-    this.scanRuntimeVersions(),
     this.setupPassport(),
     this.setupSearcher(),
     this.setupMailer(),
@@ -175,7 +174,6 @@ Crowi.prototype.init = async function() {
     this.setupUserGroupService(),
     this.setupExport(),
     this.setupImport(),
-    this.setupPageBulkExportService(),
     this.setupGrowiPluginService(),
     this.setupPageService(),
     this.setupInAppNotificationService(),
@@ -196,8 +194,6 @@ Crowi.prototype.init = async function() {
   ]);
 
   await normalizeData();
-
-  this.resumeIncompletePageBulkExportJobs();
 };
 
 /**
@@ -332,7 +328,10 @@ Crowi.prototype.setupCron = function() {
   questionnaireCronService.startCron();
 
   instanciatePageBulkExportJobCronService(this);
-  pageBulkExportJobCronService.startCron();
+  checkPageBulkExportJobInProgressCronService.startCron();
+
+  instanciatePageBulkExportJobCleanUpCronService(this);
+  pageBulkExportJobCleanUpCronService.startCron();
 
   startOpenaiCronIfEnabled();
 };
@@ -341,21 +340,6 @@ Crowi.prototype.setupQuestionnaireService = function() {
   this.questionnaireService = new QuestionnaireService(this);
 };
 
-Crowi.prototype.scanRuntimeVersions = async function() {
-  const self = this;
-
-  const check = require('check-node-version');
-  return new Promise((resolve, reject) => {
-    check((err, result) => {
-      if (err) {
-        reject(err);
-      }
-      self.runtimeVersions = result;
-      resolve();
-    });
-  });
-};
-
 Crowi.prototype.getSlack = function() {
   return this.slack;
 };
@@ -700,10 +684,6 @@ Crowi.prototype.setupExport = async function() {
   instanciateExportService(this);
 };
 
-Crowi.prototype.setupPageBulkExportService = async function() {
-  instanciatePageBulkExportService(this);
-};
-
 Crowi.prototype.setupImport = async function() {
   initializeImportService(this);
 };
@@ -796,11 +776,4 @@ Crowi.prototype.setupExternalUserGroupSyncService = function() {
   this.keycloakUserGroupSyncService = new KeycloakUserGroupSyncService(this.s2sMessagingService, this.socketIoService);
 };
 
-Crowi.prototype.resumeIncompletePageBulkExportJobs = async function() {
-  const jobs = await PageBulkExportJob.find({
-    $or: Object.values(PageBulkExportJobInProgressStatus).map(status => ({ status })),
-  });
-  jobs.forEach(job => pageBulkExportService?.pageBulkExportJobManager?.addJob(job));
-};
-
 export default Crowi;

+ 6 - 3
apps/app/src/server/routes/apiv3/admin-home.js → apps/app/src/server/routes/apiv3/admin-home.ts

@@ -83,11 +83,14 @@ module.exports = (crowi) => {
    *                      $ref: "#/components/schemas/SystemInformationParams"
    */
   router.get('/', loginRequiredStrictly, adminRequired, async(req, res) => {
+    const { getRuntimeVersions } = await import('~/server/util/runtime-versions');
+    const runtimeVersions = await getRuntimeVersions();
+
     const adminHomeParams = {
       growiVersion: crowi.version,
-      nodeVersion: crowi.runtimeVersions.versions.node ? crowi.runtimeVersions.versions.node.version.version : '-',
-      npmVersion: crowi.runtimeVersions.versions.npm ? crowi.runtimeVersions.versions.npm.version.version : '-',
-      pnpmVersion: crowi.runtimeVersions.versions.pnpm ? crowi.runtimeVersions.versions.pnpm.version.version : '-',
+      nodeVersion: runtimeVersions.node ?? '-',
+      npmVersion: runtimeVersions.npm ?? '-',
+      pnpmVersion: runtimeVersions.pnpm ?? '-',
       envVars: await ConfigLoader.getEnvVarsForDisplay(true),
       isV5Compatible: crowi.configManager.getConfig('crowi', 'app:isV5Compatible'),
       isMaintenanceMode: crowi.configManager.getConfig('crowi', 'app:isMaintenanceMode'),

+ 89 - 37
apps/app/src/server/routes/apiv3/attachment.js

@@ -30,10 +30,52 @@ const {
  *
  *  components:
  *    schemas:
+ *      AttachmentPaginateResult:
+ *        description: AttachmentPaginateResult
+ *        type: object
+ *        properties:
+ *          docs:
+ *            type: array
+ *            items:
+ *              $ref: '#/components/schemas/Attachment'
+ *          totalDocs:
+ *            type: number
+ *            example: 1
+ *          limit:
+ *            type: number
+ *            example: 20
+ *          totalPages:
+ *            type: number
+ *            example: 1
+ *          page:
+ *            type: number
+ *            example: 1
+ *          offset:
+ *            type: number
+ *            example: 0
+ *          prevPage:
+ *            type: number
+ *            example: null
+ *          nextPage:
+ *            type: number
+ *            example: null
+ *          hasNextPage:
+ *            type: boolean
+ *            example: false
+ *          hasPrevPage:
+ *            type: boolean
+ *            example: false
+ *          pagingCounter:
+ *            type: number
+ *            example: 1
  *      Attachment:
  *        description: Attachment
  *        type: object
  *        properties:
+ *          id:
+ *            type: string
+ *            description: attachment ID
+ *            example: 5e0734e072560e001761fa67
  *          _id:
  *            type: string
  *            description: attachment ID
@@ -42,6 +84,10 @@ const {
  *            type: number
  *            description: attachment version
  *            example: 0
+ *          attachmentType:
+ *            type: string
+ *            description: attachment type
+ *            example: WIKI_PAGE
  *          fileFormat:
  *            type: string
  *            description: file format in MIME
@@ -55,6 +101,7 @@ const {
  *            description: original file name
  *            example: file.txt
  *          creator:
+ *            type: object
  *            $ref: '#/components/schemas/User'
  *          page:
  *            type: string
@@ -64,14 +111,14 @@ const {
  *            type: string
  *            description: date created at
  *            example: 2010-01-01T00:00:00.000Z
+ *          temporaryUrlExpiredAt:
+ *            type: string
+ *            description: temporary URL expired at
+ *            example: 2024-11-27T00:59:59.962Z
  *          fileSize:
  *            type: number
  *            description: file size
  *            example: 3494332
- *          url:
- *            type: string
- *            description: attachment URL
- *            example: http://localhost/files/5e0734e072560e001761fa67
  *          filePathProxied:
  *            type: string
  *            description: file path proxied
@@ -80,8 +127,11 @@ const {
  *            type: string
  *            description: download path proxied
  *            example: "/download/5e0734e072560e001761fa67"
+ *          temporaryUrlCached:
+ *            type: string
+ *            description: temporary URL cached
+ *            example: "https://example.com/attachment/5e0734e072560e001761fa67"
  */
-
 module.exports = (crowi) => {
   const loginRequired = require('../../middlewares/login-required')(crowi, true);
   const loginRequiredStrictly = require('../../middlewares/login-required')(crowi);
@@ -117,16 +167,35 @@ module.exports = (crowi) => {
    *      get:
    *        tags: [Attachment]
    *        description: Get attachment list
-   *        responses:
-   *          200:
-   *            description: Return attachment list
    *        parameters:
-   *          - name: page_id
+   *          - name: pageId
    *            in: query
    *            required: true
    *            description: page id
    *            schema:
    *              type: string
+   *          - name: pageNumber
+   *            in: query
+   *            required: false
+   *            description: page number
+   *            schema:
+   *              type: number
+   *              example: 1
+   *          - name: limit
+   *            in: query
+   *            required: false
+   *            description: limit
+   *            schema:
+   *              type: number
+   *              example: 10
+   *        responses:
+   *          200:
+   *            description: Return attachment list
+   *            content:
+   *              application/json:
+   *                schema:
+   *                  type: object
+   *                  $ref: '#/components/schemas/AttachmentPaginateResult'
    */
   router.get('/list', accessTokenParser, loginRequired, validator.retrieveAttachments, apiV3FormValidator, async(req, res) => {
 
@@ -202,11 +271,6 @@ module.exports = (crowi) => {
    *          500:
    *            $ref: '#/components/responses/500'
    */
-  /**
-   * @api {get} /attachment/limit get available capacity of uploaded file with GridFS
-   * @apiName AddAttachment
-   * @apiGroup Attachment
-   */
   router.get('/limit', accessTokenParser, loginRequiredStrictly, validator.retrieveFileLimit, apiV3FormValidator, async(req, res) => {
     const { fileUploadService } = crowi;
     const fileSize = Number(req.query.fileSize);
@@ -234,10 +298,7 @@ module.exports = (crowi) => {
    *              schema:
    *                properties:
    *                  page_id:
-   *                    nullable: true
-   *                    type: string
-   *                  path:
-   *                    nullable: true
+   *                    nullable: false
    *                    type: string
    *                  file:
    *                    type: string
@@ -250,10 +311,7 @@ module.exports = (crowi) => {
    *              schema:
    *                properties:
    *                  page_id:
-   *                    nullable: true
-   *                    type: string
-   *                  path:
-   *                    nullable: true
+   *                    nullable: false
    *                    type: string
    *                  file:
    *                    type: string
@@ -273,26 +331,13 @@ module.exports = (crowi) => {
    *                      $ref: '#/components/schemas/Page'
    *                    attachment:
    *                      $ref: '#/components/schemas/Attachment'
-   *                    url:
-   *                      $ref: '#/components/schemas/Attachment/properties/url'
-   *                    pageCreated:
-   *                      type: boolean
-   *                      description: whether the page was created
-   *                      example: false
+   *                    revision:
+   *                      type: string
    *          403:
    *            $ref: '#/components/responses/403'
    *          500:
    *            $ref: '#/components/responses/500'
    */
-  /**
-   * @api {post} /attachment Add attachment to the page
-   * @apiName AddAttachment
-   * @apiGroup Attachment
-   *
-   * @apiParam {String} page_id
-   * @apiParam {String} path
-   * @apiParam {File} file
-   */
   router.post('/', uploads.single('file'), autoReap, accessTokenParser, loginRequiredStrictly, excludeReadOnlyUser,
     validator.retrieveAddAttachment, apiV3FormValidator, addActivity,
     async(req, res) => {
@@ -342,6 +387,13 @@ module.exports = (crowi) => {
    *        responses:
    *          200:
    *            description: Return attachment
+   *            content:
+   *              application/json:
+   *                schema:
+   *                  type: object
+   *                  properties:
+   *                    attachment:
+   *                      $ref: '#/components/schemas/Attachment'
    *        parameters:
    *          - name: id
    *            in: path

+ 286 - 3
apps/app/src/server/routes/apiv3/bookmark-folder.ts

@@ -16,6 +16,85 @@ const express = require('express');
 
 const router = express.Router();
 
+/**
+ * @swagger
+ *
+ *  components:
+ *    schemas:
+ *      BookmarkFolder:
+ *        description: Bookmark Folder
+ *        type: object
+ *        properties:
+ *          _id:
+ *            type: string
+ *            description: Bookmark Folder ID
+ *          __v:
+ *            type: number
+ *            description: Version of the bookmark folder
+ *          name:
+ *            type: string
+ *            description: Name of the bookmark folder
+ *          owner:
+ *            type: string
+ *            description: Owner user ID of the bookmark folder
+ *          bookmarks:
+ *            type: array
+ *            items:
+ *              type: object
+ *              properties:
+ *                _id:
+ *                  type: string
+ *                  description: Bookmark ID
+ *                user:
+ *                  type: string
+ *                  description: User ID of the bookmarker
+ *                createdAt:
+ *                  type: string
+ *                  description: Date and time when the bookmark was created
+ *                __v:
+ *                  type: number
+ *                  description: Version of the bookmark
+ *                page:
+ *                  description: Pages that are bookmarked in the folder
+ *                  allOf:
+ *                    - $ref: '#/components/schemas/Page'
+ *                    - type: object
+ *                      properties:
+ *                        id:
+ *                          type: string
+ *                          description: Page ID
+ *                          example: "671b5cd38d45e62b52217ff8"
+ *                        parent:
+ *                          type: string
+ *                          description: Parent page ID
+ *                          example: 669a5aa48d45e62b521d00da
+ *                        descendantCount:
+ *                          type: number
+ *                          description: Number of descendants
+ *                          example: 0
+ *                        isEmpty:
+ *                          type: boolean
+ *                          description: Whether the page is empty
+ *                          example: false
+ *                        grantedGroups:
+ *                          type: array
+ *                          description: List of granted groups
+ *                          items:
+ *                            type: string
+ *                        creator:
+ *                          type: string
+ *                          description: Creator user ID
+ *                          example: "669a5aa48d45e62b521d00e4"
+ *                        latestRevisionBodyLength:
+ *                          type: number
+ *                          description: Length of the latest revision body
+ *                          example: 241
+ *          childFolder:
+ *            type: array
+ *            items:
+ *              type: object
+ *              $ref: '#/components/schemas/BookmarkFolder'
+ */
 const validator = {
   bookmarkFolder: [
     body('name').isString().withMessage('name must be a string'),
@@ -42,7 +121,40 @@ const validator = {
 module.exports = (crowi) => {
   const loginRequiredStrictly = require('../../middlewares/login-required')(crowi);
 
-  // Create new bookmark folder
+  /**
+   * @swagger
+   *
+   *    /bookmark-folder:
+   *      post:
+   *        tags: [BookmarkFolders]
+   *        operationId: createBookmarkFolder
+   *        security:
+   *          - api_key: []
+   *        summary: Create bookmark folder
+   *        description: Create a new bookmark folder
+   *        requestBody:
+   *          content:
+   *            application/json:
+   *              schema:
+   *                properties:
+   *                  name:
+   *                    type: string
+   *                    description: Name of the bookmark folder
+   *                    nullable: false
+   *                  parent:
+   *                    type: string
+   *                    description: Parent folder ID
+   *        responses:
+   *          200:
+   *            description: Resources are available
+   *            content:
+   *              application/json:
+   *                schema:
+   *                  properties:
+   *                    bookmarkFolder:
+   *                      type: object
+   *                      $ref: '#/components/schemas/BookmarkFolder'
+   */
   router.post('/', accessTokenParser, loginRequiredStrictly, validator.bookmarkFolder, apiV3FormValidator, async(req, res) => {
     const owner = req.user?._id;
     const { name, parent } = req.body;
@@ -64,7 +176,37 @@ module.exports = (crowi) => {
     }
   });
 
-  // List bookmark folders and child
+  /**
+   * @swagger
+   *
+   *    /bookmark-folder/list/{userId}:
+   *      get:
+   *        tags: [BookmarkFolders]
+   *        operationId: listBookmarkFolders
+   *        security:
+   *          - api_key: []
+   *        summary: List bookmark folders of a user
+   *        description: List bookmark folders of a user
+   *        parameters:
+   *         - name: userId
+   *           in: path
+   *           required: true
+   *           description: User ID
+   *           schema:
+   *             type: string
+   *        responses:
+   *          200:
+   *            description: Resources are available
+   *            content:
+   *              application/json:
+   *                schema:
+   *                  properties:
+   *                    bookmarkFolderItems:
+   *                      type: array
+   *                      items:
+   *                        type: object
+   *                        $ref: '#/components/schemas/BookmarkFolder'
+   */
   router.get('/list/:userId', accessTokenParser, loginRequiredStrictly, async(req, res) => {
     const { userId } = req.params;
 
@@ -123,7 +265,36 @@ module.exports = (crowi) => {
     }
   });
 
-  // Delete bookmark folder and children
+  /**
+   * @swagger
+   *
+   *    /bookmark-folder/{id}:
+   *      delete:
+   *        tags: [BookmarkFolders]
+   *        operationId: deleteBookmarkFolder
+   *        security:
+   *          - api_key: []
+   *        summary: Delete bookmark folder
+   *        description: Delete a bookmark folder and its children
+   *        parameters:
+   *         - name: id
+   *           in: path
+   *           required: true
+   *           description: Bookmark Folder ID
+   *           schema:
+   *             type: string
+   *        responses:
+   *          200:
+   *            description: Deleted successfully
+   *            content:
+   *              application/json:
+   *                schema:
+   *                  properties:
+   *                    deletedCount:
+   *                      type: number
+   *                      description: Number of deleted folders
+   *                      example: 1
+   */
   router.delete('/:id', accessTokenParser, loginRequiredStrictly, async(req, res) => {
     const { id } = req.params;
     try {
@@ -137,6 +308,49 @@ module.exports = (crowi) => {
     }
   });
 
+  /**
+   * @swagger
+   *
+   *    /bookmark-folder:
+   *      put:
+   *        tags: [BookmarkFolders]
+   *        operationId: updateBookmarkFolder
+   *        security:
+   *          - api_key: []
+   *        summary: Update bookmark folder
+   *        description: Update a bookmark folder
+   *        requestBody:
+   *          content:
+   *            application/json:
+   *              schema:
+   *                properties:
+   *                  bookmarkFolderId:
+   *                    type: string
+   *                    description: Bookmark Folder ID
+   *                  name:
+   *                    type: string
+   *                    description: Name of the bookmark folder
+   *                    nullable: false
+   *                  parent:
+   *                    type: string
+   *                    description: Parent folder ID
+   *                  childFolder:
+   *                    type: array
+   *                    description: Child folders
+   *                    items:
+   *                      type: object
+   *                      $ref: '#/components/schemas/BookmarkFolder'
+   *        responses:
+   *          200:
+   *            description: Resources are available
+   *            content:
+   *              application/json:
+   *                schema:
+   *                  properties:
+   *                    bookmarkFolder:
+   *                      type: object
+   *                      $ref: '#/components/schemas/BookmarkFolder'
+   */
   router.put('/', accessTokenParser, loginRequiredStrictly, validator.bookmarkFolder, async(req, res) => {
     const {
       bookmarkFolderId, name, parent, childFolder,
@@ -151,6 +365,41 @@ module.exports = (crowi) => {
     }
   });
 
+  /**
+   * @swagger
+   *
+   *    /bookmark-folder/add-boookmark-to-folder:
+   *      post:
+   *        tags: [BookmarkFolders]
+   *        operationId: addBookmarkToFolder
+   *        security:
+   *          - api_key: []
+   *        summary: Update bookmark folder
+   *        description: Update a bookmark folder
+   *        requestBody:
+   *          content:
+   *            application/json:
+   *              schema:
+   *                properties:
+   *                  pageId:
+   *                    type: string
+   *                    description: Page ID
+   *                    nullable: false
+   *                  folderId:
+   *                    type: string
+   *                    description: Folder ID
+   *                    nullable: true
+   *        responses:
+   *          200:
+   *            description: Resources are available
+   *            content:
+   *              application/json:
+   *                schema:
+   *                  properties:
+   *                    bookmarkFolder:
+   *                      type: object
+   *                      $ref: '#/components/schemas/BookmarkFolder'
+   */
   router.post('/add-boookmark-to-folder', accessTokenParser, loginRequiredStrictly, validator.bookmarkPage, apiV3FormValidator, async(req, res) => {
     const userId = req.user?._id;
     const { pageId, folderId } = req.body;
@@ -166,6 +415,40 @@ module.exports = (crowi) => {
     }
   });
 
+  /**
+   * @swagger
+   *
+   *    /bookmark-folder/update-bookmark:
+   *      put:
+   *        tags: [BookmarkFolders]
+   *        operationId: updateBookmarkInFolder
+   *        security:
+   *          - api_key: []
+   *        summary: Update bookmark in folder
+   *        description: Update a bookmark in a folder
+   *        requestBody:
+   *          content:
+   *            application/json:
+   *              schema:
+   *                properties:
+   *                  pageId:
+   *                    type: string
+   *                    description: Page ID
+   *                    nullable: false
+   *                  status:
+   *                    type: string
+   *                    description: Bookmark status
+   *        responses:
+   *          200:
+   *            description: Resources are available
+   *            content:
+   *              application/json:
+   *                schema:
+   *                  properties:
+   *                    bookmarkFolder:
+   *                      type: object
+   *                      $ref: '#/components/schemas/BookmarkFolder'
+   */
   router.put('/update-bookmark', accessTokenParser, loginRequiredStrictly, validator.bookmark, async(req, res) => {
     const { pageId, status } = req.body;
     const userId = req.user?._id;

+ 3 - 2
apps/app/src/server/routes/apiv3/page/index.ts

@@ -1,5 +1,6 @@
 import path from 'path';
-import { pipeline, type Readable } from 'stream';
+import { type Readable } from 'stream';
+import { pipeline } from 'stream/promises';
 
 import type { IPage } from '@growi/core';
 import {
@@ -765,7 +766,7 @@ module.exports = (crowi) => {
     };
     await crowi.activityService.createActivity(parameters);
 
-    return pipeline(stream, res);
+    await pipeline(stream, res);
   });
 
   /**

+ 6 - 1
apps/app/src/server/routes/login.js

@@ -50,10 +50,15 @@ module.exports = function(crowi, app) {
       targetModel: SupportedTargetModel.MODEL_USER,
     });
 
+    /**
+     * @param {import('../service/pre-notify').PreNotifyProps} props
+     */
     const preNotify = async(props) => {
+      /** @type {(import('mongoose').HydratedDocument<import('@growi/core').IUser>)[]} */
       const adminUsers = await User.findAdmins();
 
-      props.push(...adminUsers);
+      const { notificationTargetUsers } = props;
+      notificationTargetUsers?.push(...adminUsers);
     };
 
     await activityEvent.emit('updated', activity, user, preNotify);

+ 12 - 0
apps/app/src/server/service/config-loader.ts

@@ -767,6 +767,18 @@ const ENV_VAR_NAME_TO_CONFIG_INFO: Record<string, EnvConfig> = {
     ns: 'crowi',
     key: 'app:pageBulkExportJobCronSchedule',
     type: ValueType.STRING,
+    default: '*/10 * * * * *', // every 10 seconds
+  },
+  CHECK_PAGE_BULK_EXPORT_JOB_IN_PROGRESS_CRON_SCHEDULE: {
+    ns: 'crowi',
+    key: 'app:checkPageBulkExportJobInProgressCronSchedule',
+    type: ValueType.STRING,
+    default: '*/3 * * * *', // every 3 minutes
+  },
+  BULK_EXPORT_JOB_CLEAN_UP_CRON_SCHEDULE: {
+    ns: 'crowi',
+    key: 'app:pageBulkExportJobCleanUpCronSchedule',
+    type: ValueType.STRING,
     default: '*/10 * * * *', // every 10 minutes
   },
   BULK_EXPORT_PARALLEL_EXEC_LIMIT: {

+ 7 - 2
apps/app/src/server/service/cron.ts

@@ -11,7 +11,7 @@ const logger = loggerFactory('growi:service:cron');
 abstract class CronService {
 
   // The current cronjob to manage
-  cronJob: ScheduledTask;
+  cronJob: ScheduledTask | undefined;
 
   /**
    * Create and start a new cronjob
@@ -26,7 +26,12 @@ abstract class CronService {
    * Stop the current cronjob
    */
   stopCron(): void {
-    this.cronJob.stop();
+    this.cronJob?.stop();
+    this.cronJob = undefined;
+  }
+
+  isJobRunning(): boolean {
+    return this.cronJob != null;
   }
 
   /**

+ 61 - 0
apps/app/src/server/util/runtime-versions.ts

@@ -0,0 +1,61 @@
+import checkNodeVersion from 'check-node-version';
+
+type RuntimeVersions = {
+  node: string | undefined;
+  npm: string | undefined;
+  pnpm: string | undefined;
+};
+
+
+// define original types because the object returned is not according to the official type definition
+type SatisfiedVersionInfo = {
+  isSatisfied: true;
+  version: {
+    version: string;
+  }
+}
+
+type NotfoundVersionInfo = {
+  isSatisfied: true;
+  notfound: true;
+}
+
+type VersionInfo = SatisfiedVersionInfo | NotfoundVersionInfo;
+
+function isNotfoundVersionInfo(info: VersionInfo): info is NotfoundVersionInfo {
+  return 'notfound' in info;
+}
+
+function isSatisfiedVersionInfo(info: VersionInfo): info is SatisfiedVersionInfo {
+  return 'version' in info;
+}
+
+const getVersion = (versionInfo: VersionInfo): string | undefined => {
+  if (isNotfoundVersionInfo(versionInfo)) {
+    return undefined;
+  }
+
+  if (isSatisfiedVersionInfo(versionInfo)) {
+    return versionInfo.version.version;
+  }
+
+  return undefined;
+};
+
+
+export function getRuntimeVersions(): Promise<RuntimeVersions> {
+  return new Promise((resolve, reject) => {
+    checkNodeVersion({}, (error, result) => {
+      if (error) {
+        reject(error);
+        return;
+      }
+
+      resolve({
+        node: getVersion(result.versions.node as unknown as VersionInfo),
+        npm: getVersion(result.versions.npm as unknown as VersionInfo),
+        pnpm: getVersion(result.versions.pnpm as unknown as VersionInfo),
+      });
+    });
+  });
+}

+ 1 - 2
apps/slackbot-proxy/package.json

@@ -1,6 +1,6 @@
 {
   "name": "@growi/slackbot-proxy",
-  "version": "7.1.3-slackbot-proxy.0",
+  "version": "7.1.5-slackbot-proxy.0",
   "license": "MIT",
   "private": "true",
   "scripts": {
@@ -76,7 +76,6 @@
     "@types/bunyan": "^1.8.11",
     "bootstrap": "=5.3.2",
     "browser-bunyan": "^1.6.3",
-    "eslint-plugin-regex": "^1.8.0",
     "morgan": "^1.10.0"
   }
 }

+ 2 - 1
package.json

@@ -1,6 +1,6 @@
 {
   "name": "growi",
-  "version": "7.1.3-RC.0",
+  "version": "7.1.5-RC.0",
   "description": "Team collaboration software using markdown",
   "license": "MIT",
   "private": "true",
@@ -72,6 +72,7 @@
     "eslint-plugin-react-hooks": "^4.6.0",
     "eslint-plugin-rulesdir": "^0.2.2",
     "eslint-plugin-vitest": "^0.2.3",
+    "eslint-plugin-regex": "^1.8.0",
     "glob": "^8.1.0",
     "mock-require": "^3.0.3",
     "nodemon": "^3.1.3",

+ 0 - 1
packages/core/package.json

@@ -73,7 +73,6 @@
     "escape-string-regexp": "^4.0.0"
   },
   "devDependencies": {
-    "eslint-plugin-regex": "^1.8.0",
     "mongoose": "^6.11.3",
     "socket.io-client": "^4.7.5",
     "swr": "^2.2.2"

+ 0 - 1
packages/presentation/package.json

@@ -47,7 +47,6 @@
     "@types/mdast": "^4.0.4",
     "@types/reveal.js": "^4.4.1",
     "@types/unist": "^3.0.3",
-    "eslint-plugin-regex": "^1.8.0",
     "hast-util-sanitize": "^5.0.1",
     "hast-util-select": "^6.0.2",
     "mdast-util-frontmatter": "^2.0.1",

+ 0 - 1
packages/remark-attachment-refs/package.json

@@ -60,7 +60,6 @@
     "@types/bunyan": "^1.8.11",
     "@types/hast": "^3.0.4",
     "csstype": "^3.0.2",
-    "eslint-plugin-regex": "^1.8.0",
     "hast-util-sanitize": "^5.0.1",
     "hast-util-select": "^6.0.2",
     "npm-run-all": "^4.1.5",

+ 0 - 1
packages/remark-drawio/package.json

@@ -35,7 +35,6 @@
     "@types/mdast": "^4.0.4",
     "@types/react": "^18.2.14",
     "@types/react-dom": "^18.2.6",
-    "eslint-plugin-regex": "^1.8.0",
     "hast-util-sanitize": "^5.0.1",
     "pako": "^2.1.0",
     "throttle-debounce": "^5.0.0",

+ 0 - 1
packages/remark-lsx/package.json

@@ -49,7 +49,6 @@
     "@types/hast": "^3.0.4",
     "axios": "^0.24.0",
     "is-absolute-url": "^4.0.1",
-    "eslint-plugin-regex": "^1.8.0",
     "hast-util-sanitize": "^5.0.1",
     "hast-util-select": "^6.0.2",
     "unified": "^11.0.0",

+ 1 - 2
packages/slack/package.json

@@ -68,7 +68,6 @@
   "devDependencies": {
     "@slack/types": "^2.14.0",
     "@types/express": "^4",
-    "@types/qs": "^6.9.16",
-    "eslint-plugin-regex": "^1.8.0"
+    "@types/qs": "^6.9.16"
   }
 }

+ 159 - 63
pnpm-lock.yaml

@@ -26,7 +26,7 @@ importers:
         version: 1.46.0
       '@swc-node/register':
         specifier: ^1.9.1
-        version: 1.10.0(@swc/core@1.5.25(@swc/helpers@0.5.11))(@swc/types@0.1.7)(typescript@5.0.4)
+        version: 1.10.0(@swc/core@1.5.25(@swc/helpers@0.5.11))(@swc/types@0.1.12)(typescript@5.0.4)
       '@swc/core':
         specifier: ^1.5.25
         version: 1.5.25(@swc/helpers@0.5.11)
@@ -93,6 +93,9 @@ importers:
       eslint-plugin-react-hooks:
         specifier: ^4.6.0
         version: 4.6.0(eslint@8.41.0)
+      eslint-plugin-regex:
+        specifier: ^1.8.0
+        version: 1.10.0(eslint@8.41.0)
       eslint-plugin-rulesdir:
         specifier: ^0.2.2
         version: 0.2.2
@@ -351,9 +354,6 @@ importers:
       escape-string-regexp:
         specifier: ^4.0.0
         version: 4.0.0
-      eslint-plugin-regex:
-        specifier: ^1.8.0
-        version: 1.10.0(eslint@8.41.0)
       expose-gc:
         specifier: ^1.0.0
         version: 1.0.0
@@ -490,8 +490,8 @@ importers:
         specifier: ^15.3.1
         version: 15.3.1(i18next@23.16.5)(next@14.2.13(@babel/core@7.24.6)(@playwright/test@1.46.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(sass@1.77.6))(react-i18next@15.1.1(i18next@23.16.5)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react@18.2.0)
       next-superjson:
-        specifier: ^0.0.4
-        version: 0.0.4(next@14.2.13(@babel/core@7.24.6)(@playwright/test@1.46.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(sass@1.77.6))(superjson@1.13.3)(webpack@5.92.1(@swc/core@1.5.25(@swc/helpers@0.5.11)))
+        specifier: ^1.0.7
+        version: 1.0.7(@swc/helpers@0.5.11)(next@14.2.13(@babel/core@7.24.6)(@playwright/test@1.46.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(sass@1.77.6))(superjson@1.13.3)
       next-themes:
         specifier: ^0.2.1
         version: 0.2.1(next@14.2.13(@babel/core@7.24.6)(@playwright/test@1.46.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(sass@1.77.6))(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
@@ -738,7 +738,7 @@ importers:
         version: 2.11.8
       '@swc-node/jest':
         specifier: ^1.8.1
-        version: 1.8.3(@swc/core@1.5.25(@swc/helpers@0.5.11))(@swc/types@0.1.7)(typescript@5.4.2)
+        version: 1.8.3(@swc/core@1.5.25(@swc/helpers@0.5.11))(@swc/types@0.1.12)(typescript@5.4.2)
       '@swc/jest':
         specifier: ^0.2.36
         version: 0.2.36(@swc/core@1.5.25(@swc/helpers@0.5.11))
@@ -1119,9 +1119,6 @@ importers:
       bootstrap:
         specifier: '=5.3.2'
         version: 5.3.2(@popperjs/core@2.11.8)
-      eslint-plugin-regex:
-        specifier: ^1.8.0
-        version: 1.10.0(eslint@8.41.0)
       morgan:
         specifier: ^1.10.0
         version: 1.10.0
@@ -1135,9 +1132,6 @@ importers:
         specifier: ^4.0.0
         version: 4.0.0
     devDependencies:
-      eslint-plugin-regex:
-        specifier: ^1.8.0
-        version: 1.10.0(eslint@8.41.0)
       mongoose:
         specifier: ^6.11.3
         version: 6.13.0(@aws-sdk/client-sso-oidc@3.600.0)
@@ -1348,9 +1342,6 @@ importers:
       '@types/unist':
         specifier: ^3.0.3
         version: 3.0.3
-      eslint-plugin-regex:
-        specifier: ^1.8.0
-        version: 1.10.0(eslint@8.41.0)
       hast-util-sanitize:
         specifier: ^5.0.1
         version: 5.0.1
@@ -1473,9 +1464,6 @@ importers:
       csstype:
         specifier: ^3.0.2
         version: 3.1.3
-      eslint-plugin-regex:
-        specifier: ^1.8.0
-        version: 1.10.0(eslint@8.41.0)
       hast-util-sanitize:
         specifier: ^5.0.1
         version: 5.0.1
@@ -1510,9 +1498,6 @@ importers:
       '@types/react-dom':
         specifier: ^18.2.6
         version: 18.3.0
-      eslint-plugin-regex:
-        specifier: ^1.8.0
-        version: 1.10.0(eslint@8.41.0)
       hast-util-sanitize:
         specifier: ^5.0.1
         version: 5.0.1
@@ -1653,9 +1638,6 @@ importers:
       axios:
         specifier: ^0.24.0
         version: 0.24.0
-      eslint-plugin-regex:
-        specifier: ^1.8.0
-        version: 1.10.0(eslint@8.41.0)
       hast-util-sanitize:
         specifier: ^5.0.1
         version: 5.0.1
@@ -1729,9 +1711,6 @@ importers:
       '@types/qs':
         specifier: ^6.9.16
         version: 6.9.17
-      eslint-plugin-regex:
-        specifier: ^1.8.0
-        version: 1.10.0(eslint@8.41.0)
 
   packages/ui:
     dependencies:
@@ -4152,66 +4131,135 @@ packages:
   '@swc-node/sourcemap-support@0.5.0':
     resolution: {integrity: sha512-fbhjL5G0YvFoWwNhWleuBUfotiX+USiA9oJqu9STFw+Hb0Cgnddn+HVS/K5fI45mn92e8V+cHD2jgFjk4w2T9Q==}
 
+  '@swc/core-darwin-arm64@1.4.17':
+    resolution: {integrity: sha512-HVl+W4LezoqHBAYg2JCqR+s9ife9yPfgWSj37iIawLWzOmuuJ7jVdIB7Ee2B75bEisSEKyxRlTl6Y1Oq3owBgw==}
+    engines: {node: '>=10'}
+    cpu: [arm64]
+    os: [darwin]
+
   '@swc/core-darwin-arm64@1.5.25':
     resolution: {integrity: sha512-YbD0SBgVJS2DM0vwJTU5m7+wOyCjHPBDMf3nCBJQzFZzOLzK11eRW7SzU2jhJHr9HI9sKcNFfN4lIC2Sj+4inA==}
     engines: {node: '>=10'}
     cpu: [arm64]
     os: [darwin]
 
+  '@swc/core-darwin-x64@1.4.17':
+    resolution: {integrity: sha512-WYRO9Fdzq4S/he8zjW5I95G1zcvyd9yyD3Tgi4/ic84P5XDlSMpBDpBLbr/dCPjmSg7aUXxNQqKqGkl6dQxYlA==}
+    engines: {node: '>=10'}
+    cpu: [x64]
+    os: [darwin]
+
   '@swc/core-darwin-x64@1.5.25':
     resolution: {integrity: sha512-OhP4TROT6gQuozn+ah0Y4UidSdgDmxwtQq3lgCUIAxJYErJAQ82/Y0kve2UaNmkSGjOHU+/b4siHPrYTkXOk0Q==}
     engines: {node: '>=10'}
     cpu: [x64]
     os: [darwin]
 
+  '@swc/core-linux-arm-gnueabihf@1.4.17':
+    resolution: {integrity: sha512-cgbvpWOvtMH0XFjvwppUCR+Y+nf6QPaGu6AQ5hqCP+5Lv2zO5PG0RfasC4zBIjF53xgwEaaWmGP5/361P30X8Q==}
+    engines: {node: '>=10'}
+    cpu: [arm]
+    os: [linux]
+
   '@swc/core-linux-arm-gnueabihf@1.5.25':
     resolution: {integrity: sha512-tNmUfrAHxN2gvYPyYNnHx2CYlPO7DGAUuK/bZrqawu++djcg+atAV3eI3XYJgmHId7/sYAlDQ9wjkrOLofFjVg==}
     engines: {node: '>=10'}
     cpu: [arm]
     os: [linux]
 
+  '@swc/core-linux-arm64-gnu@1.4.17':
+    resolution: {integrity: sha512-l7zHgaIY24cF9dyQ/FOWbmZDsEj2a9gRFbmgx2u19e3FzOPuOnaopFj0fRYXXKCmtdx+anD750iBIYnTR+pq/Q==}
+    engines: {node: '>=10'}
+    cpu: [arm64]
+    os: [linux]
+
   '@swc/core-linux-arm64-gnu@1.5.25':
     resolution: {integrity: sha512-stzpke+bRaNFM/HrZPRjX0aQZ86S/2DChVCwb8NAV1n5lu9mz1CS750y7WbbtX/KZjk92FsCeRy2qwkvjI0gWw==}
     engines: {node: '>=10'}
     cpu: [arm64]
     os: [linux]
 
+  '@swc/core-linux-arm64-musl@1.4.17':
+    resolution: {integrity: sha512-qhH4gr9gAlVk8MBtzXbzTP3BJyqbAfUOATGkyUtohh85fPXQYuzVlbExix3FZXTwFHNidGHY8C+ocscI7uDaYw==}
+    engines: {node: '>=10'}
+    cpu: [arm64]
+    os: [linux]
+
   '@swc/core-linux-arm64-musl@1.5.25':
     resolution: {integrity: sha512-UckUfDYedish/bj2V1jgQDGgouLhyRpG7jgF3mp8jHir11V2K6JiTyjFoz99eOiclS3+hNdr4QLJ+ifrQMJNZw==}
     engines: {node: '>=10'}
     cpu: [arm64]
     os: [linux]
 
+  '@swc/core-linux-x64-gnu@1.4.17':
+    resolution: {integrity: sha512-vRDFATL1oN5oZMImkwbgSHEkp8xG1ofEASBypze01W1Tqto8t+yo6gsp69wzCZBlxldsvPpvFZW55Jq0Rn+UnA==}
+    engines: {node: '>=10'}
+    cpu: [x64]
+    os: [linux]
+
   '@swc/core-linux-x64-gnu@1.5.25':
     resolution: {integrity: sha512-LwbJEgNT3lXbvz4WFzVNXNvs8DvxpoXjMZk9K9Hig8tmZQJKHC2qZTGomcyK5EFzfj2HBuBXZnAEW8ZT9PcEaA==}
     engines: {node: '>=10'}
     cpu: [x64]
     os: [linux]
 
+  '@swc/core-linux-x64-musl@1.4.17':
+    resolution: {integrity: sha512-zQNPXAXn3nmPqv54JVEN8k2JMEcMTQ6veVuU0p5O+A7KscJq+AGle/7ZQXzpXSfUCXlLMX4wvd+rwfGhh3J4cw==}
+    engines: {node: '>=10'}
+    cpu: [x64]
+    os: [linux]
+
   '@swc/core-linux-x64-musl@1.5.25':
     resolution: {integrity: sha512-rsepMTgml0EkswWkBpg3Wrjj5eqjwTzZN5omAn1klzXSZnClTrfeHvBuoIJYVr1yx+jmBkqySgME2p7+magUAw==}
     engines: {node: '>=10'}
     cpu: [x64]
     os: [linux]
 
+  '@swc/core-win32-arm64-msvc@1.4.17':
+    resolution: {integrity: sha512-z86n7EhOwyzxwm+DLE5NoLkxCTme2lq7QZlDjbQyfCxOt6isWz8rkW5QowTX8w9Rdmk34ncrjSLvnHOeLY17+w==}
+    engines: {node: '>=10'}
+    cpu: [arm64]
+    os: [win32]
+
   '@swc/core-win32-arm64-msvc@1.5.25':
     resolution: {integrity: sha512-DJDsLBsRBV3uQBShRK2x6fqzABp9RLNVxDUpTTvUjc7qywJ8vS/yn+POK/zCyVEqLagf1z/8D5CEQ+RAIJq1NA==}
     engines: {node: '>=10'}
     cpu: [arm64]
     os: [win32]
 
+  '@swc/core-win32-ia32-msvc@1.4.17':
+    resolution: {integrity: sha512-JBwuSTJIgiJJX6wtr4wmXbfvOswHFj223AumUrK544QV69k60FJ9q2adPW9Csk+a8wm1hLxq4HKa2K334UHJ/g==}
+    engines: {node: '>=10'}
+    cpu: [ia32]
+    os: [win32]
+
   '@swc/core-win32-ia32-msvc@1.5.25':
     resolution: {integrity: sha512-BARL1ulHol53MEKC1ZVWM3A3FP757UUgG5Q8v97za+4a1SaIgbwvAQyHDxMYWi9+ij+OapK8YnWjJcFa17g8dw==}
     engines: {node: '>=10'}
     cpu: [ia32]
     os: [win32]
 
+  '@swc/core-win32-x64-msvc@1.4.17':
+    resolution: {integrity: sha512-jFkOnGQamtVDBm3MF5Kq1lgW8vx4Rm1UvJWRUfg+0gx7Uc3Jp3QMFeMNw/rDNQYRDYPG3yunCC+2463ycd5+dg==}
+    engines: {node: '>=10'}
+    cpu: [x64]
+    os: [win32]
+
   '@swc/core-win32-x64-msvc@1.5.25':
     resolution: {integrity: sha512-o+MHUWrQI9iR6EusEV8eNU2Ezi3KtlhUR4gfptQN5MbVzlgjTvQbhiKpE1GYOxp+0BLBbKRwITKOcdhxfEJ2Uw==}
     engines: {node: '>=10'}
     cpu: [x64]
     os: [win32]
 
+  '@swc/core@1.4.17':
+    resolution: {integrity: sha512-tq+mdWvodMBNBBZbwFIMTVGYHe9N7zvEaycVVjfvAx20k1XozHbHhRv+9pEVFJjwRxLdXmtvFZd3QZHRAOpoNQ==}
+    engines: {node: '>=10'}
+    peerDependencies:
+      '@swc/helpers': ^0.5.0
+    peerDependenciesMeta:
+      '@swc/helpers':
+        optional: true
+
   '@swc/core@1.5.25':
     resolution: {integrity: sha512-qdGEIdLVoTjEQ7w72UyyQ0wLFY4XbHfZiidmPHKJQsvSXzdpHXxPdlTCea/mY4AhMqo/M+pvkJSXJAxZnFl7qw==}
     engines: {node: '>=10'}
@@ -4236,6 +4284,9 @@ packages:
     peerDependencies:
       '@swc/core': '*'
 
+  '@swc/types@0.1.12':
+    resolution: {integrity: sha512-wBJA+SdtkbFhHjTMYH+dEH1y4VpfGdAc2Kw/LK09i9bXd/K6j6PkDcFCEzb6iVfZMkPRrl/q0e3toqTAJdkIVA==}
+
   '@swc/types@0.1.7':
     resolution: {integrity: sha512-scHWahbHF0eyj3JsxG9CFJgFdFNaVQCNAimBlT6PzS3n/HptxqREjsm4OH6AN3lYcffZYSPxXW8ua2BEHp0lJQ==}
 
@@ -5440,13 +5491,6 @@ packages:
     resolution: {integrity: sha512-ESAc/RJvGTFEzRwOTT4+lNDk/GNHMkKbNzsvT0qKRfDyyYTskxB5rnU2njIDYVxXCBHHEI1c0YwHob3WaYujOg==}
     engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
 
-  babel-plugin-superjson-next@0.4.5:
-    resolution: {integrity: sha512-k7S99Qpsbi3OSdlCMXEiklzxepM6QbYEIUsrjgSkpx+ksT0iNfdY2r1kCzBK2UjG8fLN6NZEKpDA8XpG2pbDSA==}
-    engines: {node: '>=10'}
-    peerDependencies:
-      next: '>=9.0.0'
-      superjson: 1.x
-
   babel-preset-current-node-syntax@1.0.1:
     resolution: {integrity: sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ==}
     peerDependencies:
@@ -10181,8 +10225,14 @@ packages:
       react: '>= 17.0.2'
       react-i18next: '>= 13.5.0'
 
-  next-superjson@0.0.4:
-    resolution: {integrity: sha512-PYtoHbPcZYED8Vm9YCIQIZi/arANNnf6grwjkPuJXzWdY1TxJxrn9dCPmVj6ALvPn9YcDThwEA9WvHq/NyzMvw==}
+  next-superjson-plugin@0.6.3:
+    resolution: {integrity: sha512-gipGROzbbn1Koq84AZQodIvBdORp9dytIDv07SguwXdxnJb6v05KCmHVNU9L6AWqxjP14qNIWCNdKRDhnGRZrg==}
+    peerDependencies:
+      next: ^13.0 || ^14.0
+      superjson: ^1 || ^2
+
+  next-superjson@1.0.7:
+    resolution: {integrity: sha512-07zs+A+oyCmpJm4qwo5M8pjnBIrYgnd2eox77wafB1shdCSxbaHNwejCKMK2e2sPtJ1u+iNPG1bG4mO6xwOz6g==}
     peerDependencies:
       next: '>=10'
 
@@ -17497,25 +17547,25 @@ snapshots:
       '@stoplight/yaml-ast-parser': 0.0.50
       tslib: 2.8.0
 
-  '@swc-node/core@1.13.1(@swc/core@1.5.25(@swc/helpers@0.5.11))(@swc/types@0.1.7)':
+  '@swc-node/core@1.13.1(@swc/core@1.5.25(@swc/helpers@0.5.11))(@swc/types@0.1.12)':
     dependencies:
       '@swc/core': 1.5.25(@swc/helpers@0.5.11)
-      '@swc/types': 0.1.7
+      '@swc/types': 0.1.12
 
-  '@swc-node/jest@1.8.3(@swc/core@1.5.25(@swc/helpers@0.5.11))(@swc/types@0.1.7)(typescript@5.4.2)':
+  '@swc-node/jest@1.8.3(@swc/core@1.5.25(@swc/helpers@0.5.11))(@swc/types@0.1.12)(typescript@5.4.2)':
     dependencies:
       '@node-rs/xxhash': 1.7.3
-      '@swc-node/core': 1.13.1(@swc/core@1.5.25(@swc/helpers@0.5.11))(@swc/types@0.1.7)
-      '@swc-node/register': 1.10.0(@swc/core@1.5.25(@swc/helpers@0.5.11))(@swc/types@0.1.7)(typescript@5.4.2)
+      '@swc-node/core': 1.13.1(@swc/core@1.5.25(@swc/helpers@0.5.11))(@swc/types@0.1.12)
+      '@swc-node/register': 1.10.0(@swc/core@1.5.25(@swc/helpers@0.5.11))(@swc/types@0.1.12)(typescript@5.4.2)
       '@swc/core': 1.5.25(@swc/helpers@0.5.11)
-      '@swc/types': 0.1.7
+      '@swc/types': 0.1.12
       typescript: 5.4.2
     transitivePeerDependencies:
       - supports-color
 
-  '@swc-node/register@1.10.0(@swc/core@1.5.25(@swc/helpers@0.5.11))(@swc/types@0.1.7)(typescript@5.0.4)':
+  '@swc-node/register@1.10.0(@swc/core@1.5.25(@swc/helpers@0.5.11))(@swc/types@0.1.12)(typescript@5.0.4)':
     dependencies:
-      '@swc-node/core': 1.13.1(@swc/core@1.5.25(@swc/helpers@0.5.11))(@swc/types@0.1.7)
+      '@swc-node/core': 1.13.1(@swc/core@1.5.25(@swc/helpers@0.5.11))(@swc/types@0.1.12)
       '@swc-node/sourcemap-support': 0.5.0
       '@swc/core': 1.5.25(@swc/helpers@0.5.11)
       colorette: 2.0.20
@@ -17527,9 +17577,9 @@ snapshots:
       - '@swc/types'
       - supports-color
 
-  '@swc-node/register@1.10.0(@swc/core@1.5.25(@swc/helpers@0.5.11))(@swc/types@0.1.7)(typescript@5.4.2)':
+  '@swc-node/register@1.10.0(@swc/core@1.5.25(@swc/helpers@0.5.11))(@swc/types@0.1.12)(typescript@5.4.2)':
     dependencies:
-      '@swc-node/core': 1.13.1(@swc/core@1.5.25(@swc/helpers@0.5.11))(@swc/types@0.1.7)
+      '@swc-node/core': 1.13.1(@swc/core@1.5.25(@swc/helpers@0.5.11))(@swc/types@0.1.12)
       '@swc-node/sourcemap-support': 0.5.0
       '@swc/core': 1.5.25(@swc/helpers@0.5.11)
       colorette: 2.0.20
@@ -17546,36 +17596,83 @@ snapshots:
       source-map-support: 0.5.21
       tslib: 2.8.0
 
+  '@swc/core-darwin-arm64@1.4.17':
+    optional: true
+
   '@swc/core-darwin-arm64@1.5.25':
     optional: true
 
+  '@swc/core-darwin-x64@1.4.17':
+    optional: true
+
   '@swc/core-darwin-x64@1.5.25':
     optional: true
 
+  '@swc/core-linux-arm-gnueabihf@1.4.17':
+    optional: true
+
   '@swc/core-linux-arm-gnueabihf@1.5.25':
     optional: true
 
+  '@swc/core-linux-arm64-gnu@1.4.17':
+    optional: true
+
   '@swc/core-linux-arm64-gnu@1.5.25':
     optional: true
 
+  '@swc/core-linux-arm64-musl@1.4.17':
+    optional: true
+
   '@swc/core-linux-arm64-musl@1.5.25':
     optional: true
 
+  '@swc/core-linux-x64-gnu@1.4.17':
+    optional: true
+
   '@swc/core-linux-x64-gnu@1.5.25':
     optional: true
 
+  '@swc/core-linux-x64-musl@1.4.17':
+    optional: true
+
   '@swc/core-linux-x64-musl@1.5.25':
     optional: true
 
+  '@swc/core-win32-arm64-msvc@1.4.17':
+    optional: true
+
   '@swc/core-win32-arm64-msvc@1.5.25':
     optional: true
 
+  '@swc/core-win32-ia32-msvc@1.4.17':
+    optional: true
+
   '@swc/core-win32-ia32-msvc@1.5.25':
     optional: true
 
+  '@swc/core-win32-x64-msvc@1.4.17':
+    optional: true
+
   '@swc/core-win32-x64-msvc@1.5.25':
     optional: true
 
+  '@swc/core@1.4.17(@swc/helpers@0.5.11)':
+    dependencies:
+      '@swc/counter': 0.1.3
+      '@swc/types': 0.1.12
+    optionalDependencies:
+      '@swc/core-darwin-arm64': 1.4.17
+      '@swc/core-darwin-x64': 1.4.17
+      '@swc/core-linux-arm-gnueabihf': 1.4.17
+      '@swc/core-linux-arm64-gnu': 1.4.17
+      '@swc/core-linux-arm64-musl': 1.4.17
+      '@swc/core-linux-x64-gnu': 1.4.17
+      '@swc/core-linux-x64-musl': 1.4.17
+      '@swc/core-win32-arm64-msvc': 1.4.17
+      '@swc/core-win32-ia32-msvc': 1.4.17
+      '@swc/core-win32-x64-msvc': 1.4.17
+      '@swc/helpers': 0.5.11
+
   '@swc/core@1.5.25(@swc/helpers@0.5.11)':
     dependencies:
       '@swc/counter': 0.1.3
@@ -17611,6 +17708,10 @@ snapshots:
       '@swc/counter': 0.1.3
       jsonc-parser: 3.2.0
 
+  '@swc/types@0.1.12':
+    dependencies:
+      '@swc/counter': 0.1.3
+
   '@swc/types@0.1.7':
     dependencies:
       '@swc/counter': 0.1.3
@@ -19346,14 +19447,6 @@ snapshots:
       '@types/babel__core': 7.20.5
       '@types/babel__traverse': 7.0.7
 
-  babel-plugin-superjson-next@0.4.5(next@14.2.13(@babel/core@7.24.6)(@playwright/test@1.46.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(sass@1.77.6))(superjson@1.13.3):
-    dependencies:
-      '@babel/helper-module-imports': 7.24.6
-      '@babel/types': 7.25.6
-      hoist-non-react-statics: 3.3.2
-      next: 14.2.13(@babel/core@7.24.6)(@playwright/test@1.46.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(sass@1.77.6)
-      superjson: 1.13.3
-
   babel-preset-current-node-syntax@1.0.1(@babel/core@7.24.6):
     dependencies:
       '@babel/core': 7.24.6
@@ -24859,18 +24952,21 @@ snapshots:
       react: 18.2.0
       react-i18next: 15.1.1(i18next@23.16.5)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
 
-  next-superjson@0.0.4(next@14.2.13(@babel/core@7.24.6)(@playwright/test@1.46.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(sass@1.77.6))(superjson@1.13.3)(webpack@5.92.1(@swc/core@1.5.25(@swc/helpers@0.5.11))):
+  next-superjson-plugin@0.6.3(next@14.2.13(@babel/core@7.24.6)(@playwright/test@1.46.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(sass@1.77.6))(superjson@1.13.3):
     dependencies:
-      '@babel/core': 7.24.6
-      '@babel/plugin-syntax-jsx': 7.24.7(@babel/core@7.24.6)
-      '@babel/plugin-syntax-typescript': 7.24.7(@babel/core@7.24.6)
-      babel-loader: 8.3.0(@babel/core@7.24.6)(webpack@5.92.1(@swc/core@1.5.25(@swc/helpers@0.5.11)))
-      babel-plugin-superjson-next: 0.4.5(next@14.2.13(@babel/core@7.24.6)(@playwright/test@1.46.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(sass@1.77.6))(superjson@1.13.3)
+      hoist-non-react-statics: 3.3.2
       next: 14.2.13(@babel/core@7.24.6)(@playwright/test@1.46.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(sass@1.77.6)
+      superjson: 1.13.3
+
+  next-superjson@1.0.7(@swc/helpers@0.5.11)(next@14.2.13(@babel/core@7.24.6)(@playwright/test@1.46.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(sass@1.77.6))(superjson@1.13.3):
+    dependencies:
+      '@swc/core': 1.4.17(@swc/helpers@0.5.11)
+      '@swc/types': 0.1.12
+      next: 14.2.13(@babel/core@7.24.6)(@playwright/test@1.46.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(sass@1.77.6)
+      next-superjson-plugin: 0.6.3(next@14.2.13(@babel/core@7.24.6)(@playwright/test@1.46.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(sass@1.77.6))(superjson@1.13.3)
     transitivePeerDependencies:
+      - '@swc/helpers'
       - superjson
-      - supports-color
-      - webpack
 
   next-themes@0.2.1(next@14.2.13(@babel/core@7.24.6)(@playwright/test@1.46.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(sass@1.77.6))(react-dom@18.2.0(react@18.2.0))(react@18.2.0):
     dependencies: