Просмотр исходного кода

Merge branch 'master' into fix/156800-csrf-protection-origin

yusa aihara 6 месяцев назад
Родитель
Commit
c4c57b825d
100 измененных файлов с 883 добавлено и 591 удалено
  1. 1 1
      .devcontainer/app/devcontainer.json
  2. 1 1
      .devcontainer/compose.extend.template.yml
  3. 5 5
      .github/mergify.yml
  4. 4 4
      .github/workflows/ci-app-prod.yml
  5. 1 1
      .github/workflows/ci-app.yml
  6. 2 2
      .github/workflows/ci-pdf-converter.yml
  7. 3 3
      .github/workflows/ci-slackbot-proxy.yml
  8. 1 1
      .github/workflows/list-unhealthy-branches.yml
  9. 1 1
      .github/workflows/release-pdf-converter.yml
  10. 30 8
      .github/workflows/release-rc.yml
  11. 1 1
      .github/workflows/release-slackbot-proxy.yml
  12. 2 2
      .github/workflows/release-subpackages.yml
  13. 2 2
      .github/workflows/release.yml
  14. 0 83
      .serena/memories/git-bisect-memory-consumption-investigation-plan.md
  15. 13 7
      .serena/memories/suggested_commands.md
  16. 9 1
      CHANGELOG.md
  17. 5 5
      README.md
  18. 5 5
      README_JP.md
  19. 1 1
      apps/app/bin/openapi/generate-operation-ids/cli.spec.ts
  20. 1 1
      apps/app/bin/openapi/generate-operation-ids/cli.ts
  21. 3 3
      apps/app/bin/openapi/generate-operation-ids/generate-operation-ids.spec.ts
  22. 54 18
      apps/app/bin/print-memory-consumption.ts
  23. 0 14
      apps/app/config/cdn.js
  24. 1 1
      apps/app/config/migrate-mongo-config.js
  25. 1 0
      apps/app/config/next-i18next.config.js
  26. 2 2
      apps/app/docker/Dockerfile
  27. 5 5
      apps/app/docker/README.md
  28. 2 2
      apps/app/next.config.js
  29. 2 2
      apps/app/package.json
  30. 18 8
      apps/app/src/client/components/Admin/ElasticsearchManagement/ElasticsearchManagement.tsx
  31. 22 31
      apps/app/src/client/components/Admin/ImportData/GrowiArchive/ImportCollectionItem.jsx
  32. 5 2
      apps/app/src/client/components/TreeItem/TreeItemLayout.tsx
  33. 1 1
      apps/app/src/features/comment/server/events/event-emitter.ts
  34. 1 1
      apps/app/src/features/external-user-group/client/components/ExternalUserGroup/ExternalUserGroupManagement.tsx
  35. 1 0
      apps/app/src/features/external-user-group/server/models/external-user-group-relation.ts
  36. 1 0
      apps/app/src/features/external-user-group/server/models/external-user-group.ts
  37. 1 1
      apps/app/src/features/external-user-group/server/routes/apiv3/external-user-group.ts
  38. 1 1
      apps/app/src/features/growi-plugin/server/consts/index.ts
  39. 1 1
      apps/app/src/features/opentelemetry/server/anonymization/handlers/page-access-handler.spec.ts
  40. 6 3
      apps/app/src/features/opentelemetry/server/anonymization/handlers/page-access-handler.ts
  41. 1 1
      apps/app/src/features/opentelemetry/server/anonymization/handlers/page-api-handler.spec.ts
  42. 5 2
      apps/app/src/features/opentelemetry/server/anonymization/handlers/page-api-handler.ts
  43. 1 1
      apps/app/src/features/opentelemetry/server/anonymization/handlers/page-listing-api-handler.spec.ts
  44. 5 2
      apps/app/src/features/opentelemetry/server/anonymization/handlers/page-listing-api-handler.ts
  45. 1 1
      apps/app/src/features/opentelemetry/server/anonymization/handlers/search-api-handler.spec.ts
  46. 1 1
      apps/app/src/features/opentelemetry/server/anonymization/handlers/search-api-handler.ts
  47. 1 1
      apps/app/src/features/opentelemetry/server/anonymization/interfaces/anonymization-module.ts
  48. 1 1
      apps/app/src/features/opentelemetry/server/custom-metrics/application-metrics.spec.ts
  49. 1 1
      apps/app/src/features/opentelemetry/server/custom-metrics/application-metrics.ts
  50. 2 2
      apps/app/src/features/opentelemetry/server/logger.ts
  51. 1 1
      apps/app/src/features/opentelemetry/server/node-sdk-configuration.ts
  52. 1 1
      apps/app/src/features/opentelemetry/server/node-sdk-resource.ts
  53. 1 1
      apps/app/src/features/page-bulk-export/client/components/PageBulkExportSelectModal.tsx
  54. 3 3
      apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-cron/index.ts
  55. 2 2
      apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-cron/steps/create-page-snapshots-async.ts
  56. 3 3
      apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-cron/steps/export-pages-to-fs-async.ts
  57. 2 2
      apps/app/src/features/templates/server/routes/apiv3/index.ts
  58. 2 2
      apps/app/src/migrations/20211227060705-revision-path-to-page-id-schema-migration--fixed-7549.js
  59. 2 0
      apps/app/src/server/crowi/express-init.js
  60. 5 3
      apps/app/src/server/crowi/index.js
  61. 3 2
      apps/app/src/server/routes/apiv3/g2g-transfer.ts
  62. 23 17
      apps/app/src/server/routes/apiv3/import.ts
  63. 3 3
      apps/app/src/server/routes/attachment/get.ts
  64. 1 1
      apps/app/src/server/routes/ogp.ts
  65. 5 0
      apps/app/src/server/service/config-manager/config-definition.ts
  66. 1 1
      apps/app/src/server/service/export.ts
  67. 59 20
      apps/app/src/server/service/file-uploader/aws/index.ts
  68. 81 16
      apps/app/src/server/service/file-uploader/azure.ts
  69. 34 10
      apps/app/src/server/service/file-uploader/gcs/index.ts
  70. 114 52
      apps/app/src/server/service/file-uploader/gridfs.ts
  71. 25 4
      apps/app/src/server/service/file-uploader/local.ts
  72. 47 47
      apps/app/src/server/service/file-uploader/utils/headers.ts
  73. 6 6
      apps/app/src/server/service/g2g-transfer.ts
  74. 1 3
      apps/app/src/server/service/growi-bridge/index.ts
  75. 1 1
      apps/app/src/server/service/i18next.ts
  76. 23 3
      apps/app/src/server/service/import/construct-convert-map.ts
  77. 131 95
      apps/app/src/server/service/import/import.ts
  78. 34 8
      apps/app/src/server/service/search-delegator/elasticsearch.ts
  79. 3 4
      apps/app/src/server/util/project-dir-utils.ts
  80. 1 2
      apps/app/src/stores-universal/context.tsx
  81. 1 0
      apps/app/src/stores-universal/use-context-swr.tsx
  82. 2 2
      apps/app/src/utils/next.config.utils.js
  83. 1 1
      apps/app/test/integration/models/page-redirect.test.js
  84. 1 1
      apps/app/test/integration/models/page.test.js
  85. 1 1
      apps/app/test/integration/models/user.test.js
  86. 1 1
      apps/app/test/integration/setup-crowi.ts
  87. 2 2
      apps/pdf-converter/docker/Dockerfile
  88. 1 1
      apps/pdf-converter/package.json
  89. 2 2
      apps/slackbot-proxy/docker/Dockerfile
  90. 1 1
      apps/slackbot-proxy/package.json
  91. 7 2
      biome.json
  92. 3 3
      package.json
  93. 1 1
      packages/presentation/src/client/components/GrowiSlides.tsx
  94. 1 1
      packages/presentation/src/client/services/renderer/extract-sections.ts
  95. 1 1
      packages/remark-attachment-refs/src/client/components/ExtractedAttachments.tsx
  96. 2 2
      packages/remark-attachment-refs/src/server/routes/refs.ts
  97. 1 1
      packages/remark-drawio/src/components/DrawioViewer.tsx
  98. 0 8
      packages/remark-growi-directive/src/mdast-util-growi-directive/lib/index.js
  99. 0 2
      packages/remark-growi-directive/src/micromark-extension-growi-directive/lib/factory-attributes.js
  100. 1 1
      packages/remark-lsx/src/client/components/Lsx.tsx

+ 1 - 1
.devcontainer/app/devcontainer.json

@@ -8,7 +8,7 @@
 
 
   "features": {
   "features": {
     "ghcr.io/devcontainers/features/node:1": {
     "ghcr.io/devcontainers/features/node:1": {
-      "version": "22.17.0"
+      "version": "20.18.3"
     }
     }
   },
   },
 
 

+ 1 - 1
.devcontainer/compose.extend.template.yml

@@ -3,7 +3,7 @@
 services:
 services:
   pdf-converter:
   pdf-converter:
     # enabling devcontainer 'features' was not working for secondary devcontainer (https://github.com/devcontainers/features/issues/1175)
     # enabling devcontainer 'features' was not working for secondary devcontainer (https://github.com/devcontainers/features/issues/1175)
-    image: mcr.microsoft.com/vscode/devcontainers/javascript-node:1-22
+    image: mcr.microsoft.com/vscode/devcontainers/javascript-node:1-20
     volumes:
     volumes:
       - ..:/workspace/growi:delegated
       - ..:/workspace/growi:delegated
       - pnpm-store:/workspace/.pnpm-store
       - pnpm-store:/workspace/.pnpm-store

+ 5 - 5
.github/mergify.yml

@@ -7,17 +7,17 @@ queue_rules:
       - check-success ~= ci-app-launch-dev
       - check-success ~= ci-app-launch-dev
       - -check-failure ~= ci-app-
       - -check-failure ~= ci-app-
       - -check-failure ~= ci-slackbot-
       - -check-failure ~= ci-slackbot-
-      - -check-failure ~= test-prod-node22 /
+      - -check-failure ~= test-prod-node20 /
     merge_conditions:
     merge_conditions:
       - check-success ~= ci-app-lint
       - check-success ~= ci-app-lint
       - check-success ~= ci-app-test
       - check-success ~= ci-app-test
       - check-success ~= ci-app-launch-dev
       - check-success ~= ci-app-launch-dev
-      - check-success ~= test-prod-node22 / build-prod
-      - check-success ~= test-prod-node22 / launch-prod
-      - check-success ~= test-prod-node22 / run-playwright
+      - check-success = test-prod-node20 / build-prod
+      - check-success ~= test-prod-node20 / launch-prod
+      - check-success ~= test-prod-node20 / run-playwright
       - -check-failure ~= ci-app-
       - -check-failure ~= ci-app-
       - -check-failure ~= ci-slackbot-
       - -check-failure ~= ci-slackbot-
-      - -check-failure ~= test-prod-node22 /
+      - -check-failure ~= test-prod-node20 /
 
 
 pull_request_rules:
 pull_request_rules:
   - name: Automatic queue to merge
   - name: Automatic queue to merge

+ 4 - 4
.github/workflows/ci-app-prod.yml

@@ -39,7 +39,7 @@ concurrency:
 
 
 jobs:
 jobs:
 
 
-  test-prod-node20:
+  test-prod-node18:
     uses: growilabs/growi/.github/workflows/reusable-app-prod.yml@master
     uses: growilabs/growi/.github/workflows/reusable-app-prod.yml@master
     if: |
     if: |
       ( github.event_name == 'push'
       ( github.event_name == 'push'
@@ -48,13 +48,13 @@ jobs:
         || startsWith( github.base_ref, 'release/' )
         || startsWith( github.base_ref, 'release/' )
         || startsWith( github.head_ref, 'mergify/merge-queue/' ))
         || startsWith( github.head_ref, 'mergify/merge-queue/' ))
     with:
     with:
-      node-version: 20.x
+      node-version: 18.x
       skip-e2e-test: true
       skip-e2e-test: true
     secrets:
     secrets:
       SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
       SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
 
 
 
 
-  test-prod-node22:
+  test-prod-node20:
     uses: growilabs/growi/.github/workflows/reusable-app-prod.yml@master
     uses: growilabs/growi/.github/workflows/reusable-app-prod.yml@master
     if: |
     if: |
       ( github.event_name == 'push'
       ( github.event_name == 'push'
@@ -63,7 +63,7 @@ jobs:
         || startsWith( github.base_ref, 'release/' )
         || startsWith( github.base_ref, 'release/' )
         || startsWith( github.head_ref, 'mergify/merge-queue/' ))
         || startsWith( github.head_ref, 'mergify/merge-queue/' ))
     with:
     with:
-      node-version: 22.x
+      node-version: 20.x
       skip-e2e-test: ${{ contains( github.event.pull_request.labels.*.name, 'dependencies' ) }}
       skip-e2e-test: ${{ contains( github.event.pull_request.labels.*.name, 'dependencies' ) }}
     secrets:
     secrets:
       SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
       SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

+ 1 - 1
.github/workflows/ci-app.yml

@@ -44,7 +44,7 @@ jobs:
 
 
     strategy:
     strategy:
       matrix:
       matrix:
-        node-version: [22.x]
+        node-version: [20.x]
 
 
     steps:
     steps:
       - uses: actions/checkout@v4
       - uses: actions/checkout@v4

+ 2 - 2
.github/workflows/ci-pdf-converter.yml

@@ -29,7 +29,7 @@ jobs:
 
 
     strategy:
     strategy:
       matrix:
       matrix:
-        node-version: [22.x]
+        node-version: [20.x]
 
 
     steps:
     steps:
     - uses: actions/checkout@v4
     - uses: actions/checkout@v4
@@ -104,7 +104,7 @@ jobs:
 
 
     strategy:
     strategy:
       matrix:
       matrix:
-        node-version: [22.x]
+        node-version: [20.x]
 
 
     steps:
     steps:
     - uses: actions/checkout@v4
     - uses: actions/checkout@v4

+ 3 - 3
.github/workflows/ci-slackbot-proxy.yml

@@ -30,7 +30,7 @@ jobs:
 
 
     strategy:
     strategy:
       matrix:
       matrix:
-        node-version: [22.x]
+        node-version: [20.x]
 
 
     steps:
     steps:
     - uses: actions/checkout@v4
     - uses: actions/checkout@v4
@@ -85,7 +85,7 @@ jobs:
 
 
     strategy:
     strategy:
       matrix:
       matrix:
-        node-version: [22.x]
+        node-version: [20.x]
 
 
     services:
     services:
       mysql:
       mysql:
@@ -163,7 +163,7 @@ jobs:
 
 
     strategy:
     strategy:
       matrix:
       matrix:
-        node-version: [22.x]
+        node-version: [20.x]
 
 
     services:
     services:
       mysql:
       mysql:

+ 1 - 1
.github/workflows/list-unhealthy-branches.yml

@@ -16,7 +16,7 @@ jobs:
 
 
     - uses: actions/setup-node@v4
     - uses: actions/setup-node@v4
       with:
       with:
-        node-version: '20'
+        node-version: '18'
 
 
     - name: List branches
     - name: List branches
       id: list-branches
       id: list-branches

+ 1 - 1
.github/workflows/release-pdf-converter.yml

@@ -72,7 +72,7 @@ jobs:
 
 
     strategy:
     strategy:
       matrix:
       matrix:
-        node-version: [22.x]
+        node-version: [20.x]
 
 
     steps:
     steps:
     - uses: actions/checkout@v4
     - uses: actions/checkout@v4

+ 30 - 8
.github/workflows/release-rc.yml

@@ -17,7 +17,8 @@ jobs:
     runs-on: ubuntu-latest
     runs-on: ubuntu-latest
 
 
     outputs:
     outputs:
-      TAGS: ${{ steps.meta.outputs.tags }}
+      TAGS_WESEEK: ${{ steps.meta-weseek.outputs.tags }}
+      TAGS_GROWILABS: ${{ steps.meta-growilabs.outputs.tags }}
 
 
     steps:
     steps:
     - uses: actions/checkout@v4
     - uses: actions/checkout@v4
@@ -26,9 +27,9 @@ jobs:
       uses: myrotvorets/info-from-package-json-action@v2.0.2
       uses: myrotvorets/info-from-package-json-action@v2.0.2
       id: package-json
       id: package-json
 
 
-    - name: Docker meta for docker.io
+    - name: Docker meta for weseek/growi
       uses: docker/metadata-action@v5
       uses: docker/metadata-action@v5
-      id: meta
+      id: meta-weseek
       with:
       with:
         images: docker.io/weseek/growi
         images: docker.io/weseek/growi
         sep-tags: ','
         sep-tags: ','
@@ -36,6 +37,15 @@ jobs:
           type=raw,value=${{ steps.package-json.outputs.packageVersion }}
           type=raw,value=${{ steps.package-json.outputs.packageVersion }}
           type=raw,value=${{ steps.package-json.outputs.packageVersion }}.{{sha}}
           type=raw,value=${{ steps.package-json.outputs.packageVersion }}.{{sha}}
 
 
+    - name: Docker meta for growilabs/growi
+      uses: docker/metadata-action@v5
+      id: meta-growilabs
+      with:
+        images: docker.io/growilabs/growi
+        sep-tags: ','
+        tags: |
+          type=raw,value=${{ steps.package-json.outputs.packageVersion }}
+          type=raw,value=${{ steps.package-json.outputs.packageVersion }}.{{sha}}
 
 
   build-image-rc:
   build-image-rc:
     uses: growilabs/growi/.github/workflows/reusable-app-build-image.yml@master
     uses: growilabs/growi/.github/workflows/reusable-app-build-image.yml@master
@@ -46,16 +56,28 @@ jobs:
       AWS_ROLE_TO_ASSUME_FOR_OIDC: ${{ secrets.AWS_ROLE_TO_ASSUME_FOR_OIDC }}
       AWS_ROLE_TO_ASSUME_FOR_OIDC: ${{ secrets.AWS_ROLE_TO_ASSUME_FOR_OIDC }}
 
 
 
 
-  publish-image-rc:
+  publish-rc-image-for-growilabs:
     needs: [determine-tags, build-image-rc]
     needs: [determine-tags, build-image-rc]
 
 
     uses: growilabs/growi/.github/workflows/reusable-app-create-manifests.yml@master
     uses: growilabs/growi/.github/workflows/reusable-app-create-manifests.yml@master
     with:
     with:
-      tags: ${{ needs.determine-tags.outputs.TAGS }}
+      tags: ${{ needs.determine-tags.outputs.TAGS_GROWILABS }}
       registry: docker.io
       registry: docker.io
-      image-name: weseek/growi
-      docker-registry-username: wsmoogle
+      image-name: 'growilabs/growi'
+      docker-registry-username: 'growimoogle'
       tag-temporary: latest-rc
       tag-temporary: latest-rc
     secrets:
     secrets:
-      DOCKER_REGISTRY_PASSWORD: ${{ secrets.DOCKER_REGISTRY_PASSWORD }}
+      DOCKER_REGISTRY_PASSWORD: ${{ secrets.DOCKER_REGISTRY_PASSWORD_GROWIMOOGLE }}
 
 
+  publish-rc-image-for-weseek:
+    needs: [determine-tags, build-image-rc]
+
+    uses: growilabs/growi/.github/workflows/reusable-app-create-manifests.yml@master
+    with:
+      tags: ${{ needs.determine-tags.outputs.TAGS_WESEEK }}
+      registry: docker.io
+      image-name: 'growilabs/growi'
+      docker-registry-username: 'wsmoogle'
+      tag-temporary: latest-rc
+    secrets:
+      DOCKER_REGISTRY_PASSWORD: ${{ secrets.DOCKER_REGISTRY_PASSWORD }}

+ 1 - 1
.github/workflows/release-slackbot-proxy.yml

@@ -92,7 +92,7 @@ jobs:
 
 
     - uses: actions/setup-node@v4
     - uses: actions/setup-node@v4
       with:
       with:
-        node-version: '20'
+        node-version: '18'
         cache: 'pnpm'
         cache: 'pnpm'
 
 
     - name: Install dependencies
     - name: Install dependencies

+ 2 - 2
.github/workflows/release-subpackages.yml

@@ -32,7 +32,7 @@ jobs:
 
 
     - uses: actions/setup-node@v4
     - uses: actions/setup-node@v4
       with:
       with:
-        node-version: '22'
+        node-version: '20'
         cache: 'pnpm'
         cache: 'pnpm'
 
 
     - name: Install dependencies
     - name: Install dependencies
@@ -75,7 +75,7 @@ jobs:
 
 
     - uses: actions/setup-node@v4
     - uses: actions/setup-node@v4
       with:
       with:
-        node-version: '22'
+        node-version: '20'
         cache: 'pnpm'
         cache: 'pnpm'
 
 
     - name: Install dependencies
     - name: Install dependencies

+ 2 - 2
.github/workflows/release.yml

@@ -27,7 +27,7 @@ jobs:
 
 
     - uses: actions/setup-node@v4
     - uses: actions/setup-node@v4
       with:
       with:
-        node-version: '22'
+        node-version: '20'
         cache: 'pnpm'
         cache: 'pnpm'
 
 
     - name: Install dependencies
     - name: Install dependencies
@@ -198,7 +198,7 @@ jobs:
 
 
     - uses: actions/setup-node@v4
     - uses: actions/setup-node@v4
       with:
       with:
-        node-version: '22'
+        node-version: '20'
         cache: 'pnpm'
         cache: 'pnpm'
 
 
     - name: Install dependencies
     - name: Install dependencies

+ 0 - 83
.serena/memories/git-bisect-memory-consumption-investigation-plan.md

@@ -1,83 +0,0 @@
-# git bisectによるメモリ消費量増加の原因特定調査計画
-
-## 調査目的
-2025/7/1以降、production buildしたサーバーのメモリ利用量(Heap Total)が約25%~33%増加した原因コミットを特定する。
-
-## 判定基準
-- **Good:** Heap Total ≒ 90MB
-- **Bad:** Heap Total ≒ 110MB
-
-## 調査範囲
-- 開始コミット: タグ `v7.2.9` (acdccb05538b72a593d690ce042922d6b71a4a63)
-- 終了コミット: master (db1d378da55ffa8c08b4f1a0cca3b6a2a3e2c219)
-
-## 実行手順
-1. 対象コミットをチェックアウト
-   ```bash
-   git checkout {target-commit}
-   ```
-2. ビルド
-   ```bash
-   cd /workspace/growi/apps/app
-   turbo run bootstrap
-   turbo run build
-   ```
-3. サーバー起動
-   ```bash
-   NODE_ENV=production node --inspect -r dotenv-flow/config dist/server/app.js
-   ```
-   サーバーはバックグラウンドで起動し、プロセスIDを /tmp/growi_server.pid に記録
-4. 10秒 sleep してからメモリ消費量計測
-   ```bash
-   sleep 10
-   cp /home/vscode/print-memory-consumption.ts tmp/
-   node --experimental-strip-types --experimental-transform-types --experimental-detect-module --no-warnings=ExperimentalWarning tmp/print-memory-consumption.ts
-   ```
-5. サーバー停止
-  ```bash
-  kill $(cat /tmp/growi_server.pid) && rm /tmp/growi_server.pid
-  ```
-6. Heap Total値でGood/Bad判定
-
-## 注意事項
-- サーバー起動直後の値で判定する(アクセスによるメモリリークの可能性もあるため、なるべくアクセス前に計測)。
-- 必要に応じて複数回計測し、安定した値を採用する。
-- bisectの自動化には、Heap Total値の判定をスクリプト化することで効率化可能。
-
----
-
-# git bisect 実施指示書
-
-1. bisect開始
-   ```bash
-   git bisect start
-   git bisect bad master
-   git bisect good v7.2.9
-   ```
-2. 各コミットで以下を実施
-   - 上記「実行手順」に従いビルド・起動・計測
-   - Heap Total値でGood/Bad判定
-   - 判定結果に応じて
-     ```bash
-     git bisect good
-     # または
-     git bisect bad
-     ```
-3. bisect終了後、原因コミットを記録
-   ```bash
-   git bisect reset
-   ```
-
----
-
-## 参考: 判定自動化例(bashスクリプト)
-
-```bash
-HEAP_TOTAL=$(node .../print-memory-consumption.ts | grep 'Heap Total' | awk '{print $3}')
-if (( $(echo "$HEAP_TOTAL < 100" | bc -l) )); then
-  exit 0  # good
-else
-  exit 1  # bad
-fi
-```
-bisect runで自動化する場合はこのスクリプトを利用してください。

+ 13 - 7
.serena/memories/suggested_commands.md

@@ -11,7 +11,7 @@ pnpm install
 ## 開発サーバー
 ## 開発サーバー
 ```bash
 ```bash
 # メインアプリケーション開発モード
 # メインアプリケーション開発モード
-cd apps/app && pnpm run dev
+cd /workspace/growi/apps/app && pnpm run dev
 
 
 # ルートから起動(本番用ビルド後)
 # ルートから起動(本番用ビルド後)
 pnpm start
 pnpm start
@@ -31,20 +31,26 @@ turbo run build
 
 
 ## Lint・フォーマット
 ## Lint・フォーマット
 ```bash
 ```bash
+# 全てのLint実行
+pnpm run lint
+```
+
+## apps/app の Lint・フォーマット
+```bash
 # 【推奨】Biome実行(lint + format)
 # 【推奨】Biome実行(lint + format)
-pnpm run lint:biome
+cd /workspace/growi/apps/app pnpm run lint:biome
 
 
 # 【過渡期】ESLint実行(廃止予定)
 # 【過渡期】ESLint実行(廃止予定)
-pnpm run lint:eslint
+cd /workspace/growi/apps/app pnpm run lint:eslint
 
 
 # Stylelint実行
 # Stylelint実行
-pnpm run lint:styles
+cd /workspace/growi/apps/app pnpm run lint:styles
 
 
-# 全てのLint実行(過渡期対応)
-pnpm run lint
+# 全てのLint実行
+cd /workspace/growi/apps/app pnpm run lint
 
 
 # TypeScript型チェック
 # TypeScript型チェック
-pnpm run lint:typecheck
+cd /workspace/growi/apps/app pnpm run lint:typecheck
 ```
 ```
 
 
 ## テスト
 ## テスト

+ 9 - 1
CHANGELOG.md

@@ -1,9 +1,17 @@
 # Changelog
 # Changelog
 
 
-## [Unreleased](https://github.com/growilabs/compare/v7.3.0...HEAD)
+## [Unreleased](https://github.com/growilabs/compare/v7.3.1...HEAD)
 
 
 *Please do not manually update this file. We've automated the process.*
 *Please do not manually update this file. We've automated the process.*
 
 
+## [v7.3.1](https://github.com/growilabs/compare/v7.3.0...v7.3.1) - 2025-09-22
+
+### 🧰 Maintenance
+
+* support: Revert Node.js runtime version to v18/20 for memory consumption
+* ci(deps-dev): bump vite from 5.4.19 to 5.4.20 (#10294) @[dependabot[bot]](https://github.com/apps/dependabot)
+* ci(deps): bump next from 14.2.30 to 14.2.32 (#10312) @[dependabot[bot]](https://github.com/apps/dependabot)
+
 ## [v7.3.0](https://github.com/growilabs/compare/v7.2.10...v7.3.0) - 2025-09-11
 ## [v7.3.0](https://github.com/growilabs/compare/v7.2.10...v7.3.0) - 2025-09-11
 
 
 ### 💎 Features
 ### 💎 Features

+ 5 - 5
README.md

@@ -16,7 +16,7 @@
 
 
 # GROWI
 # GROWI
 
 
-[![docker pulls](https://img.shields.io/docker/pulls/weseek/growi.svg)](https://hub.docker.com/r/weseek/growi/)
+[![docker pulls](https://img.shields.io/docker/pulls/growilabs/growi.svg)](https://hub.docker.com/r/growilabs/growi/)
 [![CodeQL](https://github.com/growilabs/growi/actions/workflows/codeql-analysis.yml/badge.svg)](https://github.com/growilabs/growi/actions/workflows/codeql-analysis.yml)
 [![CodeQL](https://github.com/growilabs/growi/actions/workflows/codeql-analysis.yml/badge.svg)](https://github.com/growilabs/growi/actions/workflows/codeql-analysis.yml)
 [![Node CI for app development](https://github.com/growilabs/growi/actions/workflows/ci-app.yml/badge.svg)](https://github.com/growilabs/growi/actions/workflows/ci-app.yml)
 [![Node CI for app development](https://github.com/growilabs/growi/actions/workflows/ci-app.yml/badge.svg)](https://github.com/growilabs/growi/actions/workflows/ci-app.yml)
 [![Node CI for app production](https://github.com/growilabs/growi/actions/workflows/ci-app-prod.yml/badge.svg)](https://github.com/growilabs/growi/actions/workflows/ci-app-prod.yml)
 [![Node CI for app production](https://github.com/growilabs/growi/actions/workflows/ci-app-prod.yml/badge.svg)](https://github.com/growilabs/growi/actions/workflows/ci-app-prod.yml)
@@ -81,9 +81,9 @@ See [GROWI Docs: Environment Variables](https://docs.growi.org/en/admin-guide/ad
 
 
 ## Dependencies
 ## Dependencies
 
 
-- Node.js v20.x or v22.x
-- npm 10.x
-- pnpm 10.x
+- Node.js v18.x or v20.x
+- npm 6.x
+- pnpm 9.x
 - [Turborepo](https://turbo.build/repo)
 - [Turborepo](https://turbo.build/repo)
 - MongoDB v6.x or v8.x
 - MongoDB v6.x or v8.x
 
 
@@ -144,5 +144,5 @@ If you have questions or suggestions, you can [join our Slack team](https://comm
 [growi]: https://github.com/growilabs/growi
 [growi]: https://github.com/growilabs/growi
 [issues]: https://github.com/growilabs/growi/issues
 [issues]: https://github.com/growilabs/growi/issues
 [pulls]: https://github.com/growilabs/growi/pulls
 [pulls]: https://github.com/growilabs/growi/pulls
-[dockerhub]: https://hub.docker.com/r/weseek/growi
+[dockerhub]: https://hub.docker.com/r/growilabs/growi
 [docker-compose]: https://github.com/growilabs/growi-docker-compose
 [docker-compose]: https://github.com/growilabs/growi-docker-compose

+ 5 - 5
README_JP.md

@@ -16,7 +16,7 @@
 
 
 # GROWI
 # GROWI
 
 
-[![docker pulls](https://img.shields.io/docker/pulls/weseek/growi.svg)](https://hub.docker.com/r/weseek/growi/)
+[![docker pulls](https://img.shields.io/docker/pulls/growilabs/growi.svg)](https://hub.docker.com/r/growilabs/growi/)
 [![CodeQL](https://github.com/growilabs/growi/actions/workflows/codeql-analysis.yml/badge.svg)](https://github.com/growilabs/growi/actions/workflows/codeql-analysis.yml)
 [![CodeQL](https://github.com/growilabs/growi/actions/workflows/codeql-analysis.yml/badge.svg)](https://github.com/growilabs/growi/actions/workflows/codeql-analysis.yml)
 [![Node CI for app development](https://github.com/growilabs/growi/actions/workflows/ci-app.yml/badge.svg)](https://github.com/growilabs/growi/actions/workflows/ci-app.yml)
 [![Node CI for app development](https://github.com/growilabs/growi/actions/workflows/ci-app.yml/badge.svg)](https://github.com/growilabs/growi/actions/workflows/ci-app.yml)
 [![Node CI for app production](https://github.com/growilabs/growi/actions/workflows/ci-app-prod.yml/badge.svg)](https://github.com/growilabs/growi/actions/workflows/ci-app-prod.yml)
 [![Node CI for app production](https://github.com/growilabs/growi/actions/workflows/ci-app-prod.yml/badge.svg)](https://github.com/growilabs/growi/actions/workflows/ci-app-prod.yml)
@@ -81,9 +81,9 @@ Crowi からの移行は **[こちら](https://docs.growi.org/en/admin-guide/mig
 
 
 ## 依存関係
 ## 依存関係
 
 
-- Node.js v20.x or v22.x
-- npm 10.x
-- pnpm 10.x
+- Node.js v18.x or v20.x
+- npm 6.x
+- pnpm 9.x
 - [Turborepo](https://turbo.build/repo)
 - [Turborepo](https://turbo.build/repo)
 - MongoDB v6.x or v8.x
 - MongoDB v6.x or v8.x
 
 
@@ -143,5 +143,5 @@ Issue と Pull requests の作成は英語・日本語どちらでも受け付
   [growi]: https://github.com/growilabs/growi
   [growi]: https://github.com/growilabs/growi
   [issues]: https://github.com/growilabs/growi/issues
   [issues]: https://github.com/growilabs/growi/issues
   [pulls]: https://github.com/growilabs/growi/pulls
   [pulls]: https://github.com/growilabs/growi/pulls
-  [dockerhub]: https://hub.docker.com/r/weseek/growi
+  [dockerhub]: https://hub.docker.com/r/growilabs/growi
   [docker-compose]: https://github.com/growilabs/growi-docker-compose
   [docker-compose]: https://github.com/growilabs/growi-docker-compose

+ 1 - 1
apps/app/bin/openapi/generate-operation-ids/cli.spec.ts

@@ -1,4 +1,4 @@
-import { writeFileSync } from 'fs';
+import { writeFileSync } from 'node:fs';
 
 
 import { beforeEach, describe, expect, it, vi } from 'vitest';
 import { beforeEach, describe, expect, it, vi } from 'vitest';
 
 

+ 1 - 1
apps/app/bin/openapi/generate-operation-ids/cli.ts

@@ -1,5 +1,5 @@
+import { writeFileSync } from 'node:fs';
 import { Command } from 'commander';
 import { Command } from 'commander';
-import { writeFileSync } from 'fs';
 
 
 import { generateOperationIds } from './generate-operation-ids';
 import { generateOperationIds } from './generate-operation-ids';
 
 

+ 3 - 3
apps/app/bin/openapi/generate-operation-ids/generate-operation-ids.spec.ts

@@ -1,7 +1,7 @@
-import fs from 'fs/promises';
+import fs from 'node:fs/promises';
+import { tmpdir } from 'node:os';
+import path from 'node:path';
 import type { OpenAPI3 } from 'openapi-typescript';
 import type { OpenAPI3 } from 'openapi-typescript';
-import { tmpdir } from 'os';
-import path from 'path';
 import { describe, expect, it } from 'vitest';
 import { describe, expect, it } from 'vitest';
 
 
 import { generateOperationIds } from './generate-operation-ids';
 import { generateOperationIds } from './generate-operation-ids';

+ 54 - 18
apps/app/bin/print-memory-consumption.ts

@@ -11,7 +11,7 @@
  *        print-memory-consumption.ts [--port=9229] [--host=localhost] [--json]
  *        print-memory-consumption.ts [--port=9229] [--host=localhost] [--json]
  */
  */
 
 
-import { get } from 'http';
+import { get } from 'node:http';
 
 
 import WebSocket from 'ws';
 import WebSocket from 'ws';
 
 
@@ -55,7 +55,10 @@ class NodeMemoryConsumptionChecker {
   }
   }
 
 
   // Helper method to get pressure status and icon
   // Helper method to get pressure status and icon
-  private getPressureInfo(percentage: number): { status: string; icon: string } {
+  private getPressureInfo(percentage: number): {
+    status: string;
+    icon: string;
+  } {
     if (percentage > 90) return { status: 'HIGH PRESSURE', icon: '🔴' };
     if (percentage > 90) return { status: 'HIGH PRESSURE', icon: '🔴' };
     if (percentage > 70) return { status: 'MODERATE PRESSURE', icon: '🟡' };
     if (percentage > 70) return { status: 'MODERATE PRESSURE', icon: '🟡' };
     return { status: 'LOW PRESSURE', icon: '🟢' };
     return { status: 'LOW PRESSURE', icon: '🟢' };
@@ -71,9 +74,13 @@ class NodeMemoryConsumptionChecker {
     return new Promise((resolve, reject) => {
     return new Promise((resolve, reject) => {
       get(url, (res) => {
       get(url, (res) => {
         let data = '';
         let data = '';
-        res.on('data', (chunk) => { data += chunk; });
+        res.on('data', (chunk) => {
+          data += chunk;
+        });
         res.on('end', () => resolve(data));
         res.on('end', () => resolve(data));
-      }).on('error', (err) => reject(this.createError(`Cannot connect to ${url}: ${err.message}`)));
+      }).on('error', (err) =>
+        reject(this.createError(`Cannot connect to ${url}: ${err.message}`)),
+      );
     });
     });
   }
   }
 
 
@@ -218,9 +225,20 @@ class NodeMemoryConsumptionChecker {
       return;
       return;
     }
     }
 
 
-    const [heapUsedMB, heapTotalMB, heapLimitMB, rssMB, externalMB, arrayBuffersMB] = [
-      this.toMB(info.heapUsed), this.toMB(info.heapTotal), this.toMB(info.heapLimit || 0),
-      this.toMB(info.rss), this.toMB(info.external), this.toMB(info.arrayBuffers)
+    const [
+      heapUsedMB,
+      heapTotalMB,
+      heapLimitMB,
+      rssMB,
+      externalMB,
+      arrayBuffersMB,
+    ] = [
+      this.toMB(info.heapUsed),
+      this.toMB(info.heapTotal),
+      this.toMB(info.heapLimit || 0),
+      this.toMB(info.rss),
+      this.toMB(info.external),
+      this.toMB(info.arrayBuffers),
     ];
     ];
 
 
     console.log('\n📊 Node.js Memory Information');
     console.log('\n📊 Node.js Memory Information');
@@ -237,20 +255,35 @@ class NodeMemoryConsumptionChecker {
     // Heap Limits
     // Heap Limits
     console.log('\n🔸 Heap Limits:');
     console.log('\n🔸 Heap Limits:');
     if (info.heapLimit) {
     if (info.heapLimit) {
-      const limitType = info.heapLimitSource === 'explicit' ? 'Explicit Limit' : 'Default Limit';
-      const limitSource = info.heapLimitSource === 'explicit' ? '(from --max-old-space-size)' : '(system default)';
-      console.log(`  ${limitType}: ${heapLimitMB.toFixed(2)} MB ${limitSource}`);
-      console.log(`  Global Usage:   ${((heapUsedMB / heapLimitMB) * 100).toFixed(2)}% of maximum`);
+      const limitType =
+        info.heapLimitSource === 'explicit'
+          ? 'Explicit Limit'
+          : 'Default Limit';
+      const limitSource =
+        info.heapLimitSource === 'explicit'
+          ? '(from --max-old-space-size)'
+          : '(system default)';
+      console.log(
+        `  ${limitType}: ${heapLimitMB.toFixed(2)} MB ${limitSource}`,
+      );
+      console.log(
+        `  Global Usage:   ${((heapUsedMB / heapLimitMB) * 100).toFixed(2)}% of maximum`,
+      );
     }
     }
 
 
     // Heap Pressure Analysis
     // Heap Pressure Analysis
     const heapPressure = (info.heapUsed / info.heapTotal) * 100;
     const heapPressure = (info.heapUsed / info.heapTotal) * 100;
-    const { status: pressureStatus, icon: pressureIcon } = this.getPressureInfo(heapPressure);
+    const { status: pressureStatus, icon: pressureIcon } =
+      this.getPressureInfo(heapPressure);
    console.log('\n🔹 Memory Pressure Analysis:');
    console.log('\n🔹 Memory Pressure Analysis:');
-    console.log(`  Current Pool:   ${pressureIcon} ${pressureStatus} (${heapPressure.toFixed(1)}% of allocated heap)`);
+    console.log(
+      `  Current Pool:   ${pressureIcon} ${pressureStatus} (${heapPressure.toFixed(1)}% of allocated heap)`,
+    );
 
 
     if (heapPressure > 90) {
     if (heapPressure > 90) {
-      console.log('  📝 Note: High pressure is normal - Node.js will allocate more heap as needed');
+      console.log(
+        '  📝 Note: High pressure is normal - Node.js will allocate more heap as needed',
+      );
     }
     }
 
 
     // System Information
     // System Information
@@ -271,10 +304,13 @@ class NodeMemoryConsumptionChecker {
     console.log('\n📋 Summary:');
     console.log('\n📋 Summary:');
     if (info.heapLimit) {
     if (info.heapLimit) {
       const heapUsagePercent = (heapUsedMB / heapLimitMB) * 100;
       const heapUsagePercent = (heapUsedMB / heapLimitMB) * 100;
-      console.log(`Heap Memory: ${heapUsedMB.toFixed(2)} MB / ${heapLimitMB.toFixed(2)} MB (${heapUsagePercent.toFixed(2)}%)`);
-      console.log(heapUsagePercent > 80
-        ? '⚠️  Consider increasing heap limit with --max-old-space-size if needed'
-        : '✅ Memory usage is within healthy limits'
+      console.log(
+        `Heap Memory: ${heapUsedMB.toFixed(2)} MB / ${heapLimitMB.toFixed(2)} MB (${heapUsagePercent.toFixed(2)}%)`,
+      );
+      console.log(
+        heapUsagePercent > 80
+          ? '⚠️  Consider increasing heap limit with --max-old-space-size if needed'
+          : '✅ Memory usage is within healthy limits',
       );
       );
     }
     }
 
 

+ 0 - 14
apps/app/config/cdn.js

@@ -1,14 +0,0 @@
-import path from 'path';
-
-import { projectRoot } from '~/utils/project-dir-utils';
-
-export const cdnLocalScriptRoot = path.join(
-  projectRoot,
-  'public/static/js/cdn',
-);
-export const cdnLocalScriptWebRoot = '/static/js/cdn';
-export const cdnLocalStyleRoot = path.join(
-  projectRoot,
-  'public/static/styles/cdn',
-);
-export const cdnLocalStyleWebRoot = '/static/styles/cdn';

+ 1 - 1
apps/app/config/migrate-mongo-config.js

@@ -6,7 +6,7 @@
  */
  */
 const isProduction = process.env.NODE_ENV === 'production';
 const isProduction = process.env.NODE_ENV === 'production';
 
 
-const { URL } = require('url');
+const { URL } = require('node:url');
 
 
 const { getMongoUri, mongoOptions } = isProduction
 const { getMongoUri, mongoOptions } = isProduction
   ? // eslint-disable-next-line import/extensions, import/no-unresolved
   ? // eslint-disable-next-line import/extensions, import/no-unresolved

+ 1 - 0
apps/app/config/next-i18next.config.js

@@ -1,5 +1,6 @@
 const isDev = process.env.NODE_ENV === 'development';
 const isDev = process.env.NODE_ENV === 'development';
 
 
+// biome-ignore lint/style/useNodejsImportProtocol: ignore
 const path = require('path');
 const path = require('path');
 
 
 const { AllLang } = require('@growi/core');
 const { AllLang } = require('@growi/core');

+ 2 - 2
apps/app/docker/Dockerfile

@@ -6,7 +6,7 @@ ARG PNPM_HOME="/root/.local/share/pnpm"
 ##
 ##
 ## base
 ## base
 ##
 ##
-FROM node:22-slim AS base
+FROM node:20-slim AS base
 
 
 ARG OPT_DIR
 ARG OPT_DIR
 ARG PNPM_HOME
 ARG PNPM_HOME
@@ -72,7 +72,7 @@ RUN tar -zcf /tmp/packages.tar.gz \
 ##
 ##
 ## release
 ## release
 ##
 ##
-FROM node:22-slim
+FROM node:20-slim
 LABEL maintainer="Yuki Takei <yuki@weseek.co.jp>"
 LABEL maintainer="Yuki Takei <yuki@weseek.co.jp>"
 
 
 ARG OPT_DIR
 ARG OPT_DIR

+ 5 - 5
apps/app/docker/README.md

@@ -2,7 +2,7 @@
 GROWI Official docker image
 GROWI Official docker image
 ========================
 ========================
 
 
-[![Actions Status](https://github.com/growilabs/growi/workflows/Release/badge.svg)](https://github.com/growilabs/growi/actions) [![docker-pulls](https://img.shields.io/docker/pulls/weseek/growi.svg)](https://hub.docker.com/r/weseek/growi/) [![](https://images.microbadger.com/badges/image/weseek/growi.svg)](https://microbadger.com/images/weseek/growi)
+[![Actions Status](https://github.com/growilabs/growi/workflows/Release/badge.svg)](https://github.com/growilabs/growi/actions) [![docker-pulls](https://img.shields.io/docker/pulls/growilabs/growi.svg)](https://hub.docker.com/r/growilabs/growi/) 
 
 
 ![GROWI-x-docker](https://github.com/user-attachments/assets/1a82236d-5a85-4a2e-842a-971b4c1625e6)
 ![GROWI-x-docker](https://github.com/user-attachments/assets/1a82236d-5a85-4a2e-842a-971b4c1625e6)
 
 
@@ -18,7 +18,7 @@ Supported tags and respective Dockerfile links
 What is GROWI?
 What is GROWI?
 -------------
 -------------
 
 
-GROWI is a team collaboration software and it forked from [crowi](https://github.com/weseek/crowi/crowi)
+GROWI is a team collaboration software and it forked from [crowi](https://github.com/crowi/crowi)
 
 
 see: [growilabs/growi](https://github.com/growilabs/growi)
 see: [growilabs/growi](https://github.com/growilabs/growi)
 
 
@@ -41,7 +41,7 @@ Usage
 ```bash
 ```bash
 docker run -d \
 docker run -d \
     -e MONGO_URI=mongodb://MONGODB_HOST:MONGODB_PORT/growi \
     -e MONGO_URI=mongodb://MONGODB_HOST:MONGODB_PORT/growi \
-    weseek/growi
+    growilabs/growi
 ```
 ```
 
 
 and go to `http://localhost:3000/` .
 and go to `http://localhost:3000/` .
@@ -52,7 +52,7 @@ If you use ElasticSearch, type this:
 docker run -d \
 docker run -d \
     -e MONGO_URI=mongodb://MONGODB_HOST:MONGODB_PORT/growi \
     -e MONGO_URI=mongodb://MONGODB_HOST:MONGODB_PORT/growi \
     -e ELASTICSEARCH_URI=http://ELASTICSEARCH_HOST:ELASTICSEARCH_PORT/growi \
     -e ELASTICSEARCH_URI=http://ELASTICSEARCH_HOST:ELASTICSEARCH_PORT/growi \
-    weseek/growi
+    growilabs/growi
 ```
 ```
 
 
 
 
@@ -60,7 +60,7 @@ docker run -d \
 
 
 Using docker-compose is the fastest and the most convenient way to boot GROWI.
 Using docker-compose is the fastest and the most convenient way to boot GROWI.
 
 
-see: [weseek/growi-docker-compose](https://github.com/growilabs/growi-docker-compose)
+see: [growilabs/growi-docker-compose](https://github.com/growilabs/growi-docker-compose)
 
 
 
 
 Configuration
 Configuration

+ 2 - 2
apps/app/next.config.js

@@ -5,7 +5,7 @@
  * See: https://github.com/vercel/next.js/discussions/35969#discussioncomment-2522954
  * See: https://github.com/vercel/next.js/discussions/35969#discussioncomment-2522954
  */
  */
 
 
-const path = require('path');
+const path = require('node:path');
 
 
 const { withSuperjson } = require('next-superjson');
 const { withSuperjson } = require('next-superjson');
 const {
 const {
@@ -93,7 +93,7 @@ const optimizePackageImports = [
   '@growi/ui',
   '@growi/ui',
 ];
 ];
 
 
-module.exports = async (phase, { defaultConfig }) => {
+module.exports = async (phase) => {
   const { i18n, localePath } = require('./config/next-i18next.config');
   const { i18n, localePath } = require('./config/next-i18next.config');
 
 
   /** @type {import('next').NextConfig} */
   /** @type {import('next').NextConfig} */

+ 2 - 2
apps/app/package.json

@@ -1,6 +1,6 @@
 {
 {
   "name": "@growi/app",
   "name": "@growi/app",
-  "version": "7.3.1-RC.0",
+  "version": "7.3.2-RC.0",
   "license": "MIT",
   "license": "MIT",
   "private": "true",
   "private": "true",
   "scripts": {
   "scripts": {
@@ -166,7 +166,7 @@
     "mkdirp": "^1.0.3",
     "mkdirp": "^1.0.3",
     "mongodb": "^4.17.2",
     "mongodb": "^4.17.2",
     "mongoose": "^6.13.6",
     "mongoose": "^6.13.6",
-    "mongoose-gridfs": "^1.2.42",
+    "mongoose-gridfs": "^1.3.0",
     "mongoose-paginate-v2": "^1.3.9",
     "mongoose-paginate-v2": "^1.3.9",
     "mongoose-unique-validator": "^2.0.3",
     "mongoose-unique-validator": "^2.0.3",
     "multer": "~1.4.0",
     "multer": "~1.4.0",

+ 18 - 8
apps/app/src/client/components/Admin/ElasticsearchManagement/ElasticsearchManagement.tsx

@@ -14,7 +14,7 @@ import RebuildIndexControls from './RebuildIndexControls';
 import ReconnectControls from './ReconnectControls';
 import ReconnectControls from './ReconnectControls';
 import StatusTable from './StatusTable';
 import StatusTable from './StatusTable';
 
 
-const ElasticsearchManagement = () => {
+const ElasticsearchManagement = (): JSX.Element => {
   const { t } = useTranslation('admin');
   const { t } = useTranslation('admin');
   const { data: isSearchServiceReachable } = useIsSearchServiceReachable();
   const { data: isSearchServiceReachable } = useIsSearchServiceReachable();
   const { data: socket } = useAdminSocket();
   const { data: socket } = useAdminSocket();
@@ -43,6 +43,8 @@ const ElasticsearchManagement = () => {
       setIndicesData(info.indices);
       setIndicesData(info.indices);
       setAliasesData(info.aliases);
       setAliasesData(info.aliases);
       setIsNormalized(info.isNormalized);
       setIsNormalized(info.isNormalized);
+
+      return info.isNormalized;
     }
     }
     catch (errors: unknown) {
     catch (errors: unknown) {
       setIsConnected(false);
       setIsConnected(false);
@@ -60,6 +62,7 @@ const ElasticsearchManagement = () => {
         toastError(errors as Error);
         toastError(errors as Error);
       }
       }
 
 
+      return false;
     }
     }
     finally {
     finally {
       setIsInitialized(true);
       setIsInitialized(true);
@@ -67,13 +70,9 @@ const ElasticsearchManagement = () => {
   }, []);
   }, []);
 
 
   useEffect(() => {
   useEffect(() => {
-    const fetchIndicesStatusData = async() => {
-      await retrieveIndicesStatus();
-    };
-    fetchIndicesStatusData();
+    retrieveIndicesStatus();
   }, [retrieveIndicesStatus]);
   }, [retrieveIndicesStatus]);
 
 
-
   useEffect(() => {
   useEffect(() => {
     if (socket == null) {
     if (socket == null) {
       return;
       return;
@@ -83,7 +82,19 @@ const ElasticsearchManagement = () => {
     });
     });
 
 
     socket.on(SocketEventName.FinishAddPage, async(data) => {
     socket.on(SocketEventName.FinishAddPage, async(data) => {
-      await retrieveIndicesStatus();
+      let retryCount = 0;
+      const maxRetries = 5;
+      const retryDelay = 500;
+
+      const retrieveIndicesStatusWithRetry = async() => {
+        const isNormalizedResult = await retrieveIndicesStatus();
+        if (!isNormalizedResult && retryCount < maxRetries) {
+          retryCount++;
+          setTimeout(retrieveIndicesStatusWithRetry, retryDelay);
+        }
+      };
+
+      await retrieveIndicesStatusWithRetry();
       setIsRebuildingProcessing(false);
       setIsRebuildingProcessing(false);
       setIsRebuildingCompleted(true);
       setIsRebuildingCompleted(true);
     });
     });
@@ -99,7 +110,6 @@ const ElasticsearchManagement = () => {
     };
     };
   }, [retrieveIndicesStatus, socket]);
   }, [retrieveIndicesStatus, socket]);
 
 
-
   const reconnect = async() => {
   const reconnect = async() => {
     setIsReconnectingProcessing(true);
     setIsReconnectingProcessing(true);
 
 

+ 22 - 31
apps/app/src/client/components/Admin/ImportData/GrowiArchive/ImportCollectionItem.jsx

@@ -1,7 +1,9 @@
 import React from 'react';
 import React from 'react';
 
 
 import PropTypes from 'prop-types';
 import PropTypes from 'prop-types';
-import { Progress } from 'reactstrap';
+import {
+  Progress, UncontrolledDropdown, DropdownToggle, DropdownMenu, DropdownItem,
+} from 'reactstrap';
 
 
 import { GrowiArchiveImportOption } from '~/models/admin/growi-archive-import-option';
 import { GrowiArchiveImportOption } from '~/models/admin/growi-archive-import-option';
 
 
@@ -49,6 +51,8 @@ export default class ImportCollectionItem extends React.Component {
     onOptionChange(collectionName, { mode });
     onOptionChange(collectionName, { mode });
   }
   }
 
 
+  // No toggle state needed when using UncontrolledDropdown
+
   configButtonClickedHandler() {
   configButtonClickedHandler() {
     const { collectionName, onConfigButtonClicked } = this.props;
     const { collectionName, onConfigButtonClicked } = this.props;
 
 
@@ -103,40 +107,28 @@ export default class ImportCollectionItem extends React.Component {
     const {
     const {
       collectionName, option, isImporting,
       collectionName, option, isImporting,
     } = this.props;
     } = this.props;
-
-    const attrMap = MODE_ATTR_MAP[option.mode];
-    const btnColor = `btn-${attrMap.color}`;
-
+    const currentMode = option?.mode || 'insert';
+    const attrMap = MODE_ATTR_MAP[currentMode];
     const modes = MODE_RESTRICTED_COLLECTION[collectionName] || Object.keys(MODE_ATTR_MAP);
     const modes = MODE_RESTRICTED_COLLECTION[collectionName] || Object.keys(MODE_ATTR_MAP);
 
 
     return (
     return (
       <span className="d-inline-flex align-items-center">
       <span className="d-inline-flex align-items-center">
         Mode:&nbsp;
         Mode:&nbsp;
-        <div className="dropdown d-inline-block">
-          <button
-            className={`btn ${btnColor} btn-sm dropdown-toggle`}
-            type="button"
-            id="ddmMode"
-            disabled={isImporting}
-            data-bs-toggle="dropdown"
-            aria-haspopup="true"
-            aria-expanded="true"
-          >
-            {this.renderModeLabel(option.mode)}
-            <span className="caret ms-2"></span>
-          </button>
-          <ul className="dropdown-menu" aria-labelledby="ddmMode">
-            { modes.map((mode) => {
-              return (
-                <li key={`buttonMode_${mode}`}>
-                  <button type="button" className="dropdown-item" role="button" onClick={() => this.modeSelectedHandler(mode)}>
-                    {this.renderModeLabel(mode, true)}
-                  </button>
-                </li>
-              );
-            }) }
-          </ul>
-        </div>
+        <UncontrolledDropdown size="sm" className="d-inline-block">
+          <DropdownToggle color={attrMap.color} caret disabled={isImporting} id={`ddmMode-${collectionName}`}>
+            {this.renderModeLabel(currentMode)}
+          </DropdownToggle>
+          <DropdownMenu>
+            {modes.map(mode => (
+              <DropdownItem
+                key={`buttonMode_${mode}`}
+                onClick={() => this.modeSelectedHandler(mode)}
+              >
+                {this.renderModeLabel(mode, true)}
+              </DropdownItem>
+            ))}
+          </DropdownMenu>
+        </UncontrolledDropdown>
       </span>
       </span>
     );
     );
   }
   }
@@ -190,7 +182,6 @@ export default class ImportCollectionItem extends React.Component {
         }
         }
       </div>
       </div>
     );
     );
-
   }
   }
 
 
   render() {
   render() {

+ 5 - 2
apps/app/src/client/components/TreeItem/TreeItemLayout.tsx

@@ -9,6 +9,8 @@ import React, {
   type JSX,
   type JSX,
 } from 'react';
 } from 'react';
 
 
+import { addTrailingSlash } from '@growi/core/dist/utils/path-utils';
+
 import { useSWRxPageChildren } from '~/stores/page-listing';
 import { useSWRxPageChildren } from '~/stores/page-listing';
 import { usePageTreeDescCountMap } from '~/stores/ui';
 import { usePageTreeDescCountMap } from '~/stores/ui';
 
 
@@ -88,9 +90,10 @@ export const TreeItemLayout = (props: TreeItemLayoutProps): JSX.Element => {
     setIsOpen(!isOpen);
     setIsOpen(!isOpen);
   }, [isOpen]);
   }, [isOpen]);
 
 
-  // didMount
   useEffect(() => {
   useEffect(() => {
-    const isPathToTarget = page.path != null && targetPath.startsWith(page.path) && targetPath !== page.path; // Target Page does not need to be opened
+    const isPathToTarget = page.path != null
+      && targetPath.startsWith(addTrailingSlash(page.path))
+      && targetPath !== page.path; // Target Page does not need to be opened
     if (isPathToTarget) setIsOpen(true);
     if (isPathToTarget) setIsOpen(true);
   }, [targetPath, page.path]);
   }, [targetPath, page.path]);
 
 

+ 1 - 1
apps/app/src/features/comment/server/events/event-emitter.ts

@@ -1,3 +1,3 @@
-import { EventEmitter } from 'events';
+import { EventEmitter } from 'node:events';
 
 
 export const commentEvent = new EventEmitter();
 export const commentEvent = new EventEmitter();

+ 1 - 1
apps/app/src/features/external-user-group/client/components/ExternalUserGroup/ExternalUserGroupManagement.tsx

@@ -153,7 +153,7 @@ export const ExternalGroupManagement: FC = () => {
         hideDeleteModal();
         hideDeleteModal();
 
 
         toastSuccess(`Deleted ${selectedExternalUserGroup?.name} group.`);
         toastSuccess(`Deleted ${selectedExternalUserGroup?.name} group.`);
-      } catch (err) {
+      } catch {
         toastError(new Error('Unable to delete the groups'));
         toastError(new Error('Unable to delete the groups'));
       }
       }
     },
     },

+ 1 - 0
apps/app/src/features/external-user-group/server/models/external-user-group-relation.ts

@@ -15,6 +15,7 @@ export interface ExternalUserGroupRelationDocument
 
 
 export interface ExternalUserGroupRelationModel
 export interface ExternalUserGroupRelationModel
   extends Model<ExternalUserGroupRelationDocument> {
   extends Model<ExternalUserGroupRelationDocument> {
+  // biome-ignore lint/suspicious/noExplicitAny: ignore
   [x: string]: any; // for old methods
   [x: string]: any; // for old methods
 
 
   PAGE_ITEMS: 50;
   PAGE_ITEMS: 50;

+ 1 - 0
apps/app/src/features/external-user-group/server/models/external-user-group.ts

@@ -12,6 +12,7 @@ export interface ExternalUserGroupDocument
 
 
 export interface ExternalUserGroupModel
 export interface ExternalUserGroupModel
   extends Model<ExternalUserGroupDocument> {
   extends Model<ExternalUserGroupDocument> {
+  // biome-ignore lint/suspicious/noExplicitAny: ignore
   [x: string]: any; // for old methods
   [x: string]: any; // for old methods
 
 
   PAGE_ITEMS: 10;
   PAGE_ITEMS: 10;

+ 1 - 1
apps/app/src/features/external-user-group/server/routes/apiv3/external-user-group.ts

@@ -964,7 +964,7 @@ module.exports = (crowi: Crowi): Router => {
           req.user.name,
           req.user.name,
           req.body.password,
           req.body.password,
         );
         );
-      } catch (e) {
+      } catch (_e) {
         return res.apiv3Err(
         return res.apiv3Err(
           new ErrorV3(
           new ErrorV3(
             'LDAP group sync failed',
             'LDAP group sync failed',

+ 1 - 1
apps/app/src/features/growi-plugin/server/consts/index.ts

@@ -1,4 +1,4 @@
-import { resolveFromRoot } from '~/utils/project-dir-utils';
+import { resolveFromRoot } from '~/server/util/project-dir-utils';
 
 
 export const PLUGIN_STORING_PATH = resolveFromRoot('tmp/plugins');
 export const PLUGIN_STORING_PATH = resolveFromRoot('tmp/plugins');
 
 

+ 1 - 1
apps/app/src/features/opentelemetry/server/anonymization/handlers/page-access-handler.spec.ts

@@ -1,4 +1,4 @@
-import type { IncomingMessage } from 'http';
+import type { IncomingMessage } from 'node:http';
 
 
 import { describe, expect, it } from 'vitest';
 import { describe, expect, it } from 'vitest';
 
 

+ 6 - 3
apps/app/src/features/opentelemetry/server/anonymization/handlers/page-access-handler.ts

@@ -1,3 +1,5 @@
+import { createHash } from 'node:crypto';
+import type { IncomingMessage } from 'node:http';
 import {
 import {
   getUsernameByPath,
   getUsernameByPath,
   isCreatablePage,
   isCreatablePage,
@@ -7,8 +9,6 @@ import {
   isUsersTopPage,
   isUsersTopPage,
 } from '@growi/core/dist/utils/page-path-utils';
 } from '@growi/core/dist/utils/page-path-utils';
 import { diag } from '@opentelemetry/api';
 import { diag } from '@opentelemetry/api';
-import { createHash } from 'crypto';
-import type { IncomingMessage } from 'http';
 
 
 import { ATTR_HTTP_TARGET } from '../../semconv';
 import { ATTR_HTTP_TARGET } from '../../semconv';
 import type { AnonymizationModule } from '../interfaces/anonymization-module';
 import type { AnonymizationModule } from '../interfaces/anonymization-module';
@@ -132,7 +132,10 @@ export const pageAccessModule: AnonymizationModule = {
   /**
   /**
    * Handle anonymization for page access requests
    * Handle anonymization for page access requests
    */
    */
-  handle(request: IncomingMessage, url: string): Record<string, string> | null {
+  handle(
+    _request: IncomingMessage,
+    url: string,
+  ): Record<string, string> | null {
     try {
     try {
       const parsedUrl = new URL(url, 'http://localhost');
       const parsedUrl = new URL(url, 'http://localhost');
       const originalPath = parsedUrl.pathname;
       const originalPath = parsedUrl.pathname;

+ 1 - 1
apps/app/src/features/opentelemetry/server/anonymization/handlers/page-api-handler.spec.ts

@@ -1,4 +1,4 @@
-import type { IncomingMessage } from 'http';
+import type { IncomingMessage } from 'node:http';
 
 
 import { beforeEach, describe, expect, it } from 'vitest';
 import { beforeEach, describe, expect, it } from 'vitest';
 
 

+ 5 - 2
apps/app/src/features/opentelemetry/server/anonymization/handlers/page-api-handler.ts

@@ -1,5 +1,5 @@
+import type { IncomingMessage } from 'node:http';
 import { diag } from '@opentelemetry/api';
 import { diag } from '@opentelemetry/api';
-import type { IncomingMessage } from 'http';
 
 
 import { ATTR_HTTP_TARGET } from '../../semconv';
 import { ATTR_HTTP_TARGET } from '../../semconv';
 import type { AnonymizationModule } from '../interfaces/anonymization-module';
 import type { AnonymizationModule } from '../interfaces/anonymization-module';
@@ -28,7 +28,10 @@ export const pageApiModule: AnonymizationModule = {
   /**
   /**
    * Handle anonymization for page API endpoints
    * Handle anonymization for page API endpoints
    */
    */
-  handle(request: IncomingMessage, url: string): Record<string, string> | null {
+  handle(
+    _request: IncomingMessage,
+    url: string,
+  ): Record<string, string> | null {
     const attributes: Record<string, string> = {};
     const attributes: Record<string, string> = {};
     let hasAnonymization = false;
     let hasAnonymization = false;
 
 

+ 1 - 1
apps/app/src/features/opentelemetry/server/anonymization/handlers/page-listing-api-handler.spec.ts

@@ -1,4 +1,4 @@
-import type { IncomingMessage } from 'http';
+import type { IncomingMessage } from 'node:http';
 
 
 import { beforeEach, describe, expect, it } from 'vitest';
 import { beforeEach, describe, expect, it } from 'vitest';
 
 

+ 5 - 2
apps/app/src/features/opentelemetry/server/anonymization/handlers/page-listing-api-handler.ts

@@ -1,5 +1,5 @@
+import type { IncomingMessage } from 'node:http';
 import { diag } from '@opentelemetry/api';
 import { diag } from '@opentelemetry/api';
-import type { IncomingMessage } from 'http';
 
 
 import { ATTR_HTTP_TARGET } from '../../semconv';
 import { ATTR_HTTP_TARGET } from '../../semconv';
 import type { AnonymizationModule } from '../interfaces/anonymization-module';
 import type { AnonymizationModule } from '../interfaces/anonymization-module';
@@ -28,7 +28,10 @@ export const pageListingApiModule: AnonymizationModule = {
   /**
   /**
    * Handle anonymization for page-listing API endpoints
    * Handle anonymization for page-listing API endpoints
    */
    */
-  handle(request: IncomingMessage, url: string): Record<string, string> | null {
+  handle(
+    _request: IncomingMessage,
+    url: string,
+  ): Record<string, string> | null {
     const attributes: Record<string, string> = {};
     const attributes: Record<string, string> = {};
     let hasAnonymization = false;
     let hasAnonymization = false;
 
 

+ 1 - 1
apps/app/src/features/opentelemetry/server/anonymization/handlers/search-api-handler.spec.ts

@@ -1,4 +1,4 @@
-import type { IncomingMessage } from 'http';
+import type { IncomingMessage } from 'node:http';
 
 
 import { beforeEach, describe, expect, it } from 'vitest';
 import { beforeEach, describe, expect, it } from 'vitest';
 
 

+ 1 - 1
apps/app/src/features/opentelemetry/server/anonymization/handlers/search-api-handler.ts

@@ -1,5 +1,5 @@
+import type { IncomingMessage } from 'node:http';
 import { diag } from '@opentelemetry/api';
 import { diag } from '@opentelemetry/api';
-import type { IncomingMessage } from 'http';
 
 
 import { ATTR_HTTP_TARGET } from '../../semconv';
 import { ATTR_HTTP_TARGET } from '../../semconv';
 import type { AnonymizationModule } from '../interfaces/anonymization-module';
 import type { AnonymizationModule } from '../interfaces/anonymization-module';

+ 1 - 1
apps/app/src/features/opentelemetry/server/anonymization/interfaces/anonymization-module.ts

@@ -1,4 +1,4 @@
-import type { IncomingMessage } from 'http';
+import type { IncomingMessage } from 'node:http';
 
 
 /**
 /**
  * Interface for anonymization modules
  * Interface for anonymization modules

+ 1 - 1
apps/app/src/features/opentelemetry/server/custom-metrics/application-metrics.spec.ts

@@ -1,5 +1,5 @@
+import crypto from 'node:crypto';
 import { type Meter, metrics, type ObservableGauge } from '@opentelemetry/api';
 import { type Meter, metrics, type ObservableGauge } from '@opentelemetry/api';
-import crypto from 'crypto';
 import { mock } from 'vitest-mock-extended';
 import { mock } from 'vitest-mock-extended';
 
 
 import { configManager } from '~/server/service/config-manager';
 import { configManager } from '~/server/service/config-manager';

+ 1 - 1
apps/app/src/features/opentelemetry/server/custom-metrics/application-metrics.ts

@@ -1,5 +1,5 @@
+import crypto from 'node:crypto';
 import { diag, metrics } from '@opentelemetry/api';
 import { diag, metrics } from '@opentelemetry/api';
-import crypto from 'crypto';
 
 
 import { configManager } from '~/server/service/config-manager';
 import { configManager } from '~/server/service/config-manager';
 import loggerFactory from '~/utils/logger';
 import loggerFactory from '~/utils/logger';

+ 2 - 2
apps/app/src/features/opentelemetry/server/logger.ts

@@ -23,7 +23,7 @@ class DiagLoggerBunyanAdapter implements DiagLogger {
             ? data.message
             ? data.message
             : message;
             : message;
       }
       }
-    } catch (e) {
+    } catch (_e) {
       // do nothing if the message is not a JSON string
       // do nothing if the message is not a JSON string
     }
     }
 
 
@@ -35,7 +35,7 @@ class DiagLoggerBunyanAdapter implements DiagLogger {
           try {
           try {
             const parsed = JSON.parse(arg);
             const parsed = JSON.parse(arg);
             return { ...acc, ...parsed };
             return { ...acc, ...parsed };
-          } catch (e) {
+          } catch (_e) {
             return { ...acc, additionalInfo: arg };
             return { ...acc, additionalInfo: arg };
           }
           }
         }
         }

+ 1 - 1
apps/app/src/features/opentelemetry/server/node-sdk-configuration.ts

@@ -76,7 +76,7 @@ export const generateNodeSDKConfiguration = (opts?: Option): Configuration => {
  * This function should be called after database is available
  * This function should be called after database is available
  */
  */
 export const generateAdditionalResourceAttributes = async (
 export const generateAdditionalResourceAttributes = async (
-  opts?: Option,
+  _opts?: Option,
 ): Promise<Resource> => {
 ): Promise<Resource> => {
   if (resource == null) {
   if (resource == null) {
     throw new Error(
     throw new Error(

+ 1 - 1
apps/app/src/features/opentelemetry/server/node-sdk-resource.ts

@@ -23,7 +23,7 @@ export const setResource = (sdk: NodeSDK, resource: Resource): void => {
   // Verify that we can access the _resource property
   // Verify that we can access the _resource property
   try {
   try {
     getResource(sdk);
     getResource(sdk);
-  } catch (e) {
+  } catch (_e) {
     throw new Error('Failed to access SDK resource');
     throw new Error('Failed to access SDK resource');
   }
   }
 
 

+ 1 - 1
apps/app/src/features/page-bulk-export/client/components/PageBulkExportSelectModal.tsx

@@ -50,7 +50,7 @@ const PageBulkExportSelectModal = (): JSX.Element => {
           restartJob: true,
           restartJob: true,
         });
         });
         toastSuccess(t('page_export.bulk_export_started'));
         toastSuccess(t('page_export.bulk_export_started'));
-      } catch (e) {
+      } catch (_e) {
         toastError(t('page_export.failed_to_export'));
         toastError(t('page_export.failed_to_export'));
       }
       }
       setIsRestartModalOpened(false);
       setIsRestartModalOpened(false);

+ 3 - 3
apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-cron/index.ts

@@ -1,9 +1,9 @@
+import fs from 'node:fs';
+import path from 'node:path';
+import type { Readable } from 'node:stream';
 import type { IUser } from '@growi/core';
 import type { IUser } from '@growi/core';
 import { getIdForRef, isPopulated } from '@growi/core';
 import { getIdForRef, isPopulated } from '@growi/core';
-import fs from 'fs';
 import mongoose from 'mongoose';
 import mongoose from 'mongoose';
-import path from 'path';
-import type { Readable } from 'stream';
 
 
 import type { SupportedActionType } from '~/interfaces/activity';
 import type { SupportedActionType } from '~/interfaces/activity';
 import { SupportedAction, SupportedTargetModel } from '~/interfaces/activity';
 import { SupportedAction, SupportedTargetModel } from '~/interfaces/activity';

+ 2 - 2
apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-cron/steps/create-page-snapshots-async.ts

@@ -1,8 +1,8 @@
+import { createHash } from 'node:crypto';
+import { pipeline, Writable } from 'node:stream';
 import type { IPage } from '@growi/core';
 import type { IPage } from '@growi/core';
 import { getIdForRef, getIdStringForRef } from '@growi/core';
 import { getIdForRef, getIdStringForRef } from '@growi/core';
-import { createHash } from 'crypto';
 import mongoose from 'mongoose';
 import mongoose from 'mongoose';
-import { pipeline, Writable } from 'stream';
 
 
 import { PageBulkExportJobStatus } from '~/features/page-bulk-export/interfaces/page-bulk-export';
 import { PageBulkExportJobStatus } from '~/features/page-bulk-export/interfaces/page-bulk-export';
 import { SupportedAction } from '~/interfaces/activity';
 import { SupportedAction } from '~/interfaces/activity';

+ 3 - 3
apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-cron/steps/export-pages-to-fs-async.ts

@@ -1,15 +1,15 @@
+import fs from 'node:fs';
+import path from 'node:path';
+import { pipeline, Writable } from 'node:stream';
 import { dynamicImport } from '@cspell/dynamic-import';
 import { dynamicImport } from '@cspell/dynamic-import';
 import { isPopulated } from '@growi/core';
 import { isPopulated } from '@growi/core';
 import {
 import {
   getParentPath,
   getParentPath,
   normalizePath,
   normalizePath,
 } from '@growi/core/dist/utils/path-utils';
 } from '@growi/core/dist/utils/path-utils';
-import fs from 'fs';
 import type { Root } from 'mdast';
 import type { Root } from 'mdast';
-import path from 'path';
 import type * as RemarkHtml from 'remark-html';
 import type * as RemarkHtml from 'remark-html';
 import type * as RemarkParse from 'remark-parse';
 import type * as RemarkParse from 'remark-parse';
-import { pipeline, Writable } from 'stream';
 import type * as Unified from 'unified';
 import type * as Unified from 'unified';
 
 
 import {
 import {

+ 2 - 2
apps/app/src/features/templates/server/routes/apiv3/index.ts

@@ -1,3 +1,4 @@
+import path from 'node:path';
 import { GrowiPluginType } from '@growi/core';
 import { GrowiPluginType } from '@growi/core';
 import { SCOPE } from '@growi/core/dist/interfaces';
 import { SCOPE } from '@growi/core/dist/interfaces';
 import type { TemplateSummary } from '@growi/pluginkit/dist/v4';
 import type { TemplateSummary } from '@growi/pluginkit/dist/v4';
@@ -7,15 +8,14 @@ import {
 } from '@growi/pluginkit/dist/v4/server/index.cjs';
 } from '@growi/pluginkit/dist/v4/server/index.cjs';
 import express from 'express';
 import express from 'express';
 import { param, query } from 'express-validator';
 import { param, query } from 'express-validator';
-import path from 'path';
 import { PLUGIN_STORING_PATH } from '~/features/growi-plugin/server/consts';
 import { PLUGIN_STORING_PATH } from '~/features/growi-plugin/server/consts';
 import { GrowiPlugin } from '~/features/growi-plugin/server/models';
 import { GrowiPlugin } from '~/features/growi-plugin/server/models';
 import type Crowi from '~/server/crowi';
 import type Crowi from '~/server/crowi';
 import { accessTokenParser } from '~/server/middlewares/access-token-parser';
 import { accessTokenParser } from '~/server/middlewares/access-token-parser';
 import { apiV3FormValidator } from '~/server/middlewares/apiv3-form-validator';
 import { apiV3FormValidator } from '~/server/middlewares/apiv3-form-validator';
 import type { ApiV3Response } from '~/server/routes/apiv3/interfaces/apiv3-response';
 import type { ApiV3Response } from '~/server/routes/apiv3/interfaces/apiv3-response';
+import { resolveFromRoot } from '~/server/util/project-dir-utils';
 import loggerFactory from '~/utils/logger';
 import loggerFactory from '~/utils/logger';
-import { resolveFromRoot } from '~/utils/project-dir-utils';
 
 
 const logger = loggerFactory('growi:routes:apiv3:templates');
 const logger = loggerFactory('growi:routes:apiv3:templates');
 
 

+ 2 - 2
apps/app/src/migrations/20211227060705-revision-path-to-page-id-schema-migration--fixed-7549.js

@@ -1,6 +1,6 @@
+import { Writable } from 'node:stream';
+import { pipeline } from 'node:stream/promises';
 import mongoose from 'mongoose';
 import mongoose from 'mongoose';
-import { Writable } from 'stream';
-import { pipeline } from 'stream/promises';
 
 
 import getPageModel from '~/server/models/page';
 import getPageModel from '~/server/models/page';
 import { Revision } from '~/server/models/revision';
 import { Revision } from '~/server/models/revision';

+ 2 - 0
apps/app/src/server/crowi/express-init.js

@@ -2,10 +2,12 @@ import { themesRootPath as presetThemesRootPath } from '@growi/preset-themes';
 import csrf from 'csurf';
 import csrf from 'csurf';
 import qs from 'qs';
 import qs from 'qs';
 
 
+
 import { PLUGIN_EXPRESS_STATIC_DIR, PLUGIN_STORING_PATH } from '../../features/growi-plugin/server/consts';
 import { PLUGIN_EXPRESS_STATIC_DIR, PLUGIN_STORING_PATH } from '../../features/growi-plugin/server/consts';
 import loggerFactory from '../../utils/logger';
 import loggerFactory from '../../utils/logger';
 import { resolveFromRoot } from '../../utils/project-dir-utils';
 import { resolveFromRoot } from '../../utils/project-dir-utils';
 import CertifyOrigin from '../middlewares/certify-origin';
 import CertifyOrigin from '../middlewares/certify-origin';
+
 import registerSafeRedirectFactory from '../middlewares/safe-redirect';
 import registerSafeRedirectFactory from '../middlewares/safe-redirect';
 
 
 const logger = loggerFactory('growi:crowi:express-init');
 const logger = loggerFactory('growi:crowi:express-init');

+ 5 - 3
apps/app/src/server/crowi/index.js

@@ -18,9 +18,9 @@ import instanciatePageBulkExportJobCleanUpCronService, {
 } from '~/features/page-bulk-export/server/service/page-bulk-export-job-clean-up-cron';
 } from '~/features/page-bulk-export/server/service/page-bulk-export-job-clean-up-cron';
 import instanciatePageBulkExportJobCronService from '~/features/page-bulk-export/server/service/page-bulk-export-job-cron';
 import instanciatePageBulkExportJobCronService from '~/features/page-bulk-export/server/service/page-bulk-export-job-cron';
 import { startCron as startAccessTokenCron } from '~/server/service/access-token';
 import { startCron as startAccessTokenCron } from '~/server/service/access-token';
+import { projectRoot } from '~/server/util/project-dir-utils';
 import { getGrowiVersion } from '~/utils/growi-version';
 import { getGrowiVersion } from '~/utils/growi-version';
 import loggerFactory from '~/utils/logger';
 import loggerFactory from '~/utils/logger';
-import { projectRoot } from '~/utils/project-dir-utils';
 
 
 import UserEvent from '../events/user';
 import UserEvent from '../events/user';
 import { accessTokenParser } from '../middlewares/access-token-parser';
 import { accessTokenParser } from '../middlewares/access-token-parser';
@@ -32,7 +32,7 @@ import instanciateExportService from '../service/export';
 import instanciateExternalAccountService from '../service/external-account';
 import instanciateExternalAccountService from '../service/external-account';
 import { FileUploader, getUploader } from '../service/file-uploader'; // eslint-disable-line no-unused-vars
 import { FileUploader, getUploader } from '../service/file-uploader'; // eslint-disable-line no-unused-vars
 import { G2GTransferPusherService, G2GTransferReceiverService } from '../service/g2g-transfer';
 import { G2GTransferPusherService, G2GTransferReceiverService } from '../service/g2g-transfer';
-import GrowiBridgeService from '../service/growi-bridge';
+import { GrowiBridgeService } from '../service/growi-bridge';
 import { initializeImportService } from '../service/import';
 import { initializeImportService } from '../service/import';
 import { InstallerService } from '../service/installer';
 import { InstallerService } from '../service/installer';
 import { normalizeData } from '../service/normalize-data';
 import { normalizeData } from '../service/normalize-data';
@@ -82,6 +82,9 @@ class Crowi {
   /** @type {import('../service/growi-info').GrowiInfoService} */
   /** @type {import('../service/growi-info').GrowiInfoService} */
   growiInfoService;
   growiInfoService;
 
 
+  /** @type {import('../service/growi-bridge').GrowiBridgeService} */
+  growiBridgeService;
+
   /** @type {import('../service/page').IPageService} */
   /** @type {import('../service/page').IPageService} */
   pageService;
   pageService;
 
 
@@ -134,7 +137,6 @@ class Crowi {
     this.aclService = null;
     this.aclService = null;
     this.appService = null;
     this.appService = null;
     this.fileUploadService = null;
     this.fileUploadService = null;
-    this.growiBridgeService = null;
     this.pluginService = null;
     this.pluginService = null;
     this.searchService = null;
     this.searchService = null;
     this.socketIoService = null;
     this.socketIoService = null;

+ 3 - 2
apps/app/src/server/routes/apiv3/g2g-transfer.ts

@@ -1,19 +1,20 @@
 import { createReadStream } from 'fs';
 import { createReadStream } from 'fs';
 import path from 'path';
 import path from 'path';
 
 
+import { SCOPE } from '@growi/core/dist/interfaces';
 import { ErrorV3 } from '@growi/core/dist/models';
 import { ErrorV3 } from '@growi/core/dist/models';
 import type { NextFunction, Request, Router } from 'express';
 import type { NextFunction, Request, Router } from 'express';
 import express from 'express';
 import express from 'express';
 import { body } from 'express-validator';
 import { body } from 'express-validator';
 import multer from 'multer';
 import multer from 'multer';
 
 
-import { SCOPE } from '@growi/core/dist/interfaces';
 import { accessTokenParser } from '~/server/middlewares/access-token-parser';
 import { accessTokenParser } from '~/server/middlewares/access-token-parser';
 import { isG2GTransferError } from '~/server/models/vo/g2g-transfer-error';
 import { isG2GTransferError } from '~/server/models/vo/g2g-transfer-error';
 import { configManager } from '~/server/service/config-manager';
 import { configManager } from '~/server/service/config-manager';
 import { exportService } from '~/server/service/export';
 import { exportService } from '~/server/service/export';
 import type { IDataGROWIInfo } from '~/server/service/g2g-transfer';
 import type { IDataGROWIInfo } from '~/server/service/g2g-transfer';
 import { X_GROWI_TRANSFER_KEY_HEADER_NAME } from '~/server/service/g2g-transfer';
 import { X_GROWI_TRANSFER_KEY_HEADER_NAME } from '~/server/service/g2g-transfer';
+import type { ImportSettings } from '~/server/service/import';
 import { getImportService } from '~/server/service/import';
 import { getImportService } from '~/server/service/import';
 import loggerFactory from '~/utils/logger';
 import loggerFactory from '~/utils/logger';
 import { TransferKey } from '~/utils/vo/transfer-key';
 import { TransferKey } from '~/utils/vo/transfer-key';
@@ -313,7 +314,7 @@ module.exports = (crowi: Crowi): Router => {
     /*
     /*
      * generate maps of ImportSettings to import
      * generate maps of ImportSettings to import
      */
      */
-    let importSettingsMap;
+    let importSettingsMap: Map<string, ImportSettings>;
     try {
     try {
       importSettingsMap = g2gTransferReceiverService.getImportSettingMap(innerFileStats, optionsMap, operatorUserId);
       importSettingsMap = g2gTransferReceiverService.getImportSettingMap(innerFileStats, optionsMap, operatorUserId);
     }
     }

+ 23 - 17
apps/app/src/server/routes/apiv3/import.js → apps/app/src/server/routes/apiv3/import.ts

@@ -1,14 +1,18 @@
+import { SCOPE } from '@growi/core/dist/interfaces';
 import { ErrorV3 } from '@growi/core/dist/models';
 import { ErrorV3 } from '@growi/core/dist/models';
 
 
 import { SupportedAction } from '~/interfaces/activity';
 import { SupportedAction } from '~/interfaces/activity';
-import { SCOPE } from '@growi/core/dist/interfaces';
+import type { GrowiArchiveImportOption } from '~/models/admin/growi-archive-import-option';
+import type Crowi from '~/server/crowi';
 import { accessTokenParser } from '~/server/middlewares/access-token-parser';
 import { accessTokenParser } from '~/server/middlewares/access-token-parser';
+import type { ImportSettings } from '~/server/service/import';
 import { getImportService } from '~/server/service/import';
 import { getImportService } from '~/server/service/import';
 import { generateOverwriteParams } from '~/server/service/import/overwrite-params';
 import { generateOverwriteParams } from '~/server/service/import/overwrite-params';
 import loggerFactory from '~/utils/logger';
 import loggerFactory from '~/utils/logger';
 
 
 import { generateAddActivityMiddleware } from '../../middlewares/add-activity';
 import { generateAddActivityMiddleware } from '../../middlewares/add-activity';
 
 
+
 const logger = loggerFactory('growi:routes:apiv3:import'); // eslint-disable-line no-unused-vars
 const logger = loggerFactory('growi:routes:apiv3:import'); // eslint-disable-line no-unused-vars
 
 
 const path = require('path');
 const path = require('path');
@@ -122,14 +126,13 @@ const router = express.Router();
  *                  type: integer
  *                  type: integer
  *                  nullable: true
  *                  nullable: true
  */
  */
-/** @param {import('~/server/crowi').default} crowi Crowi instance */
-export default function route(crowi) {
+export default function route(crowi: Crowi): void {
   const { growiBridgeService, socketIoService } = crowi;
   const { growiBridgeService, socketIoService } = crowi;
-  const importService = getImportService(crowi);
+  const importService = getImportService();
 
 
   const loginRequired = require('../../middlewares/login-required')(crowi);
   const loginRequired = require('../../middlewares/login-required')(crowi);
   const adminRequired = require('../../middlewares/admin-required')(crowi);
   const adminRequired = require('../../middlewares/admin-required')(crowi);
-  const addActivity = generateAddActivityMiddleware(crowi);
+  const addActivity = generateAddActivityMiddleware();
 
 
   const adminEvent = crowi.event('admin');
   const adminEvent = crowi.event('admin');
   const activityEvent = crowi.event('activity');
   const activityEvent = crowi.event('activity');
@@ -312,18 +315,22 @@ export default function route(crowi) {
     /*
     /*
      * unzip, parse
      * unzip, parse
      */
      */
-    let meta = null;
-    let fileStatsToImport = null;
+    let meta;
+    let fileStatsToImport;
     try {
     try {
       // unzip
       // unzip
       await importService.unzip(zipFile);
       await importService.unzip(zipFile);
 
 
       // eslint-disable-next-line no-unused-vars
       // eslint-disable-next-line no-unused-vars
-      const { meta: parsedMeta, fileStats, innerFileStats } = await growiBridgeService.parseZipFile(zipFile);
-      meta = parsedMeta;
+      const parseZipResult = await growiBridgeService.parseZipFile(zipFile);
+      if (parseZipResult == null) {
+        throw new Error('parseZipFile returns null');
+      }
+
+      meta = parseZipResult.meta;
 
 
       // filter innerFileStats
       // filter innerFileStats
-      fileStatsToImport = innerFileStats.filter(({ fileName, collectionName, size }) => {
+      fileStatsToImport = parseZipResult.innerFileStats.filter(({ collectionName }) => {
         return collections.includes(collectionName);
         return collections.includes(collectionName);
       });
       });
     }
     }
@@ -346,21 +353,20 @@ export default function route(crowi) {
     }
     }
 
 
     // generate maps of ImportSettings to import
     // generate maps of ImportSettings to import
-    const importSettingsMap = {};
+    // Use the Map for a potential fix for the code scanning alert no. 895: Prototype-polluting assignment
+    const importSettingsMap = new Map<string, ImportSettings>();
     fileStatsToImport.forEach(({ fileName, collectionName }) => {
     fileStatsToImport.forEach(({ fileName, collectionName }) => {
       // instanciate GrowiArchiveImportOption
       // instanciate GrowiArchiveImportOption
-      /** @type {import('~/models/admin/growi-archive-import-option').GrowiArchiveImportOption} */
-      const option = options.find(opt => opt.collectionName === collectionName);
+      const option: GrowiArchiveImportOption = options.find(opt => opt.collectionName === collectionName);
 
 
       // generate options
       // generate options
-      /** @type {import('~/server/service/import').ImportSettings} */
       const importSettings = {
       const importSettings = {
         mode: option.mode,
         mode: option.mode,
         jsonFileName: fileName,
         jsonFileName: fileName,
         overwriteParams: generateOverwriteParams(collectionName, req.user._id, option),
         overwriteParams: generateOverwriteParams(collectionName, req.user._id, option),
-      };
+      } satisfies ImportSettings;
 
 
-      importSettingsMap[collectionName] = importSettings;
+      importSettingsMap.set(collectionName, importSettings);
     });
     });
 
 
     /*
     /*
@@ -411,7 +417,7 @@ export default function route(crowi) {
     async(req, res) => {
     async(req, res) => {
       const { file } = req;
       const { file } = req;
       const zipFile = importService.getFile(file.filename);
       const zipFile = importService.getFile(file.filename);
-      let data = null;
+      let data;
 
 
       try {
       try {
         data = await growiBridgeService.parseZipFile(zipFile);
         data = await growiBridgeService.parseZipFile(zipFile);

+ 3 - 3
apps/app/src/server/routes/attachment/get.ts

@@ -11,7 +11,7 @@ import type { CrowiProperties, CrowiRequest } from '~/interfaces/crowi-request';
 import { ResponseMode, type ExpressHttpHeader, type RespondOptions } from '~/server/interfaces/attachment';
 import { ResponseMode, type ExpressHttpHeader, type RespondOptions } from '~/server/interfaces/attachment';
 import {
 import {
   type FileUploader,
   type FileUploader,
-  toExpressHttpHeaders, ContentHeaders, applyHeaders,
+  toExpressHttpHeaders, applyHeaders, createContentHeaders,
 } from '~/server/service/file-uploader';
 } from '~/server/service/file-uploader';
 import loggerFactory from '~/utils/logger';
 import loggerFactory from '~/utils/logger';
 
 
@@ -110,8 +110,8 @@ const respondForRedirectMode = async(res: Response, fileUploadService: FileUploa
 const respondForRelayMode = async(res: Response, fileUploadService: FileUploader, attachment: IAttachmentDocument, opts?: RespondOptions): Promise<void> => {
 const respondForRelayMode = async(res: Response, fileUploadService: FileUploader, attachment: IAttachmentDocument, opts?: RespondOptions): Promise<void> => {
   // apply content-* headers before response
   // apply content-* headers before response
   const isDownload = opts?.download ?? false;
   const isDownload = opts?.download ?? false;
-  const contentHeaders = new ContentHeaders(attachment, { inline: !isDownload });
-  applyHeaders(res, contentHeaders.toExpressHttpHeaders());
+  const contentHeaders = createContentHeaders(attachment, { inline: !isDownload });
+  applyHeaders(res, contentHeaders);
 
 
   try {
   try {
     const readable = await fileUploadService.findDeliveryFile(attachment);
     const readable = await fileUploadService.findDeliveryFile(attachment);

+ 1 - 1
apps/app/src/server/routes/ogp.ts

@@ -13,8 +13,8 @@ import { param, validationResult } from 'express-validator';
 import type { HydratedDocument } from 'mongoose';
 import type { HydratedDocument } from 'mongoose';
 import mongoose from 'mongoose';
 import mongoose from 'mongoose';
 
 
+import { projectRoot } from '~/server/util/project-dir-utils';
 import loggerFactory from '~/utils/logger';
 import loggerFactory from '~/utils/logger';
-import { projectRoot } from '~/utils/project-dir-utils';
 
 
 import type Crowi from '../crowi';
 import type Crowi from '../crowi';
 import { Attachment } from '../models/attachment';
 import { Attachment } from '../models/attachment';

+ 5 - 0
apps/app/src/server/service/config-manager/config-definition.ts

@@ -50,6 +50,7 @@ export const CONFIG_KEYS = [
   'app:aiEnabled',
   'app:aiEnabled',
   'app:publishOpenAPI',
   'app:publishOpenAPI',
   'app:maxFileSize',
   'app:maxFileSize',
+  'app:fileUploadTimeout',
   'app:fileUploadTotalLimit',
   'app:fileUploadTotalLimit',
   'app:fileUploadDisabled',
   'app:fileUploadDisabled',
   'app:elasticsearchVersion',
   'app:elasticsearchVersion',
@@ -429,6 +430,10 @@ export const CONFIG_DEFINITIONS = {
     envVarName: 'MAX_FILE_SIZE',
     envVarName: 'MAX_FILE_SIZE',
     defaultValue: Infinity,
     defaultValue: Infinity,
   }),
   }),
+  'app:fileUploadTimeout': defineConfig<number>({
+    envVarName: 'FILE_UPLOAD_TIMEOUT',
+    defaultValue: 10 * 60 * 1000, // 10 minutes
+  }),
   'app:fileUploadTotalLimit': defineConfig<number>({
   'app:fileUploadTotalLimit': defineConfig<number>({
     envVarName: 'FILE_UPLOAD_TOTAL_LIMIT',
     envVarName: 'FILE_UPLOAD_TOTAL_LIMIT',
     defaultValue: Infinity,
     defaultValue: Infinity,

+ 1 - 1
apps/app/src/server/service/export.ts

@@ -13,7 +13,7 @@ import CollectionProgressingStatus from '../models/vo/collection-progressing-sta
 
 
 import type AppService from './app';
 import type AppService from './app';
 import { configManager } from './config-manager';
 import { configManager } from './config-manager';
-import type GrowiBridgeService from './growi-bridge';
+import type { GrowiBridgeService } from './growi-bridge';
 import { growiInfoService } from './growi-info';
 import { growiInfoService } from './growi-info';
 import type { ZipFileStat } from './interfaces/export';
 import type { ZipFileStat } from './interfaces/export';
 
 

+ 59 - 20
apps/app/src/server/service/file-uploader/aws/index.ts

@@ -28,7 +28,7 @@ import { configManager } from '../../config-manager';
 import {
 import {
   AbstractFileUploader, type TemporaryUrl, type SaveFileParam,
   AbstractFileUploader, type TemporaryUrl, type SaveFileParam,
 } from '../file-uploader';
 } from '../file-uploader';
-import { ContentHeaders } from '../utils';
+import { createContentHeaders, getContentHeaderValue } from '../utils';
 
 
 import { AwsMultipartUploader } from './multipart-uploader';
 import { AwsMultipartUploader } from './multipart-uploader';
 
 
@@ -44,6 +44,8 @@ interface FileMeta {
   size: number;
   size: number;
 }
 }
 
 
+// Cache holder to avoid repeated instantiation of S3 client
+let cachedS3Client: { configKey: string, client: S3Client } | null = null;
 const isFileExists = async(s3: S3Client, params: HeadObjectCommandInput) => {
 const isFileExists = async(s3: S3Client, params: HeadObjectCommandInput) => {
   try {
   try {
     await s3.send(new HeadObjectCommand(params));
     await s3.send(new HeadObjectCommand(params));
@@ -86,12 +88,21 @@ const getS3Bucket = (): NonBlankString | undefined => {
 };
 };
 
 
 const S3Factory = (): S3Client => {
 const S3Factory = (): S3Client => {
+  // Cache key based on configuration values to detect changes
   const accessKeyId = configManager.getConfig('aws:s3AccessKeyId');
   const accessKeyId = configManager.getConfig('aws:s3AccessKeyId');
   const secretAccessKey = configManager.getConfig('aws:s3SecretAccessKey');
   const secretAccessKey = configManager.getConfig('aws:s3SecretAccessKey');
   const s3Region = toNonBlankStringOrUndefined(configManager.getConfig('aws:s3Region')); // Blank strings may remain in the DB, so convert with toNonBlankStringOrUndefined for safety
   const s3Region = toNonBlankStringOrUndefined(configManager.getConfig('aws:s3Region')); // Blank strings may remain in the DB, so convert with toNonBlankStringOrUndefined for safety
   const s3CustomEndpoint = toNonBlankStringOrUndefined(configManager.getConfig('aws:s3CustomEndpoint'));
   const s3CustomEndpoint = toNonBlankStringOrUndefined(configManager.getConfig('aws:s3CustomEndpoint'));
 
 
-  return new S3Client({
+  const configKey = `${accessKeyId ?? ''}|${secretAccessKey ?? ''}|${s3Region ?? ''}|${s3CustomEndpoint ?? ''}`;
+
+  // Return cached client if configuration hasn't changed
+  if (cachedS3Client != null && cachedS3Client.configKey === configKey) {
+    return cachedS3Client.client;
+  }
+
+  // Create new client instance with connection pooling optimizations
+  const client = new S3Client({
     credentials: accessKeyId != null && secretAccessKey != null
     credentials: accessKeyId != null && secretAccessKey != null
       ? {
       ? {
         accessKeyId,
         accessKeyId,
@@ -102,6 +113,10 @@ const S3Factory = (): S3Client => {
     endpoint: s3CustomEndpoint,
     endpoint: s3CustomEndpoint,
     forcePathStyle: s3CustomEndpoint != null, // s3ForcePathStyle renamed to forcePathStyle in v3
     forcePathStyle: s3CustomEndpoint != null, // s3ForcePathStyle renamed to forcePathStyle in v3
   });
   });
+
+  // Cache the new client
+  cachedS3Client = { configKey, client };
+  return client;
 };
 };
 
 
 const getFilePathOnStorage = (attachment: IAttachmentDocument) => {
 const getFilePathOnStorage = (attachment: IAttachmentDocument) => {
@@ -177,17 +192,38 @@ class AwsFileUploader extends AbstractFileUploader {
     const s3 = S3Factory();
     const s3 = S3Factory();
 
 
     const filePath = getFilePathOnStorage(attachment);
     const filePath = getFilePathOnStorage(attachment);
-    const contentHeaders = new ContentHeaders(attachment);
+    const contentHeaders = createContentHeaders(attachment);
 
 
-    await s3.send(new PutObjectCommand({
-      Bucket: getS3Bucket(),
-      Key: filePath,
-      Body: readable,
-      ACL: getS3PutObjectCannedAcl(),
-      // put type and the file name for reference information when uploading
-      ContentType: contentHeaders.contentType?.value.toString(),
-      ContentDisposition: contentHeaders.contentDisposition?.value.toString(),
-    }));
+    try {
+      const uploadTimeout = configManager.getConfig('app:fileUploadTimeout');
+
+      await s3.send(
+        new PutObjectCommand({
+          Bucket: getS3Bucket(),
+          Key: filePath,
+          Body: readable,
+          ACL: getS3PutObjectCannedAcl(),
+          // put type and the file name for reference information when uploading
+          ContentType: getContentHeaderValue(contentHeaders, 'Content-Type'),
+          ContentDisposition: getContentHeaderValue(contentHeaders, 'Content-Disposition'),
+        }),
+        { abortSignal: AbortSignal.timeout(uploadTimeout) },
+      );
+
+      logger.debug(`File upload completed successfully: fileName=${attachment.fileName}`);
+    }
+    catch (error) {
+      // Handle timeout error specifically
+      if (error.name === 'AbortError') {
+        logger.warn(`Upload timeout: fileName=${attachment.fileName}`, error);
+      }
+      else {
+        logger.error(`File upload failed: fileName=${attachment.fileName}`, error);
+      }
+      // Re-throw the error to be handled by the caller.
+      // The pipeline automatically handles stream cleanup on error.
+      throw error;
+    }
   }
   }
 
 
   /**
   /**
@@ -205,7 +241,7 @@ class AwsFileUploader extends AbstractFileUploader {
       throw new Error('AWS is not configured.');
       throw new Error('AWS is not configured.');
     }
     }
 
 
-    const s3 = S3Factory();
+    const s3 = S3Factory(); // Use singleton client
     const filePath = getFilePathOnStorage(attachment);
     const filePath = getFilePathOnStorage(attachment);
 
 
     const params = {
     const params = {
@@ -220,20 +256,20 @@ class AwsFileUploader extends AbstractFileUploader {
     }
     }
 
 
     try {
     try {
-      const body = (await s3.send(new GetObjectCommand(params))).Body;
+      const response = await s3.send(new GetObjectCommand(params));
+      const body = response.Body;
 
 
       if (body == null) {
       if (body == null) {
         throw new Error(`S3 returned null for the Attachment (${filePath})`);
         throw new Error(`S3 returned null for the Attachment (${filePath})`);
       }
       }
 
 
-      // eslint-disable-next-line no-nested-ternary
       return 'stream' in body
       return 'stream' in body
         ? body.stream() as unknown as NodeJS.ReadableStream // get stream from Blob and cast force
         ? body.stream() as unknown as NodeJS.ReadableStream // get stream from Blob and cast force
         : body as unknown as NodeJS.ReadableStream; // cast force
         : body as unknown as NodeJS.ReadableStream; // cast force
     }
     }
     catch (err) {
     catch (err) {
-      logger.error(err);
-      throw new Error(`Coudn't get file from AWS for the Attachment (${attachment._id.toString()})`);
+      logger.error(`Failed to get file from AWS S3 for attachment ${attachment._id.toString()}:`, err);
+      throw new Error(`Couldn't get file from AWS for the Attachment (${attachment._id.toString()})`);
     }
     }
   }
   }
 
 
@@ -252,12 +288,12 @@ class AwsFileUploader extends AbstractFileUploader {
     // issue signed url (default: expires 120 seconds)
     // issue signed url (default: expires 120 seconds)
     // https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#getSignedUrl-property
     // https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#getSignedUrl-property
     const isDownload = opts?.download ?? false;
     const isDownload = opts?.download ?? false;
-    const contentHeaders = new ContentHeaders(attachment, { inline: !isDownload });
+    const contentHeaders = createContentHeaders(attachment, { inline: !isDownload });
     const params: GetObjectCommandInput = {
     const params: GetObjectCommandInput = {
       Bucket: getS3Bucket(),
       Bucket: getS3Bucket(),
       Key: filePath,
       Key: filePath,
-      ResponseContentType: contentHeaders.contentType?.value.toString(),
-      ResponseContentDisposition: contentHeaders.contentDisposition?.value.toString(),
+      ResponseContentType: getContentHeaderValue(contentHeaders, 'Content-Type'),
+      ResponseContentDisposition: getContentHeaderValue(contentHeaders, 'Content-Disposition'),
     };
     };
     const signedUrl = await getSignedUrl(s3, new GetObjectCommand(params), {
     const signedUrl = await getSignedUrl(s3, new GetObjectCommand(params), {
       expiresIn: lifetimeSecForTemporaryUrl,
       expiresIn: lifetimeSecForTemporaryUrl,
@@ -282,12 +318,15 @@ class AwsFileUploader extends AbstractFileUploader {
         Key: uploadKey,
         Key: uploadKey,
         UploadId: uploadId,
         UploadId: uploadId,
       }));
       }));
+      logger.debug(`Successfully aborted multipart upload: uploadKey=${uploadKey}, uploadId=${uploadId}`);
     }
     }
     catch (e) {
     catch (e) {
       // allow duplicate abort requests to ensure abortion
       // allow duplicate abort requests to ensure abortion
       if (e.response?.status !== 404) {
       if (e.response?.status !== 404) {
+        logger.error(`Failed to abort multipart upload: uploadKey=${uploadKey}, uploadId=${uploadId}`, e);
         throw e;
         throw e;
       }
       }
+      logger.debug(`Multipart upload already aborted: uploadKey=${uploadKey}, uploadId=${uploadId}`);
     }
     }
   }
   }
 
 

+ 81 - 16
apps/app/src/server/service/file-uploader/azure.ts

@@ -29,7 +29,7 @@ import { configManager } from '../config-manager';
 import {
 import {
   AbstractFileUploader, type TemporaryUrl, type SaveFileParam,
   AbstractFileUploader, type TemporaryUrl, type SaveFileParam,
 } from './file-uploader';
 } from './file-uploader';
-import { ContentHeaders } from './utils';
+import { createContentHeaders, getContentHeaderValue } from './utils';
 
 
 const urljoin = require('url-join');
 const urljoin = require('url-join');
 
 
@@ -45,6 +45,11 @@ type AzureConfig = {
   containerName: string,
   containerName: string,
 }
 }
 
 
+// Cache holders to avoid repeated instantiation of credential and clients
+let cachedCredential: { key: string, credential: TokenCredential } | null = null;
+let cachedBlobServiceClient: { key: string, client: BlobServiceClient } | null = null;
+let cachedContainerClient: { key: string, client: ContainerClient } | null = null;
+
 
 
 function getAzureConfig(): AzureConfig {
 function getAzureConfig(): AzureConfig {
   const accountName = configManager.getConfig('azure:storageAccountName');
   const accountName = configManager.getConfig('azure:storageAccountName');
@@ -61,6 +66,7 @@ function getAzureConfig(): AzureConfig {
 }
 }
 
 
 function getCredential(): TokenCredential {
 function getCredential(): TokenCredential {
+  // Build cache key from credential-related configs
   const tenantId = toNonBlankStringOrUndefined(configManager.getConfig('azure:tenantId'));
   const tenantId = toNonBlankStringOrUndefined(configManager.getConfig('azure:tenantId'));
   const clientId = toNonBlankStringOrUndefined(configManager.getConfig('azure:clientId'));
   const clientId = toNonBlankStringOrUndefined(configManager.getConfig('azure:clientId'));
   const clientSecret = toNonBlankStringOrUndefined(configManager.getConfig('azure:clientSecret'));
   const clientSecret = toNonBlankStringOrUndefined(configManager.getConfig('azure:clientSecret'));
@@ -69,13 +75,52 @@ function getCredential(): TokenCredential {
     throw new Error(`Azure Blob Storage missing required configuration: tenantId=${tenantId}, clientId=${clientId}, clientSecret=${clientSecret}`);
     throw new Error(`Azure Blob Storage missing required configuration: tenantId=${tenantId}, clientId=${clientId}, clientSecret=${clientSecret}`);
   }
   }
 
 
-  return new ClientSecretCredential(tenantId, clientId, clientSecret);
+  const key = `${tenantId}|${clientId}|${clientSecret}`;
+
+  // Reuse cached credential when config has not changed
+  if (cachedCredential != null && cachedCredential.key === key) {
+    return cachedCredential.credential;
+  }
+
+  const credential = new ClientSecretCredential(tenantId, clientId, clientSecret);
+  cachedCredential = { key, credential };
+  return credential;
+}
+
+function getBlobServiceClient(): BlobServiceClient {
+  const { accountName } = getAzureConfig();
+  // Include credential cache key to ensure we re-create if cred changed
+  const credential = getCredential();
+  const credentialKey = (cachedCredential?.key) ?? 'unknown-cred';
+  const key = `${accountName}|${credentialKey}`;
+
+  if (cachedBlobServiceClient != null && cachedBlobServiceClient.key === key) {
+    return cachedBlobServiceClient.client;
+  }
+
+  // Use keep-alive to minimize socket churn; reuse client across calls
+  const client = new BlobServiceClient(
+    `https://${accountName}.blob.core.windows.net`,
+    credential,
+    { keepAliveOptions: { enable: true } },
+  );
+  cachedBlobServiceClient = { key, client };
+  return client;
 }
 }
 
 
 async function getContainerClient(): Promise<ContainerClient> {
 async function getContainerClient(): Promise<ContainerClient> {
   const { accountName, containerName } = getAzureConfig();
   const { accountName, containerName } = getAzureConfig();
-  const blobServiceClient = new BlobServiceClient(`https://${accountName}.blob.core.windows.net`, getCredential());
-  return blobServiceClient.getContainerClient(containerName);
+  const credentialKey = (cachedCredential?.key) ?? 'unknown-cred';
+  const key = `${accountName}|${containerName}|${credentialKey}`;
+
+  if (cachedContainerClient != null && cachedContainerClient.key === key) {
+    return cachedContainerClient.client;
+  }
+
+  const blobServiceClient = getBlobServiceClient();
+  const client = blobServiceClient.getContainerClient(containerName);
+  cachedContainerClient = { key, client };
+  return client;
 }
 }
 
 
 function getFilePathOnStorage(attachment: IAttachmentDocument) {
 function getFilePathOnStorage(attachment: IAttachmentDocument) {
@@ -132,15 +177,34 @@ class AzureFileUploader extends AbstractFileUploader {
     const filePath = getFilePathOnStorage(attachment);
     const filePath = getFilePathOnStorage(attachment);
     const containerClient = await getContainerClient();
     const containerClient = await getContainerClient();
     const blockBlobClient: BlockBlobClient = containerClient.getBlockBlobClient(filePath);
     const blockBlobClient: BlockBlobClient = containerClient.getBlockBlobClient(filePath);
-    const contentHeaders = new ContentHeaders(attachment);
+    const contentHeaders = createContentHeaders(attachment);
 
 
-    await blockBlobClient.uploadStream(readable, undefined, undefined, {
-      blobHTTPHeaders: {
-        // put type and the file name for reference information when uploading
-        blobContentType: contentHeaders.contentType?.value.toString(),
-        blobContentDisposition: contentHeaders.contentDisposition?.value.toString(),
-      },
-    });
+    try {
+      const uploadTimeout = configManager.getConfig('app:fileUploadTimeout');
+
+      await blockBlobClient.uploadStream(readable, undefined, undefined, {
+        blobHTTPHeaders: {
+          // put type and the file name for reference information when uploading
+          blobContentType: getContentHeaderValue(contentHeaders, 'Content-Type'),
+          blobContentDisposition: getContentHeaderValue(contentHeaders, 'Content-Disposition'),
+        },
+        abortSignal: AbortSignal.timeout(uploadTimeout),
+      });
+
+      logger.debug(`File upload completed successfully: fileName=${attachment.fileName}`);
+    }
+    catch (error) {
+      // Handle timeout error specifically
+      if (error.name === 'AbortError') {
+        logger.warn(`Upload timeout: fileName=${attachment.fileName}`, error);
+      }
+      else {
+        logger.error(`File upload failed: fileName=${attachment.fileName}`, error);
+      }
+      // Re-throw the error to be handled by the caller.
+      // The pipeline automatically handles stream cleanup on error.
+      throw error;
+    }
   }
   }
 
 
   /**
   /**
@@ -202,7 +266,8 @@ class AzureFileUploader extends AbstractFileUploader {
 
 
     const sasToken = await (async() => {
     const sasToken = await (async() => {
       const { accountName, containerName } = getAzureConfig();
       const { accountName, containerName } = getAzureConfig();
-      const blobServiceClient = new BlobServiceClient(`https://${accountName}.blob.core.windows.net`, getCredential());
+      // Reuse the same BlobServiceClient (singleton)
+      const blobServiceClient = getBlobServiceClient();
 
 
       const now = Date.now();
       const now = Date.now();
       const startsOn = new Date(now - 30 * 1000);
       const startsOn = new Date(now - 30 * 1000);
@@ -210,7 +275,7 @@ class AzureFileUploader extends AbstractFileUploader {
       const userDelegationKey = await blobServiceClient.getUserDelegationKey(startsOn, expiresOn);
       const userDelegationKey = await blobServiceClient.getUserDelegationKey(startsOn, expiresOn);
 
 
       const isDownload = opts?.download ?? false;
       const isDownload = opts?.download ?? false;
-      const contentHeaders = new ContentHeaders(attachment, { inline: !isDownload });
+      const contentHeaders = createContentHeaders(attachment, { inline: !isDownload });
 
 
       // https://github.com/Azure/azure-sdk-for-js/blob/d4d55f73/sdk/storage/storage-blob/src/ContainerSASPermissions.ts#L24
       // https://github.com/Azure/azure-sdk-for-js/blob/d4d55f73/sdk/storage/storage-blob/src/ContainerSASPermissions.ts#L24
       // r:read, a:add, c:create, w:write, d:delete, l:list
       // r:read, a:add, c:create, w:write, d:delete, l:list
@@ -221,8 +286,8 @@ class AzureFileUploader extends AbstractFileUploader {
         protocol: SASProtocol.HttpsAndHttp,
         protocol: SASProtocol.HttpsAndHttp,
         startsOn,
         startsOn,
         expiresOn,
         expiresOn,
-        contentType: contentHeaders.contentType?.value.toString(),
-        contentDisposition: contentHeaders.contentDisposition?.value.toString(),
+        contentType: getContentHeaderValue(contentHeaders, 'Content-Type'),
+        contentDisposition: getContentHeaderValue(contentHeaders, 'Content-Disposition'),
       };
       };
 
 
       return generateBlobSASQueryParameters(sasOptions, userDelegationKey, accountName).toString();
       return generateBlobSASQueryParameters(sasOptions, userDelegationKey, accountName).toString();

+ 34 - 10
apps/app/src/server/service/file-uploader/gcs/index.ts

@@ -17,7 +17,7 @@ import { configManager } from '../../config-manager';
 import {
 import {
   AbstractFileUploader, type TemporaryUrl, type SaveFileParam,
   AbstractFileUploader, type TemporaryUrl, type SaveFileParam,
 } from '../file-uploader';
 } from '../file-uploader';
-import { ContentHeaders } from '../utils';
+import { createContentHeaders, getContentHeaderValue } from '../utils';
 
 
 import { GcsMultipartUploader } from './multipart-uploader';
 import { GcsMultipartUploader } from './multipart-uploader';
 
 
@@ -131,14 +131,38 @@ class GcsFileUploader extends AbstractFileUploader {
     const gcs = getGcsInstance();
     const gcs = getGcsInstance();
     const myBucket = gcs.bucket(getGcsBucket());
     const myBucket = gcs.bucket(getGcsBucket());
     const filePath = getFilePathOnStorage(attachment);
     const filePath = getFilePathOnStorage(attachment);
-    const contentHeaders = new ContentHeaders(attachment);
+    const contentHeaders = createContentHeaders(attachment);
 
 
     const file = myBucket.file(filePath);
     const file = myBucket.file(filePath);
-
-    await pipeline(readable, file.createWriteStream({
+    const writeStream = file.createWriteStream({
       // put type and the file name for reference information when uploading
       // put type and the file name for reference information when uploading
-      contentType: contentHeaders.contentType?.value.toString(),
-    }));
+      contentType: getContentHeaderValue(contentHeaders, 'Content-Type'),
+    });
+
+    try {
+      const uploadTimeout = configManager.getConfig('app:fileUploadTimeout');
+
+      // Use AbortSignal.timeout() for robust timeout handling (Node.js 16+)
+      await pipeline(
+        readable,
+        writeStream,
+        { signal: AbortSignal.timeout(uploadTimeout) },
+      );
+
+      logger.debug(`File upload completed successfully: fileName=${attachment.fileName}`);
+    }
+    catch (error) {
+      // Handle timeout error specifically
+      if (error.name === 'AbortError') {
+        logger.warn(`Upload timeout: fileName=${attachment.fileName}`, error);
+      }
+      else {
+        logger.error(`File upload failed: fileName=${attachment.fileName}`, error);
+      }
+      // Re-throw the error to be handled by the caller.
+      // The pipeline automatically handles stream cleanup on error.
+      throw error;
+    }
   }
   }
 
 
   /**
   /**
@@ -172,7 +196,7 @@ class GcsFileUploader extends AbstractFileUploader {
     }
     }
     catch (err) {
     catch (err) {
       logger.error(err);
       logger.error(err);
-      throw new Error(`Coudn't get file from AWS for the Attachment (${attachment._id.toString()})`);
+      throw new Error(`Coudn't get file from GCS for the Attachment (${attachment._id.toString()})`);
     }
     }
   }
   }
 
 
@@ -193,12 +217,12 @@ class GcsFileUploader extends AbstractFileUploader {
     // issue signed url (default: expires 120 seconds)
     // issue signed url (default: expires 120 seconds)
     // https://cloud.google.com/storage/docs/access-control/signed-urls
     // https://cloud.google.com/storage/docs/access-control/signed-urls
     const isDownload = opts?.download ?? false;
     const isDownload = opts?.download ?? false;
-    const contentHeaders = new ContentHeaders(attachment, { inline: !isDownload });
+    const contentHeaders = createContentHeaders(attachment, { inline: !isDownload });
     const [signedUrl] = await file.getSignedUrl({
     const [signedUrl] = await file.getSignedUrl({
       action: 'read',
       action: 'read',
       expires: Date.now() + lifetimeSecForTemporaryUrl * 1000,
       expires: Date.now() + lifetimeSecForTemporaryUrl * 1000,
-      responseType: contentHeaders.contentType?.value.toString(),
-      responseDisposition: contentHeaders.contentDisposition?.value.toString(),
+      responseType: getContentHeaderValue(contentHeaders, 'Content-Type'),
+      responseDisposition: getContentHeaderValue(contentHeaders, 'Content-Disposition'),
     });
     });
 
 
     return {
     return {

+ 114 - 52
apps/app/src/server/service/file-uploader/gridfs.ts

@@ -12,20 +12,69 @@ import loggerFactory from '~/utils/logger';
 import { configManager } from '../config-manager';
 import { configManager } from '../config-manager';
 
 
 import { AbstractFileUploader, type TemporaryUrl, type SaveFileParam } from './file-uploader';
 import { AbstractFileUploader, type TemporaryUrl, type SaveFileParam } from './file-uploader';
-import { ContentHeaders } from './utils';
+import { createContentHeaders, getContentHeaderValue } from './utils';
 
 
 const logger = loggerFactory('growi:service:fileUploaderGridfs');
 const logger = loggerFactory('growi:service:fileUploaderGridfs');
 
 
-
 const COLLECTION_NAME = 'attachmentFiles';
 const COLLECTION_NAME = 'attachmentFiles';
 const CHUNK_COLLECTION_NAME = `${COLLECTION_NAME}.chunks`;
 const CHUNK_COLLECTION_NAME = `${COLLECTION_NAME}.chunks`;
 
 
-// instantiate mongoose-gridfs
-const AttachmentFile = createModel({
-  modelName: COLLECTION_NAME,
-  bucketName: COLLECTION_NAME,
-  connection: mongoose.connection,
-});
+type PromisifiedUtils = {
+  read: (options?: object) => Readable;
+  // eslint-disable-next-line @typescript-eslint/ban-types
+  write: (file: object, stream: Readable, done?: Function) => void;
+  // eslint-disable-next-line @typescript-eslint/ban-types
+  unlink: (file: object, done?: Function) => void;
+  promisifiedWrite: (file: object, readable: Readable) => Promise<any>;
+  promisifiedUnlink: (file: object) => Promise<any>;
+}
+
+type AttachmentFileModel = mongoose.Model<any> & PromisifiedUtils;
+
+// Cache holders to avoid repeated model creation and manage lifecycle
+let cachedAttachmentFileModel: AttachmentFileModel;
+let cachedChunkCollection: mongoose.Collection;
+let cachedConnection: mongoose.Connection; // Track the connection instance itself
+
+/**
+ * Initialize GridFS models with connection instance monitoring
+ * This prevents memory leaks from repeated model creation
+ */
+function initializeGridFSModels(): { attachmentFileModel: AttachmentFileModel, chunkCollection: mongoose.Collection } {
+  // Check if we can reuse cached models by comparing connection instance
+  if (cachedAttachmentFileModel != null && cachedChunkCollection != null && cachedConnection === mongoose.connection) {
+    return { attachmentFileModel: cachedAttachmentFileModel, chunkCollection: cachedChunkCollection };
+  }
+
+  // Check connection state
+  if (mongoose.connection.readyState !== 1) {
+    throw new Error('MongoDB connection is not ready for GridFS operations');
+  }
+
+  // Create new model instances
+  const attachmentFileModel: AttachmentFileModel = createModel({
+    modelName: COLLECTION_NAME,
+    bucketName: COLLECTION_NAME,
+    connection: mongoose.connection,
+  });
+
+  const chunkCollection = mongoose.connection.collection(CHUNK_COLLECTION_NAME);
+
+  // Setup promisified methods on the model instance (not globally)
+  if (!attachmentFileModel.promisifiedWrite) {
+    attachmentFileModel.promisifiedWrite = util.promisify(attachmentFileModel.write).bind(attachmentFileModel);
+    attachmentFileModel.promisifiedUnlink = util.promisify(attachmentFileModel.unlink).bind(attachmentFileModel);
+  }
+
+  // Cache the instances
+  cachedAttachmentFileModel = attachmentFileModel;
+  cachedChunkCollection = chunkCollection;
+  cachedConnection = mongoose.connection;
+
+  logger.debug('GridFS models initialized successfully');
+
+  return { attachmentFileModel, chunkCollection };
+}
 
 
 
 
 // TODO: rewrite this module to be a type-safe implementation
 // TODO: rewrite this module to be a type-safe implementation
@@ -65,13 +114,14 @@ class GridfsFileUploader extends AbstractFileUploader {
   override async uploadAttachment(readable: Readable, attachment: IAttachmentDocument): Promise<void> {
   override async uploadAttachment(readable: Readable, attachment: IAttachmentDocument): Promise<void> {
     logger.debug(`File uploading: fileName=${attachment.fileName}`);
     logger.debug(`File uploading: fileName=${attachment.fileName}`);
 
 
-    const contentHeaders = new ContentHeaders(attachment);
+    const { attachmentFileModel } = initializeGridFSModels();
+    const contentHeaders = createContentHeaders(attachment);
 
 
-    return AttachmentFile.promisifiedWrite(
+    return attachmentFileModel.promisifiedWrite(
       {
       {
         // put type and the file name for reference information when uploading
         // put type and the file name for reference information when uploading
         filename: attachment.fileName,
         filename: attachment.fileName,
-        contentType: contentHeaders.contentType?.value.toString(),
+        contentType: getContentHeaderValue(contentHeaders, 'Content-Type'),
       },
       },
       readable,
       readable,
     );
     );
@@ -104,60 +154,42 @@ class GridfsFileUploader extends AbstractFileUploader {
 module.exports = function(crowi: Crowi) {
 module.exports = function(crowi: Crowi) {
   const lib = new GridfsFileUploader(crowi);
   const lib = new GridfsFileUploader(crowi);
 
 
-  // get Collection instance of chunk
-  const chunkCollection = mongoose.connection.collection(CHUNK_COLLECTION_NAME);
-
-  // create promisified method
-  AttachmentFile.promisifiedWrite = util.promisify(AttachmentFile.write).bind(AttachmentFile);
-  AttachmentFile.promisifiedUnlink = util.promisify(AttachmentFile.unlink).bind(AttachmentFile);
-
   lib.isValidUploadSettings = function() {
   lib.isValidUploadSettings = function() {
     return true;
     return true;
   };
   };
 
 
   (lib as any).deleteFile = async function(attachment) {
   (lib as any).deleteFile = async function(attachment) {
+    const { attachmentFileModel } = initializeGridFSModels();
     const filenameValue = attachment.fileName;
     const filenameValue = attachment.fileName;
 
 
-    const attachmentFile = await AttachmentFile.findOne({ filename: filenameValue });
+    const attachmentFile = await attachmentFileModel.findOne({ filename: filenameValue });
 
 
     if (attachmentFile == null) {
     if (attachmentFile == null) {
       logger.warn(`Any AttachmentFile that relate to the Attachment (${attachment._id.toString()}) does not exist in GridFS`);
       logger.warn(`Any AttachmentFile that relate to the Attachment (${attachment._id.toString()}) does not exist in GridFS`);
       return;
       return;
     }
     }
-    return AttachmentFile.promisifiedUnlink({ _id: attachmentFile._id });
+
+    return attachmentFileModel.promisifiedUnlink({ _id: attachmentFile._id });
   };
   };
 
 
+  /**
+   * Bulk delete files since unlink method of mongoose-gridfs does not support bulk operation
+   */
   (lib as any).deleteFiles = async function(attachments) {
   (lib as any).deleteFiles = async function(attachments) {
+    const { attachmentFileModel, chunkCollection } = initializeGridFSModels();
+
     const filenameValues = attachments.map((attachment) => {
     const filenameValues = attachments.map((attachment) => {
       return attachment.fileName;
       return attachment.fileName;
     });
     });
-    const fileIdObjects = await AttachmentFile.find({ filename: { $in: filenameValues } }, { _id: 1 });
+    const fileIdObjects = await attachmentFileModel.find({ filename: { $in: filenameValues } }, { _id: 1 });
     const idsRelatedFiles = fileIdObjects.map((obj) => { return obj._id });
     const idsRelatedFiles = fileIdObjects.map((obj) => { return obj._id });
 
 
     return Promise.all([
     return Promise.all([
-      AttachmentFile.deleteMany({ filename: { $in: filenameValues } }),
+      attachmentFileModel.deleteMany({ filename: { $in: filenameValues } }),
       chunkCollection.deleteMany({ files_id: { $in: idsRelatedFiles } }),
       chunkCollection.deleteMany({ files_id: { $in: idsRelatedFiles } }),
     ]);
     ]);
   };
   };
 
 
-  /**
-   * get size of data uploaded files using (Promise wrapper)
-   */
-  // const getCollectionSize = () => {
-  //   return new Promise((resolve, reject) => {
-  //     chunkCollection.stats((err, data) => {
-  //       if (err) {
-  //         // return 0 if not exist
-  //         if (err.errmsg.includes('not found')) {
-  //           return resolve(0);
-  //         }
-  //         return reject(err);
-  //       }
-  //       return resolve(data.size);
-  //     });
-  //   });
-  // };
-
   /**
   /**
    * check the file size limit
    * check the file size limit
    *
    *
@@ -172,17 +204,44 @@ module.exports = function(crowi: Crowi) {
   };
   };
 
 
   lib.saveFile = async function({ filePath, contentType, data }) {
   lib.saveFile = async function({ filePath, contentType, data }) {
-    const readable = new Readable();
-    readable.push(data);
-    readable.push(null); // EOF
+    const { attachmentFileModel } = initializeGridFSModels();
 
 
-    return AttachmentFile.promisifiedWrite(
-      {
-        filename: filePath,
-        contentType,
+    // Create a readable stream from the data
+    const readable = new Readable({
+      read() {
+        this.push(data);
+        this.push(null); // EOF
       },
       },
-      readable,
-    );
+    });
+
+    try {
+      // Add error handling to prevent resource leaks
+      readable.on('error', (err) => {
+        logger.error('Readable stream error:', err);
+        readable.destroy();
+        throw err;
+      });
+
+      // Use async/await for cleaner code
+      const result = await attachmentFileModel.promisifiedWrite(
+        {
+          filename: filePath,
+          contentType,
+        },
+        readable,
+      );
+
+      return result;
+    }
+    catch (error) {
+      throw error;
+    }
+    finally {
+      // Explicit cleanup to prevent memory leaks
+      if (typeof readable.destroy === 'function') {
+        readable.destroy();
+      }
+    }
   };
   };
 
 
   /**
   /**
@@ -192,23 +251,26 @@ module.exports = function(crowi: Crowi) {
    * @return {stream.Readable} readable stream
    * @return {stream.Readable} readable stream
    */
    */
   lib.findDeliveryFile = async function(attachment) {
   lib.findDeliveryFile = async function(attachment) {
+    const { attachmentFileModel } = initializeGridFSModels();
     const filenameValue = attachment.fileName;
     const filenameValue = attachment.fileName;
 
 
-    const attachmentFile = await AttachmentFile.findOne({ filename: filenameValue });
+    const attachmentFile = await attachmentFileModel.findOne({ filename: filenameValue });
 
 
     if (attachmentFile == null) {
     if (attachmentFile == null) {
       throw new Error(`Any AttachmentFile that relate to the Attachment (${attachment._id.toString()}) does not exist in GridFS`);
       throw new Error(`Any AttachmentFile that relate to the Attachment (${attachment._id.toString()}) does not exist in GridFS`);
     }
     }
 
 
     // return stream.Readable
     // return stream.Readable
-    return AttachmentFile.read({ _id: attachmentFile._id });
+    return attachmentFileModel.read({ _id: attachmentFile._id });
   };
   };
 
 
   /**
   /**
    * List files in storage
    * List files in storage
    */
    */
   (lib as any).listFiles = async function() {
   (lib as any).listFiles = async function() {
-    const attachmentFiles = await AttachmentFile.find();
+    const { attachmentFileModel } = initializeGridFSModels();
+
+    const attachmentFiles = await attachmentFileModel.find();
     return attachmentFiles.map(({ filename: name, length: size }) => ({
     return attachmentFiles.map(({ filename: name, length: size }) => ({
       name, size,
       name, size,
     }));
     }));

+ 25 - 4
apps/app/src/server/service/file-uploader/local.ts

@@ -15,7 +15,7 @@ import {
   AbstractFileUploader, type TemporaryUrl, type SaveFileParam,
   AbstractFileUploader, type TemporaryUrl, type SaveFileParam,
 } from './file-uploader';
 } from './file-uploader';
 import {
 import {
-  ContentHeaders, applyHeaders,
+  applyHeaders, createContentHeaders, toExpressHttpHeaders,
 } from './utils';
 } from './utils';
 
 
 
 
@@ -166,7 +166,28 @@ module.exports = function(crowi: Crowi) {
 
 
     const writeStream: Writable = fs.createWriteStream(filePath);
     const writeStream: Writable = fs.createWriteStream(filePath);
 
 
-    return pipeline(fileStream, writeStream);
+    try {
+      const uploadTimeout = configManager.getConfig('app:fileUploadTimeout');
+      await pipeline(
+        fileStream,
+        writeStream,
+        { signal: AbortSignal.timeout(uploadTimeout) },
+      );
+
+      logger.debug(`File upload completed successfully: fileName=${attachment.fileName}`);
+    }
+    catch (error) {
+      // Handle timeout error specifically
+      if (error.name === 'AbortError') {
+        logger.warn(`Upload timeout: fileName=${attachment.fileName}`, error);
+      }
+      else {
+        logger.error(`File upload failed: fileName=${attachment.fileName}`, error);
+      }
+      // Re-throw the error to be handled by the caller.
+      // The pipeline automatically handles stream cleanup on error.
+      throw error;
+    }
   };
   };
 
 
   lib.saveFile = async function({ filePath, contentType, data }) {
   lib.saveFile = async function({ filePath, contentType, data }) {
@@ -229,9 +250,9 @@ module.exports = function(crowi: Crowi) {
     const internalPath = urljoin(internalPathRoot, relativePath);
     const internalPath = urljoin(internalPathRoot, relativePath);
 
 
     const isDownload = opts?.download ?? false;
     const isDownload = opts?.download ?? false;
-    const contentHeaders = new ContentHeaders(attachment, { inline: !isDownload });
+    const contentHeaders = createContentHeaders(attachment, { inline: !isDownload });
     applyHeaders(res, [
     applyHeaders(res, [
-      ...contentHeaders.toExpressHttpHeaders(),
+      ...toExpressHttpHeaders(contentHeaders),
       { field: 'X-Accel-Redirect', value: internalPath },
       { field: 'X-Accel-Redirect', value: internalPath },
       { field: 'X-Sendfile', value: storagePath },
       { field: 'X-Sendfile', value: storagePath },
     ]);
     ]);

+ 47 - 47
apps/app/src/server/service/file-uploader/utils/headers.ts

@@ -1,68 +1,68 @@
 import type { Response } from 'express';
 import type { Response } from 'express';
 
 
-import type { ExpressHttpHeader, IContentHeaders } from '~/server/interfaces/attachment';
+import type { ExpressHttpHeader } from '~/server/interfaces/attachment';
 import type { IAttachmentDocument } from '~/server/models/attachment';
 import type { IAttachmentDocument } from '~/server/models/attachment';
 
 
+type ContentHeaderField = 'Content-Type' | 'Content-Security-Policy' | 'Content-Disposition' | 'Content-Length';
+type ContentHeader = ExpressHttpHeader<ContentHeaderField>;
 
 
-export class ContentHeaders implements IContentHeaders {
-
-  contentType?: ExpressHttpHeader<'Content-Type'>;
+/**
+ * Factory function to generate content headers.
+ * This approach avoids creating a class instance for each call, improving memory efficiency.
+ */
+export const createContentHeaders = (attachment: IAttachmentDocument, opts?: { inline?: boolean }): ContentHeader[] => {
+  const headers: ContentHeader[] = [];
 
 
-  contentLength?: ExpressHttpHeader<'Content-Length'>;
+  // Content-Type
+  headers.push({
+    field: 'Content-Type',
+    value: attachment.fileFormat,
+  });
 
 
-  contentSecurityPolicy?: ExpressHttpHeader<'Content-Security-Policy'>;
+  // Content-Security-Policy
+  headers.push({
+    field: 'Content-Security-Policy',
+    // eslint-disable-next-line max-len
+    value: "script-src 'unsafe-hashes'; style-src 'self' 'unsafe-inline'; object-src 'none'; require-trusted-types-for 'script'; media-src 'self'; default-src 'none';",
+  });
 
 
-  contentDisposition?: ExpressHttpHeader<'Content-Disposition'>;
+  // Content-Disposition
+  headers.push({
+    field: 'Content-Disposition',
+    value: `${opts?.inline ? 'inline' : 'attachment'};filename*=UTF-8''${encodeURIComponent(attachment.originalName)}`,
+  });
 
 
-  constructor(attachment: IAttachmentDocument, opts?: {
-    inline?: boolean,
-  }) {
+  // Content-Length
+  if (attachment.fileSize != null) {
+    headers.push({
+      field: 'Content-Length',
+      value: attachment.fileSize.toString(),
+    });
+  }
 
 
-    this.contentType = {
-      field: 'Content-Type',
-      value: attachment.fileFormat,
-    };
-    this.contentSecurityPolicy = {
-      field: 'Content-Security-Policy',
-      // eslint-disable-next-line max-len
-      value: "script-src 'unsafe-hashes'; style-src 'self' 'unsafe-inline'; object-src 'none'; require-trusted-types-for 'script'; media-src 'self'; default-src 'none';",
-    };
-    this.contentDisposition = {
-      field: 'Content-Disposition',
-      value: `${opts?.inline ? 'inline' : 'attachment'};filename*=UTF-8''${encodeURIComponent(attachment.originalName)}`,
-    };
+  return headers;
+};
 
 
-    if (attachment.fileSize) {
-      this.contentLength = {
-        field: 'Content-Length',
-        value: attachment.fileSize.toString(),
-      };
-    }
-  }
+export const getContentHeaderValue = (contentHeaders: ContentHeader[], field: ContentHeaderField): string | undefined => {
+  const header = contentHeaders.find(h => h.field === field);
+  return header?.value.toString();
+};
 
 
-  /**
-   * Convert to ExpressHttpHeader[]
-   */
-  toExpressHttpHeaders(): ExpressHttpHeader[] {
-    return [
-      this.contentType,
-      this.contentLength,
-      this.contentSecurityPolicy,
-      this.contentDisposition,
-    ]
+/**
+ * Convert to ExpressHttpHeader[]
+ */
+export function toExpressHttpHeaders(records: Record<string, string | string[]>): ExpressHttpHeader[];
+export function toExpressHttpHeaders(contentHeaders: ContentHeader[]): ExpressHttpHeader[];
+export function toExpressHttpHeaders(arg: Record<string, string | string[]> | ContentHeader[]): ExpressHttpHeader[] {
+  if (Array.isArray(arg)) {
+    return arg
       // exclude undefined
       // exclude undefined
       .filter((member): member is NonNullable<typeof member> => member != null);
       .filter((member): member is NonNullable<typeof member> => member != null);
   }
   }
 
 
+  return Object.entries(arg).map(([field, value]) => { return { field, value } });
 }
 }
 
 
-/**
- * Convert Record to ExpressHttpHeader[]
- */
-export const toExpressHttpHeaders = (records: Record<string, string | string[]>): ExpressHttpHeader[] => {
-  return Object.entries(records).map(([field, value]) => { return { field, value } });
-};
-
 export const applyHeaders = (res: Response, headers: ExpressHttpHeader[]): void => {
 export const applyHeaders = (res: Response, headers: ExpressHttpHeader[]): void => {
   headers.forEach((header) => {
   headers.forEach((header) => {
     res.header(header.field, header.value);
     res.header(header.field, header.value);

+ 6 - 6
apps/app/src/server/service/g2g-transfer.ts

@@ -189,7 +189,7 @@ interface Receiver {
     innerFileStats: any[],
     innerFileStats: any[],
     optionsMap: { [key: string]: GrowiArchiveImportOption; },
     optionsMap: { [key: string]: GrowiArchiveImportOption; },
     operatorUserId: string,
     operatorUserId: string,
-  ): { [key: string]: ImportSettings; }
+  ): Map<string, ImportSettings>
   /**
   /**
    * Import collections
    * Import collections
    * @param {string} collections Array of collection name
    * @param {string} collections Array of collection name
@@ -198,7 +198,7 @@ interface Receiver {
    */
    */
   importCollections(
   importCollections(
     collections: string[],
     collections: string[],
-    importSettingsMap: { [key: string]: ImportSettings; },
+    importSettingsMap: Map<string, ImportSettings>,
     sourceGROWIUploadConfigs: FileUploadConfigs,
     sourceGROWIUploadConfigs: FileUploadConfigs,
   ): Promise<void>
   ): Promise<void>
   /**
   /**
@@ -618,8 +618,8 @@ export class G2GTransferReceiverService implements Receiver {
       innerFileStats: any[],
       innerFileStats: any[],
       optionsMap: { [key: string]: GrowiArchiveImportOption; },
       optionsMap: { [key: string]: GrowiArchiveImportOption; },
       operatorUserId: string,
       operatorUserId: string,
-  ): { [key: string]: ImportSettings; } {
-    const importSettingsMap = {};
+  ): Map<string, ImportSettings> {
+    const importSettingsMap = new Map<string, ImportSettings>();
     innerFileStats.forEach(({ fileName, collectionName }) => {
     innerFileStats.forEach(({ fileName, collectionName }) => {
       const options = new GrowiArchiveImportOption(collectionName, undefined, optionsMap[collectionName]);
       const options = new GrowiArchiveImportOption(collectionName, undefined, optionsMap[collectionName]);
 
 
@@ -641,7 +641,7 @@ export class G2GTransferReceiverService implements Receiver {
         jsonFileName: fileName,
         jsonFileName: fileName,
         overwriteParams: generateOverwriteParams(collectionName, operatorUserId, options),
         overwriteParams: generateOverwriteParams(collectionName, operatorUserId, options),
       };
       };
-      importSettingsMap[collectionName] = importSettings;
+      importSettingsMap.set(collectionName, importSettings);
     });
     });
 
 
     return importSettingsMap;
     return importSettingsMap;
@@ -649,7 +649,7 @@ export class G2GTransferReceiverService implements Receiver {
 
 
   public async importCollections(
   public async importCollections(
       collections: string[],
       collections: string[],
-      importSettingsMap: { [key: string]: ImportSettings; },
+      importSettingsMap: Map<string, ImportSettings>,
       sourceGROWIUploadConfigs: FileUploadConfigs,
       sourceGROWIUploadConfigs: FileUploadConfigs,
   ): Promise<void> {
   ): Promise<void> {
     const { appService } = this.crowi;
     const { appService } = this.crowi;

+ 1 - 3
apps/app/src/server/service/growi-bridge/index.ts

@@ -19,7 +19,7 @@ const logger = loggerFactory('growi:services:GrowiBridgeService'); // eslint-dis
  * the service class for bridging GROWIs (export and import)
  * the service class for bridging GROWIs (export and import)
  * common properties and methods between export service and import service are defined in this service
  * common properties and methods between export service and import service are defined in this service
  */
  */
-class GrowiBridgeService {
+export class GrowiBridgeService {
 
 
   crowi: Crowi;
   crowi: Crowi;
 
 
@@ -123,5 +123,3 @@ class GrowiBridgeService {
   }
   }
 
 
 }
 }
-
-export default GrowiBridgeService;

+ 1 - 1
apps/app/src/server/service/i18next.ts

@@ -7,7 +7,7 @@ import resourcesToBackend from 'i18next-resources-to-backend';
 
 
 import * as i18nextConfig from '^/config/i18next.config';
 import * as i18nextConfig from '^/config/i18next.config';
 
 
-import { resolveFromRoot } from '~/utils/project-dir-utils';
+import { resolveFromRoot } from '~/server/util/project-dir-utils';
 
 
 import { configManager } from './config-manager';
 import { configManager } from './config-manager';
 
 

+ 23 - 3
apps/app/src/server/service/import/construct-convert-map.ts

@@ -10,10 +10,29 @@ export type ConvertMap = {
   }
   }
 }
 }
 
 
+/**
+ * Special conversion functions for problematic fields
+ * Add entries here for fields that require custom handling during import
+ */
+const SPECIAL_CONVERT_FUNCTIONS: Record<string, Record<string, OverwriteFunction>> = {
+  activities: {
+    snapshot: (value: unknown) => value, // Skip SubdocumentPath casting to avoid Mongoose errors
+  },
+  // Add more collections and fields as needed:
+  // otherCollection: {
+  //   problematicField: (value: unknown) => customProcessing(value),
+  // },
+};
+
+/**
+ * Get special conversion function for a specific collection.field combination
+ */
+const getSpecialConvertFunction = (collectionName: string, propertyName: string): OverwriteFunction | null => {
+  return SPECIAL_CONVERT_FUNCTIONS[collectionName]?.[propertyName] ?? null;
+};
+
 /**
 /**
  * Initialize convert map. set keepOriginal as default
  * Initialize convert map. set keepOriginal as default
- *
- * @param {Crowi} crowi Crowi instance
  */
  */
 export const constructConvertMap = (): ConvertMap => {
 export const constructConvertMap = (): ConvertMap => {
   const convertMap: ConvertMap = {};
   const convertMap: ConvertMap = {};
@@ -30,7 +49,8 @@ export const constructConvertMap = (): ConvertMap => {
     convertMap[collectionName] = {};
     convertMap[collectionName] = {};
 
 
     for (const key of Object.keys(model.schema.paths)) {
     for (const key of Object.keys(model.schema.paths)) {
-      convertMap[collectionName][key] = keepOriginal;
+      const specialHandler = getSpecialConvertFunction(collectionName, key);
+      convertMap[collectionName][key] = specialHandler ?? keepOriginal;
     }
     }
   });
   });
 
 

+ 131 - 95
apps/app/src/server/service/import/import.ts

@@ -1,13 +1,13 @@
 import fs from 'fs';
 import fs from 'fs';
 import path from 'path';
 import path from 'path';
 import type { EventEmitter } from 'stream';
 import type { EventEmitter } from 'stream';
-import { Writable, Transform, pipeline } from 'stream';
-import { finished, pipeline as pipelinePromise } from 'stream/promises';
+import { Writable, Transform } from 'stream';
+import { pipeline } from 'stream/promises';
 
 
 import JSONStream from 'JSONStream';
 import JSONStream from 'JSONStream';
 import gc from 'expose-gc/function';
 import gc from 'expose-gc/function';
 import type {
 import type {
-  BulkWriteResult, MongoBulkWriteError, UnorderedBulkOperation, WriteError,
+  BulkWriteResult, MongoBulkWriteError, UnorderedBulkOperation, WriteError, BulkOperationBase,
 } from 'mongodb';
 } from 'mongodb';
 import type { Document } from 'mongoose';
 import type { Document } from 'mongoose';
 import mongoose from 'mongoose';
 import mongoose from 'mongoose';
@@ -51,6 +51,8 @@ class ImportingCollectionError extends Error {
 
 
 export class ImportService {
 export class ImportService {
 
 
+  private modelCache: Map<string, { Model: any, schema: any }> = new Map();
+
   private crowi: Crowi;
   private crowi: Crowi;
 
 
   private growiBridgeService: any;
   private growiBridgeService: any;
@@ -59,7 +61,7 @@ export class ImportService {
 
 
   private currentProgressingStatus: CollectionProgressingStatus | null;
   private currentProgressingStatus: CollectionProgressingStatus | null;
 
 
-  private convertMap: ConvertMap;
+  private convertMap: ConvertMap | undefined;
 
 
   constructor(crowi: Crowi) {
   constructor(crowi: Crowi) {
     this.crowi = crowi;
     this.crowi = crowi;
@@ -139,7 +141,7 @@ export class ImportService {
    * @param collections MongoDB collection name
    * @param collections MongoDB collection name
    * @param importSettingsMap
    * @param importSettingsMap
    */
    */
-  async import(collections: string[], importSettingsMap: { [collectionName: string]: ImportSettings }): Promise<void> {
+  async import(collections: string[], importSettingsMap: Map<string, ImportSettings>): Promise<void> {
     await this.preImport();
     await this.preImport();
 
 
     // init status object
     // init status object
@@ -147,7 +149,10 @@ export class ImportService {
 
 
     // process serially so as not to waste memory
     // process serially so as not to waste memory
     const promises = collections.map((collectionName) => {
     const promises = collections.map((collectionName) => {
-      const importSettings = importSettingsMap[collectionName];
+      const importSettings = importSettingsMap.get(collectionName);
+      if (importSettings == null) {
+        throw new Error(`ImportSettings for ${collectionName} is not found`);
+      }
       return this.importCollection(collectionName, importSettings);
       return this.importCollection(collectionName, importSettings);
     });
     });
     for await (const promise of promises) {
     for await (const promise of promises) {
@@ -172,6 +177,10 @@ export class ImportService {
     const shouldNormalizePages = currentIsV5Compatible && isImportPagesCollection;
     const shouldNormalizePages = currentIsV5Compatible && isImportPagesCollection;
 
 
     if (shouldNormalizePages) await this.crowi.pageService.normalizeAllPublicPages();
     if (shouldNormalizePages) await this.crowi.pageService.normalizeAllPublicPages();
+
+    // Release caches after import process
+    this.modelCache.clear();
+    this.convertMap = undefined;
   }
   }
 
 
   /**
   /**
@@ -183,13 +192,7 @@ export class ImportService {
     if (this.currentProgressingStatus == null) {
     if (this.currentProgressingStatus == null) {
       throw new Error('Something went wrong: currentProgressingStatus is not initialized');
       throw new Error('Something went wrong: currentProgressingStatus is not initialized');
     }
     }
-
-    // prepare functions invoked from custom streams
-    const convertDocuments = this.convertDocuments.bind(this);
-    const bulkOperate = this.bulkOperate.bind(this);
-    const execUnorderedBulkOpSafely = this.execUnorderedBulkOpSafely.bind(this);
-    const emitProgressEvent = this.emitProgressEvent.bind(this);
-
+    // Avoid closure references by passing direct method references
     const collection = mongoose.connection.collection(collectionName);
     const collection = mongoose.connection.collection(collectionName);
 
 
     const { mode, jsonFileName, overwriteParams } = importSettings;
     const { mode, jsonFileName, overwriteParams } = importSettings;
@@ -215,52 +218,71 @@ export class ImportService {
       // stream 3
       // stream 3
       const convertStream = new Transform({
       const convertStream = new Transform({
         objectMode: true,
         objectMode: true,
-        transform(doc, encoding, callback) {
-          const converted = convertDocuments(collectionName, doc, overwriteParams);
-          this.push(converted);
-          callback();
+        transform(this: Transform, doc, encoding, callback) {
+          try {
+          // Direct reference to convertDocuments
+            const converted = (importSettings as any).service.convertDocuments(collectionName, doc, overwriteParams);
+            this.push(converted);
+            callback();
+          }
+          catch (error) {
+            callback(error);
+          }
         },
         },
       });
       });
+      // Reference for importService within Transform
+      (importSettings as any).service = this;
 
 
       // stream 4
       // stream 4
       const batchStream = createBatchStream(BULK_IMPORT_SIZE);
       const batchStream = createBatchStream(BULK_IMPORT_SIZE);
-
-      // stream 5
       const writeStream = new Writable({
       const writeStream = new Writable({
         objectMode: true,
         objectMode: true,
-        async write(batch, encoding, callback) {
-          const unorderedBulkOp = collection.initializeUnorderedBulkOp();
-
-          // documents are not persisted until unorderedBulkOp.execute()
-          batch.forEach((document) => {
-            bulkOperate(unorderedBulkOp, collectionName, document, importSettings);
-          });
-
-          // exec
-          const { result, errors } = await execUnorderedBulkOpSafely(unorderedBulkOp);
-          const { insertedCount, modifiedCount } = result;
-          const errorCount = errors?.length ?? 0;
-
-          logger.debug(`Importing ${collectionName}. Inserted: ${insertedCount}. Modified: ${modifiedCount}. Failed: ${errorCount}.`);
-
-          const increment = insertedCount + modifiedCount + errorCount;
-          collectionProgress.currentCount += increment;
-          collectionProgress.totalCount += increment;
-          collectionProgress.insertedCount += insertedCount;
-          collectionProgress.modifiedCount += modifiedCount;
-
-          emitProgressEvent(collectionProgress, errors);
-
+        write: async(batch, encoding, callback) => {
           try {
           try {
+            const unorderedBulkOp = collection.initializeUnorderedBulkOp();
+            // documents are not persisted until unorderedBulkOp.execute()
+            batch.forEach((document) => {
+              this.bulkOperate(unorderedBulkOp, collectionName, document, importSettings);
+            });
+
+            // exec
+            const { result, errors } = await this.execUnorderedBulkOpSafely(unorderedBulkOp);
+            const {
+              insertedCount, modifiedCount, upsertedCount, matchedCount,
+            } = result;
+            const errorCount = errors?.length ?? 0;
+
+            // For upsert operations, count matched documents as modified
+            const actualModifiedCount = importSettings.mode === ImportMode.upsert
+              ? (matchedCount || 0) // In upsert mode, matchedCount indicates documents that were found and potentially updated
+              : modifiedCount;
+
+            const actualInsertedCount = importSettings.mode === ImportMode.upsert
+              ? (upsertedCount || 0) // In upsert mode, upsertedCount indicates newly created documents
+              : insertedCount;
+
+            logger.debug(`Importing ${collectionName}. Inserted: ${actualInsertedCount}. Modified: ${actualModifiedCount}. Failed: ${errorCount}.`
+              + ` (Raw: inserted=${insertedCount}, modified=${modifiedCount}, upserted=${upsertedCount}, matched=${matchedCount})`);
+            const increment = actualInsertedCount + actualModifiedCount + errorCount;
+            collectionProgress.currentCount += increment;
+            collectionProgress.totalCount += increment;
+            collectionProgress.insertedCount += actualInsertedCount;
+            collectionProgress.modifiedCount += actualModifiedCount;
+            this.emitProgressEvent(collectionProgress, errors);
             // First aid to prevent unexplained memory leaks
             // First aid to prevent unexplained memory leaks
-            logger.info('global.gc() invoked.');
-            gc();
+            try {
+              logger.info('global.gc() invoked.');
+              gc();
+            }
+            catch (err) {
+              logger.error('fail garbage collection: ', err);
+            }
+            callback();
           }
           }
           catch (err) {
           catch (err) {
-            logger.error('fail garbage collection: ', err);
+            logger.error('Error in writeStream:', err);
+            callback(err);
           }
           }
-
-          callback();
         },
         },
         final(callback) {
         final(callback) {
           logger.info(`Importing ${collectionName} has completed.`);
           logger.info(`Importing ${collectionName} has completed.`);
@@ -268,7 +290,13 @@ export class ImportService {
         },
         },
       });
       });
 
 
-      await pipelinePromise(readStream, jsonStream, convertStream, batchStream, writeStream);
+      await pipeline(readStream, jsonStream, convertStream, batchStream, writeStream);
+
+      // Ensure final progress event is emitted even when no data was processed
+      if (collectionProgress.currentCount === 0) {
+        logger.info(`No data processed for collection ${collectionName}. Emitting final progress event.`);
+        this.emitProgressEvent(collectionProgress, null);
+      }
 
 
       // clean up tmp directory
       // clean up tmp directory
       fs.unlinkSync(jsonFile);
       fs.unlinkSync(jsonFile);
@@ -276,15 +304,9 @@ export class ImportService {
     catch (err) {
     catch (err) {
       throw new ImportingCollectionError(collectionProgress, err);
       throw new ImportingCollectionError(collectionProgress, err);
     }
     }
-
   }
   }
 
 
-  /**
-   *
-   * @param {string} collectionName
-   * @param {importSettings} importSettings
-   */
-  validateImportSettings(collectionName, importSettings) {
+  validateImportSettings(collectionName: string, importSettings: ImportSettings): void {
     const { mode } = importSettings;
     const { mode } = importSettings;
 
 
     switch (collectionName) {
     switch (collectionName) {
@@ -298,15 +320,18 @@ export class ImportService {
 
 
   /**
   /**
    * process bulk operation
    * process bulk operation
-   * @param bulk MongoDB Bulk instance
-   * @param collectionName collection name
    */
    */
-  bulkOperate(bulk, collectionName: string, document, importSettings: ImportSettings) {
+  bulkOperate(
+      bulk: UnorderedBulkOperation,
+      collectionName: string,
+      document: Record<string, unknown>,
+      importSettings: ImportSettings,
+  ): BulkOperationBase | void {
     // insert
     // insert
     if (importSettings.mode !== ImportMode.upsert) {
     if (importSettings.mode !== ImportMode.upsert) {
+      // Optimization such as splitting and adding large documents can be considered
       return bulk.insert(document);
       return bulk.insert(document);
     }
     }
-
     // upsert
     // upsert
     switch (collectionName) {
     switch (collectionName) {
       case 'pages':
       case 'pages':
@@ -321,7 +346,7 @@ export class ImportService {
    * @param {CollectionProgress} collectionProgress
    * @param {CollectionProgress} collectionProgress
    * @param {object} appendedErrors key: collection name, value: array of error object
    * @param {object} appendedErrors key: collection name, value: array of error object
    */
    */
-  emitProgressEvent(collectionProgress, appendedErrors) {
+  emitProgressEvent(collectionProgress: CollectionProgress, appendedErrors: any): void {
     const { collectionName } = collectionProgress;
     const { collectionName } = collectionProgress;
 
 
     // send event (in progress in global)
     // send event (in progress in global)
@@ -331,7 +356,7 @@ export class ImportService {
   /**
   /**
    * emit terminate event
    * emit terminate event
    */
    */
-  emitTerminateEvent() {
+  emitTerminateEvent(): void {
     this.adminEvent.emit('onTerminateForImport');
     this.adminEvent.emit('onTerminateForImport');
   }
   }
 
 
@@ -342,13 +367,12 @@ export class ImportService {
    * @param {string} zipFile absolute path to zip file
    * @param {string} zipFile absolute path to zip file
    * @return {Array.<string>} array of absolute paths to extracted files
    * @return {Array.<string>} array of absolute paths to extracted files
    */
    */
-  async unzip(zipFile) {
+  async unzip(zipFile: string): Promise<string[]> {
     const readStream = fs.createReadStream(zipFile);
     const readStream = fs.createReadStream(zipFile);
     const parseStream = unzipStream.Parse();
     const parseStream = unzipStream.Parse();
-    const unzipEntryStream = pipeline(readStream, parseStream, () => {});
-    const files: string[] = [];
+    const entryPromises: Promise<string | null>[] = [];
 
 
-    unzipEntryStream.on('entry', (/** @type {Entry} */ entry) => {
+    parseStream.on('entry', (/** @type {Entry} */ entry) => {
       const fileName = entry.path;
       const fileName = entry.path;
       // https://regex101.com/r/mD4eZs/6
       // https://regex101.com/r/mD4eZs/6
       // prevent from unexpecting attack doing unzip file (path traversal attack)
       // prevent from unexpecting attack doing unzip file (path traversal attack)
@@ -356,6 +380,7 @@ export class ImportService {
       // ../../src/server/example.html
       // ../../src/server/example.html
       if (fileName.match(/(\.\.\/|\.\.\\)/)) {
       if (fileName.match(/(\.\.\/|\.\.\\)/)) {
         logger.error('File path is not appropriate.', fileName);
         logger.error('File path is not appropriate.', fileName);
+        entry.autodrain();
         return;
         return;
       }
       }
 
 
@@ -364,16 +389,28 @@ export class ImportService {
         entry.autodrain();
         entry.autodrain();
       }
       }
       else {
       else {
-        const jsonFile = path.join(this.baseDir, fileName);
-        const writeStream = fs.createWriteStream(jsonFile, { encoding: this.growiBridgeService.getEncoding() });
-        pipeline(entry, writeStream, () => {});
-        files.push(jsonFile);
+        const entryPromise = new Promise<string | null>((resolve) => {
+          const jsonFile = path.join(this.baseDir, fileName);
+          const writeStream = fs.createWriteStream(jsonFile, { encoding: this.growiBridgeService.getEncoding() });
+
+          pipeline(entry, writeStream)
+            .then(() => resolve(jsonFile))
+            .catch((err) => {
+              logger.error('Failed to extract entry:', err);
+              resolve(null); // Continue processing other entries
+            });
+        });
+
+        entryPromises.push(entryPromise);
       }
       }
     });
     });
 
 
-    await finished(unzipEntryStream);
+    await pipeline(readStream, parseStream);
+    const results = await Promise.allSettled(entryPromises);
 
 
-    return files;
+    return results
+      .filter((result): result is PromiseFulfilledResult<string> => result.status === 'fulfilled' && result.value !== null)
+      .map(result => result.value);
   }
   }
 
 
   /**
   /**
@@ -414,32 +451,32 @@ export class ImportService {
    * @returns document to be persisted
    * @returns document to be persisted
    */
    */
   convertDocuments<D extends Document>(collectionName: string, document: D, overwriteParams: OverwriteParams): D {
   convertDocuments<D extends Document>(collectionName: string, document: D, overwriteParams: OverwriteParams): D {
-    const Model = getModelFromCollectionName(collectionName);
-    const schema = (Model != null) ? Model.schema : undefined;
-    const convertMap = this.convertMap[collectionName];
+  // Model and schema cache (optimization)
+    if (!this.modelCache) {
+      this.modelCache = new Map();
+    }
 
 
-    const _document: D = structuredClone(document);
+    let modelInfo = this.modelCache.get(collectionName);
+    if (!modelInfo) {
+      const Model = getModelFromCollectionName(collectionName);
+      const schema = (Model != null) ? Model.schema : undefined;
+      modelInfo = { Model, schema };
+      this.modelCache.set(collectionName, modelInfo);
+    }
 
 
-    // apply keepOriginal to all of properties
-    Object.entries(document).forEach(([propertyName, value]) => {
-      _document[propertyName] = keepOriginal(value, { document, propertyName });
-    });
+    const { schema } = modelInfo;
+    const convertMap = this.convertMap?.[collectionName];
 
 
-    // Mongoose Model
-    if (convertMap != null) {
-      // assign value from documents being imported
-      Object.entries(convertMap).forEach(([propertyName, convertedValue]) => {
-        const value = document[propertyName];
+    // Use shallow copy instead of structuredClone() when sufficient
+    const _document: D = (typeof document === 'object' && document !== null && !Array.isArray(document)) ? { ...document } : structuredClone(document);
 
 
-        // distinguish between null and undefined
-        if (value === undefined) {
-          return; // next entry
-        }
+    Object.entries(document).forEach(([propertyName, value]) => {
+      // Check if there's a custom convert function for this property, otherwise use keepOriginal
+      const convertedValue = convertMap?.[propertyName];
+      const convertFunc = (convertedValue != null && typeof convertedValue === 'function') ? convertedValue : keepOriginal;
 
 
-        const convertFunc = (typeof convertedValue === 'function') ? convertedValue : null;
-        _document[propertyName] = (convertFunc != null) ? convertFunc(value, { document, propertyName, schema }) : convertedValue;
-      });
-    }
+      _document[propertyName] = convertFunc(value, { document, propertyName, schema });
+    });
 
 
     // overwrite documents with custom values
     // overwrite documents with custom values
     Object.entries(overwriteParams).forEach(([propertyName, overwriteValue]) => {
     Object.entries(overwriteParams).forEach(([propertyName, overwriteValue]) => {
@@ -451,7 +488,6 @@ export class ImportService {
         _document[propertyName] = (overwriteFunc != null) ? overwriteFunc(value, { document: _document, propertyName, schema }) : overwriteValue;
         _document[propertyName] = (overwriteFunc != null) ? overwriteFunc(value, { document: _document, propertyName, schema }) : overwriteValue;
       }
       }
     });
     });
-
     return _document;
     return _document;
   }
   }
 
 
@@ -463,7 +499,7 @@ export class ImportService {
    * @memberOf ImportService
    * @memberOf ImportService
    * @param {object} meta meta data from meta.json
    * @param {object} meta meta data from meta.json
    */
    */
-  validate(meta) {
+  validate(meta: any): void {
     if (meta.version !== getGrowiVersion()) {
     if (meta.version !== getGrowiVersion()) {
       throw new Error('The version of this GROWI and the uploaded GROWI data are not the same');
       throw new Error('The version of this GROWI and the uploaded GROWI data are not the same');
     }
     }
@@ -476,7 +512,7 @@ export class ImportService {
   /**
   /**
    * Delete all uploaded files
    * Delete all uploaded files
    */
    */
-  deleteAllZipFiles() {
+  deleteAllZipFiles(): void {
     fs.readdirSync(this.baseDir)
     fs.readdirSync(this.baseDir)
       .filter(file => path.extname(file) === '.zip')
       .filter(file => path.extname(file) === '.zip')
       .forEach(file => fs.unlinkSync(path.join(this.baseDir, file)));
       .forEach(file => fs.unlinkSync(path.join(this.baseDir, file)));

+ 34 - 8
apps/app/src/server/service/search-delegator/elasticsearch.ts

@@ -23,7 +23,9 @@ import { configManager } from '../config-manager';
 import type { UpdateOrInsertPagesOpts } from '../interfaces/search';
 import type { UpdateOrInsertPagesOpts } from '../interfaces/search';
 
 
 import { aggregatePipelineToIndex } from './aggregate-to-index';
 import { aggregatePipelineToIndex } from './aggregate-to-index';
-import type { AggregatedPage, BulkWriteBody, BulkWriteCommand } from './bulk-write';
+import type {
+  AggregatedPage, BulkWriteBody, BulkWriteCommand, BulkWriteBodyRestriction,
+} from './bulk-write';
 import {
 import {
   getClient,
   getClient,
   isES7ClientDelegator,
   isES7ClientDelegator,
@@ -75,6 +77,10 @@ class ElasticsearchDelegator implements SearchDelegator<Data, ESTermsKey, ESQuer
 
 
   private indexName: string;
   private indexName: string;
 
 
+  private pageModel?: PageModel;
+
+  private userModel?: typeof mongoose.Model;
+
   constructor(socketIoService: SocketIoService) {
   constructor(socketIoService: SocketIoService) {
     this.name = SearchDelegatorName.DEFAULT;
     this.name = SearchDelegatorName.DEFAULT;
     this.socketIoService = socketIoService;
     this.socketIoService = socketIoService;
@@ -92,6 +98,26 @@ class ElasticsearchDelegator implements SearchDelegator<Data, ESTermsKey, ESQuer
     this.isElasticsearchReindexOnBoot = configManager.getConfig('app:elasticsearchReindexOnBoot');
     this.isElasticsearchReindexOnBoot = configManager.getConfig('app:elasticsearchReindexOnBoot');
   }
   }
 
 
+  /**
+   * Get Page model with proper typing
+   */
+  private getPageModel(): PageModel {
+    if (!this.pageModel) {
+      this.pageModel = mongoose.model<IPage, PageModel>('Page');
+    }
+    return this.pageModel;
+  }
+
+  /**
+   * Get User model with proper typing
+   */
+  private getUserModel() {
+    if (!this.userModel) {
+      this.userModel = mongoose.model('User');
+    }
+    return this.userModel;
+  }
+
   get aliasName(): string {
   get aliasName(): string {
     return `${this.indexName}-alias`;
     return `${this.indexName}-alias`;
   }
   }
@@ -359,7 +385,7 @@ class ElasticsearchDelegator implements SearchDelegator<Data, ESTermsKey, ESQuer
   /**
   /**
    * generate object that is related to page.grant*
    * generate object that is related to page.grant*
    */
    */
-  generateDocContentsRelatedToRestriction(page: AggregatedPage) {
+  generateDocContentsRelatedToRestriction(page: AggregatedPage): BulkWriteBodyRestriction {
     const grantedUserIds = page.grantedUsers.map(user => getIdStringForRef(user));
     const grantedUserIds = page.grantedUsers.map(user => getIdStringForRef(user));
     const grantedGroupIds = page.grantedGroups.map(group => getIdStringForRef(group.item));
     const grantedGroupIds = page.grantedGroups.map(group => getIdStringForRef(group.item));
 
 
@@ -416,17 +442,17 @@ class ElasticsearchDelegator implements SearchDelegator<Data, ESTermsKey, ESQuer
   }
   }
 
 
   addAllPages() {
   addAllPages() {
-    const Page = mongoose.model('Page');
+    const Page = this.getPageModel();
     return this.updateOrInsertPages(() => Page.find(), { shouldEmitProgress: true, invokeGarbageCollection: true });
     return this.updateOrInsertPages(() => Page.find(), { shouldEmitProgress: true, invokeGarbageCollection: true });
   }
   }
 
 
   updateOrInsertPageById(pageId) {
   updateOrInsertPageById(pageId) {
-    const Page = mongoose.model('Page');
+    const Page = this.getPageModel();
     return this.updateOrInsertPages(() => Page.findById(pageId));
     return this.updateOrInsertPages(() => Page.findById(pageId));
   }
   }
 
 
   updateOrInsertDescendantsPagesById(page, user) {
   updateOrInsertDescendantsPagesById(page, user) {
-    const Page = mongoose.model('Page') as unknown as PageModel;
+    const Page = this.getPageModel();
     const { PageQueryBuilder } = Page;
     const { PageQueryBuilder } = Page;
     const builder = new PageQueryBuilder(Page.find());
     const builder = new PageQueryBuilder(Page.find());
     builder.addConditionToListWithDescendants(page.path);
     builder.addConditionToListWithDescendants(page.path);
@@ -439,7 +465,7 @@ class ElasticsearchDelegator implements SearchDelegator<Data, ESTermsKey, ESQuer
   async updateOrInsertPages(queryFactory, option: UpdateOrInsertPagesOpts = {}): Promise<void> {
   async updateOrInsertPages(queryFactory, option: UpdateOrInsertPagesOpts = {}): Promise<void> {
     const { shouldEmitProgress = false, invokeGarbageCollection = false } = option;
     const { shouldEmitProgress = false, invokeGarbageCollection = false } = option;
 
 
-    const Page = mongoose.model<IPage, PageModel>('Page');
+    const Page = this.getPageModel();
     const { PageQueryBuilder } = Page;
     const { PageQueryBuilder } = Page;
 
 
     const socket = shouldEmitProgress ? this.socketIoService.getAdminSocket() : null;
     const socket = shouldEmitProgress ? this.socketIoService.getAdminSocket() : null;
@@ -827,7 +853,7 @@ class ElasticsearchDelegator implements SearchDelegator<Data, ESTermsKey, ESQuer
       throw new Error('query.body.query.bool is not initialized');
       throw new Error('query.body.query.bool is not initialized');
     }
     }
 
 
-    const Page = mongoose.model('Page') as unknown as PageModel;
+    const Page = this.getPageModel();
     const {
     const {
       GRANT_PUBLIC, GRANT_SPECIFIED, GRANT_OWNER, GRANT_USER_GROUP,
       GRANT_PUBLIC, GRANT_SPECIFIED, GRANT_OWNER, GRANT_USER_GROUP,
     } = Page;
     } = Page;
@@ -886,7 +912,7 @@ class ElasticsearchDelegator implements SearchDelegator<Data, ESTermsKey, ESQuer
   }
   }
 
 
   async appendFunctionScore(query, queryString): Promise<void> {
   async appendFunctionScore(query, queryString): Promise<void> {
-    const User = mongoose.model('User');
+    const User = this.getUserModel();
     const count = await User.count({}) || 1;
     const count = await User.count({}) || 1;
 
 
     const minScore = queryString.length * 0.1 - 1; // increase with length
     const minScore = queryString.length * 0.1 - 1; // increase with length

+ 3 - 4
apps/app/src/utils/project-dir-utils.ts → apps/app/src/server/util/project-dir-utils.ts

@@ -1,9 +1,8 @@
-/* eslint-disable import/prefer-default-export */
+import fs from 'node:fs';
+import path from 'node:path';
+import process from 'node:process';
 
 
 import { isServer } from '@growi/core/dist/utils/browser-utils';
 import { isServer } from '@growi/core/dist/utils/browser-utils';
-import fs from 'fs';
-import path from 'path';
-import process from 'process';
 
 
 const isCurrentDirRoot = isServer() && fs.existsSync('./next.config.js');
 const isCurrentDirRoot = isServer() && fs.existsSync('./next.config.js');
 
 

+ 1 - 2
apps/app/src/stores-universal/context.tsx

@@ -1,8 +1,7 @@
+import type EventEmitter from 'node:events';
 import type { ColorScheme, IUserHasId } from '@growi/core';
 import type { ColorScheme, IUserHasId } from '@growi/core';
-
 import { AcceptedUploadFileType } from '@growi/core';
 import { AcceptedUploadFileType } from '@growi/core';
 import { useSWRStatic } from '@growi/core/dist/swr';
 import { useSWRStatic } from '@growi/core/dist/swr';
-import type EventEmitter from 'events';
 import type { SWRResponse } from 'swr';
 import type { SWRResponse } from 'swr';
 import useSWR from 'swr';
 import useSWR from 'swr';
 import useSWRImmutable from 'swr/immutable';
 import useSWRImmutable from 'swr/immutable';

+ 1 - 0
apps/app/src/stores-universal/use-context-swr.tsx

@@ -1,4 +1,5 @@
 import { useSWRStatic } from '@growi/core/dist/swr';
 import { useSWRStatic } from '@growi/core/dist/swr';
+// biome-ignore lint/style/useNodejsImportProtocol: ignore
 import assert from 'assert';
 import assert from 'assert';
 import type { Key, SWRConfiguration, SWRResponse } from 'swr';
 import type { Key, SWRConfiguration, SWRResponse } from 'swr';
 
 

+ 2 - 2
apps/app/src/utils/next.config.utils.js

@@ -1,7 +1,7 @@
 // workaround by https://github.com/martpie/next-transpile-modules/issues/143#issuecomment-817467144
 // workaround by https://github.com/martpie/next-transpile-modules/issues/143#issuecomment-817467144
 
 
-const fs = require('fs');
-const path = require('path');
+const fs = require('node:fs');
+const path = require('node:path');
 
 
 const nodeModulesPaths = [
 const nodeModulesPaths = [
   path.resolve(__dirname, '../../node_modules'),
   path.resolve(__dirname, '../../node_modules'),

+ 1 - 1
apps/app/test/integration/models/page-redirect.test.js

@@ -3,7 +3,7 @@ import mongoose from 'mongoose';
 import { getInstance } from '../setup-crowi';
 import { getInstance } from '../setup-crowi';
 
 
 describe('PageRedirect', () => {
 describe('PageRedirect', () => {
-  // eslint-disable-next-line no-unused-vars
+  // biome-ignore lint/correctness/noUnusedVariables: ignore
   let crowi;
   let crowi;
   let PageRedirect;
   let PageRedirect;
 
 

+ 1 - 1
apps/app/test/integration/models/page.test.js

@@ -9,7 +9,7 @@ let testGroup0;
 let parentPage;
 let parentPage;
 
 
 describe('Page', () => {
 describe('Page', () => {
-  // eslint-disable-next-line no-unused-vars
+  // biome-ignore lint/correctness/noUnusedVariables: ignore
   let crowi;
   let crowi;
   let Page;
   let Page;
   let PageQueryBuilder;
   let PageQueryBuilder;

+ 1 - 1
apps/app/test/integration/models/user.test.js

@@ -3,7 +3,7 @@ const mongoose = require('mongoose');
 const { getInstance } = require('../setup-crowi');
 const { getInstance } = require('../setup-crowi');
 
 
 describe('User', () => {
 describe('User', () => {
-  // eslint-disable-next-line no-unused-vars
+  // biome-ignore lint/correctness/noUnusedVariables: ignore
   let crowi;
   let crowi;
   let User;
   let User;
 
 

+ 1 - 1
apps/app/test/integration/setup-crowi.ts

@@ -1,4 +1,4 @@
-import { Server } from 'http';
+import { Server } from 'node:http';
 
 
 import Crowi from '../../src/server/crowi';
 import Crowi from '../../src/server/crowi';
 import { setupModelsDependentOnCrowi } from '../../src/server/crowi/setup-models';
 import { setupModelsDependentOnCrowi } from '../../src/server/crowi/setup-models';

+ 2 - 2
apps/pdf-converter/docker/Dockerfile

@@ -6,7 +6,7 @@ ARG PNPM_HOME="/root/.local/share/pnpm"
 ##
 ##
 ## base
 ## base
 ##
 ##
-FROM node:22-slim AS base
+FROM node:20-slim AS base
 
 
 ARG OPT_DIR
 ARG OPT_DIR
 ARG PNPM_HOME
 ARG PNPM_HOME
@@ -63,7 +63,7 @@ RUN tar -zcf /tmp/packages.tar.gz \
 ##
 ##
 ## release
 ## release
 ##
 ##
-FROM node:22-slim
+FROM node:20-slim
 LABEL maintainer="Yuki Takei <yuki@weseek.co.jp>"
 LABEL maintainer="Yuki Takei <yuki@weseek.co.jp>"
 
 
 ARG OPT_DIR
 ARG OPT_DIR

+ 1 - 1
apps/pdf-converter/package.json

@@ -48,7 +48,7 @@
     "@types/connect": "^3.4.38",
     "@types/connect": "^3.4.38",
     "@types/express": "^4.17.21",
     "@types/express": "^4.17.21",
     "@types/multer": "^1.4.12",
     "@types/multer": "^1.4.12",
-    "@types/node": "^22.5.4",
+    "@types/node": "^20.18.3",
     "@types/supertest": "^6.0.3",
     "@types/supertest": "^6.0.3",
     "supertest": "^7.1.1",
     "supertest": "^7.1.1",
     "unplugin-swc": "^1.5.3"
     "unplugin-swc": "^1.5.3"

+ 2 - 2
apps/slackbot-proxy/docker/Dockerfile

@@ -3,7 +3,7 @@
 ##
 ##
 ## base
 ## base
 ##
 ##
-FROM node:22-slim AS base
+FROM node:20-slim AS base
 
 
 ENV optDir="/opt"
 ENV optDir="/opt"
 
 
@@ -52,7 +52,7 @@ RUN tar -zcf packages.tar.gz \
 ##
 ##
 ## release
 ## release
 ##
 ##
-FROM node:22-slim
+FROM node:20-slim
 LABEL maintainer="Yuki Takei <yuki@weseek.co.jp>"
 LABEL maintainer="Yuki Takei <yuki@weseek.co.jp>"
 
 
 ENV NODE_ENV="production"
 ENV NODE_ENV="production"

+ 1 - 1
apps/slackbot-proxy/package.json

@@ -1,6 +1,6 @@
 {
 {
   "name": "@growi/slackbot-proxy",
   "name": "@growi/slackbot-proxy",
-  "version": "7.3.1-slackbot-proxy.0",
+  "version": "7.3.2-slackbot-proxy.0",
   "license": "MIT",
   "license": "MIT",
   "private": "true",
   "private": "true",
   "scripts": {
   "scripts": {

+ 7 - 2
biome.json

@@ -14,6 +14,7 @@
       "!**/.vscode/**",
       "!**/.vscode/**",
       "!**/turbo.json",
       "!**/turbo.json",
       "!**/.next/**",
       "!**/.next/**",
+      "!**/.terraform/**",
       "!bin/**",
       "!bin/**",
       "!tsconfig.base.json",
       "!tsconfig.base.json",
       "!**/.devcontainer/**",
       "!**/.devcontainer/**",
@@ -37,7 +38,8 @@
       "!apps/app/src/stores/**",
       "!apps/app/src/stores/**",
       "!apps/app/src/styles/**",
       "!apps/app/src/styles/**",
       "!apps/app/test/integration/service/**",
       "!apps/app/test/integration/service/**",
-      "!apps/app/test-with-vite/**"
+      "!apps/app/test-with-vite/**",
+      "!apps/app/tmp/**"
     ]
     ]
   },
   },
   "formatter": {
   "formatter": {
@@ -67,7 +69,10 @@
   },
   },
   "overrides": [
   "overrides": [
     {
     {
-      "includes": ["apps/pdf-converter/**", "./apps/slackbot-proxy/**"],
+      "includes": [
+        "apps/pdf-converter/**",
+        "./apps/slackbot-proxy/**"
+      ],
       "linter": {
       "linter": {
         "rules": {
         "rules": {
           "style": {
           "style": {

+ 3 - 3
package.json

@@ -1,6 +1,6 @@
 {
 {
   "name": "growi",
   "name": "growi",
-  "version": "7.3.1-RC.0",
+  "version": "7.3.2-RC.0",
   "description": "Team collaboration software using markdown",
   "description": "Team collaboration software using markdown",
   "license": "MIT",
   "license": "MIT",
   "private": "true",
   "private": "true",
@@ -53,7 +53,7 @@
     "@types/css-modules": "^1.0.2",
     "@types/css-modules": "^1.0.2",
     "@types/eslint": "^8.37.0",
     "@types/eslint": "^8.37.0",
     "@types/estree": "^1.0.1",
     "@types/estree": "^1.0.1",
-    "@types/node": "^20.14.0",
+    "@types/node": "^20.18.3",
     "@types/path-browserify": "^1.0.0",
     "@types/path-browserify": "^1.0.0",
     "@typescript-eslint/eslint-plugin": "^5.59.7",
     "@typescript-eslint/eslint-plugin": "^5.59.7",
     "@typescript-eslint/parser": "^5.59.7",
     "@typescript-eslint/parser": "^5.59.7",
@@ -116,6 +116,6 @@
     }
     }
   },
   },
   "engines": {
   "engines": {
-    "node": "^20 || ^22"
+    "node": "^18 || ^20"
   }
   }
 }
 }

+ 1 - 1
packages/presentation/src/client/components/GrowiSlides.tsx

@@ -30,7 +30,7 @@ export const GrowiSlides = (props: Props): JSX.Element => {
     rendererOptions.remarkPlugins == null ||
     rendererOptions.remarkPlugins == null ||
     rendererOptions.components == null
     rendererOptions.components == null
   ) {
   ) {
-    return <></>;
+    return;
   }
   }
 
 
   rendererOptions.remarkPlugins.push([
   rendererOptions.remarkPlugins.push([

+ 1 - 1
packages/presentation/src/client/services/renderer/extract-sections.ts

@@ -61,7 +61,7 @@ export const remarkPlugin: Plugin<[ExtractSectionsPluginParams]> = (
 
 
   return (tree) => {
   return (tree) => {
     // wrap with <section>
     // wrap with <section>
-    visit(tree, startCondition, (node, index, parent: Parent) => {
+    visit(tree, startCondition, (node, _index, parent: Parent) => {
       if (parent == null || parent.type !== 'root' || node.type === 'yaml') {
       if (parent == null || parent.type !== 'root' || node.type === 'yaml') {
         return;
         return;
       }
       }

+ 1 - 1
packages/remark-attachment-refs/src/client/components/ExtractedAttachments.tsx

@@ -107,7 +107,7 @@ export const ExtractedAttachments = React.memo(
 
 
     // eslint-disable-next-line @typescript-eslint/no-unused-vars
     // eslint-disable-next-line @typescript-eslint/no-unused-vars
     const renderExtractedImage = useCallback(
     const renderExtractedImage = useCallback(
-      (attachment: IAttachmentHasId, index: number) => {
+      (attachment: IAttachmentHasId, _index: number) => {
         const { options } = refsContext;
         const { options } = refsContext;
 
 
         // determine alt
         // determine alt

+ 2 - 2
packages/remark-attachment-refs/src/server/routes/refs.ts

@@ -62,7 +62,7 @@ function addDepthCondition(query, pagePath, optionsDepth) {
 
 
 type RequestWithUser = Request & { user: HydratedDocument<IUser> };
 type RequestWithUser = Request & { user: HydratedDocument<IUser> };
 
 
-const loginRequiredFallback = (req, res) => {
+const loginRequiredFallback = (_req, res) => {
   return res.status(403).send('login required');
   return res.status(403).send('login required');
 };
 };
 
 
@@ -203,7 +203,7 @@ export const routesFactory = (crowi): any => {
 
 
         try {
         try {
           regex = generateRegexp(regexOptionValue);
           regex = generateRegexp(regexOptionValue);
-        } catch (err) {
+        } catch {
           res.status(400).send("the 'regex' option is invalid as RegExp.");
           res.status(400).send("the 'regex' option is invalid as RegExp.");
           return;
           return;
         }
         }

+ 1 - 1
packages/remark-drawio/src/components/DrawioViewer.tsx

@@ -162,7 +162,7 @@ export const DrawioViewer = memo((props: DrawioViewerProps): JSX.Element => {
     }
     }
 
 
     const observer = new ResizeObserver((entries) => {
     const observer = new ResizeObserver((entries) => {
-      for (const entry of entries) {
+      for (const _entry of entries) {
         // setElementWidth(entry.contentRect.width);
         // setElementWidth(entry.contentRect.width);
         onRenderingStart?.();
         onRenderingStart?.();
         renderDrawioWithDebounce();
         renderDrawioWithDebounce();

+ 0 - 8
packages/remark-growi-directive/src/mdast-util-growi-directive/lib/index.js

@@ -22,8 +22,6 @@ import { stringifyEntitiesLight } from 'stringify-entities';
 
 
 const own = {}.hasOwnProperty;
 const own = {}.hasOwnProperty;
 
 
-const shortcut = /^[^\t\n\r "#'.<=>`}]+$/;
-
 export const DirectiveType = Object.freeze({
 export const DirectiveType = Object.freeze({
   Text: 'textGrowiPluginDirective',
   Text: 'textGrowiPluginDirective',
   Leaf: 'leafGrowiPluginDirective',
   Leaf: 'leafGrowiPluginDirective',
@@ -223,12 +221,6 @@ function attributes(node, state) {
   const attrs = node.attributes || {};
   const attrs = node.attributes || {};
   /** @type {Array.<string>} */
   /** @type {Array.<string>} */
   const values = [];
   const values = [];
-  /** @type {string|undefined} */
-  let classesFull;
-  /** @type {string|undefined} */
-  let classes;
-  /** @type {string|undefined} */
-  let id;
   /** @type {string} */
   /** @type {string} */
   let key;
   let key;
 
 

+ 0 - 2
packages/remark-growi-directive/src/micromark-extension-growi-directive/lib/factory-attributes.js

@@ -50,8 +50,6 @@ export function factoryAttributes(
   attributeValueData,
   attributeValueData,
   disallowEol,
   disallowEol,
 ) {
 ) {
-  /** @type {string} */
-  let type;
   /** @type {Code|undefined} */
   /** @type {Code|undefined} */
   let marker;
   let marker;
 
 

+ 1 - 1
packages/remark-lsx/src/client/components/Lsx.tsx

@@ -93,7 +93,7 @@ const LsxSubstance = React.memo(
 
 
     const contents = useMemo(() => {
     const contents = useMemo(() => {
       if (data == null) {
       if (data == null) {
-        return <></>;
+        return;
       }
       }
 
 
       const depthRange = lsxContext.getOptDepth();
       const depthRange = lsxContext.getOptDepth();

Некоторые файлы не были показаны из-за большого количества измененных файлов