Bläddra i källkod

Merge remote-tracking branch 'origin/master' into feat/178240-suggest-path-spec

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
VANELLOPE\tomoyuki-t 1 vecka sedan
förälder
incheckning
34418192e2
100 ändrade filer med 5628 tillägg och 434 borttagningar
  1. 2 0
      .claude/settings.json
  2. 46 1
      .claude/skills/monorepo-overview/SKILL.md
  3. 2 2
      .github/workflows/ci-app-prod.yml
  4. 268 0
      .kiro/specs/collaborative-editor/design.md
  5. 79 0
      .kiro/specs/collaborative-editor/requirements.md
  6. 69 0
      .kiro/specs/collaborative-editor/research.md
  7. 22 0
      .kiro/specs/collaborative-editor/spec.json
  8. 3 0
      .kiro/specs/collaborative-editor/tasks.md
  9. 0 58
      .kiro/specs/optimize-presentation/requirements.md
  10. 0 49
      .kiro/specs/optimize-presentation/tasks.md
  11. 1 41
      .kiro/specs/presentation/design.md
  12. 26 0
      .kiro/specs/presentation/requirements.md
  13. 0 0
      .kiro/specs/presentation/research.md
  14. 3 2
      .kiro/specs/presentation/spec.json
  15. 262 0
      .kiro/specs/upgrade-fixed-packages/design.md
  16. 75 0
      .kiro/specs/upgrade-fixed-packages/requirements.md
  17. 183 0
      .kiro/specs/upgrade-fixed-packages/research.md
  18. 22 0
      .kiro/specs/upgrade-fixed-packages/spec.json
  19. 89 0
      .kiro/specs/upgrade-fixed-packages/tasks.md
  20. 5 1
      .kiro/steering/structure.md
  21. 1 1
      apps/app/.claude/rules/package-dependencies.md
  22. 90 0
      apps/app/.claude/skills/learned/fix-broken-next-symlinks/SKILL.md
  23. 9 1
      apps/app/bin/check-next-symlinks.sh
  24. 0 1
      apps/app/next.config.ts
  25. 10 17
      apps/app/package.json
  26. 8 0
      apps/app/public/static/locales/en_US/admin.json
  27. 5 0
      apps/app/public/static/locales/en_US/translation.json
  28. 8 0
      apps/app/public/static/locales/fr_FR/admin.json
  29. 5 0
      apps/app/public/static/locales/fr_FR/translation.json
  30. 8 0
      apps/app/public/static/locales/ja_JP/admin.json
  31. 5 0
      apps/app/public/static/locales/ja_JP/translation.json
  32. 8 0
      apps/app/public/static/locales/ko_KR/admin.json
  33. 5 0
      apps/app/public/static/locales/ko_KR/translation.json
  34. 8 0
      apps/app/public/static/locales/zh_CN/admin.json
  35. 5 0
      apps/app/public/static/locales/zh_CN/translation.json
  36. 21 1
      apps/app/src/client/components/Admin/AuditLog/AuditLogDisableMode.tsx
  37. 182 0
      apps/app/src/client/components/Admin/AuditLog/AuditLogExportModal.tsx
  38. 39 0
      apps/app/src/client/components/Admin/AuditLog/DuplicateExportConfirmModal.tsx
  39. 67 0
      apps/app/src/client/components/Admin/AuditLog/useAuditLogExport.ts
  40. 35 0
      apps/app/src/client/components/Admin/AuditLogManagement.tsx
  41. 11 0
      apps/app/src/client/components/Admin/MarkdownSetting/ContentDispositionSettings.tsx
  42. 98 0
      apps/app/src/client/components/InAppNotification/ModelNotification/AuditLogBulkExportJobModelNotification.tsx
  43. 5 1
      apps/app/src/client/components/InAppNotification/ModelNotification/ModelNotification.tsx
  44. 5 1
      apps/app/src/client/components/InAppNotification/ModelNotification/index.tsx
  45. 13 0
      apps/app/src/client/components/InAppNotification/ModelNotification/useActionAndMsg.ts
  46. 6 9
      apps/app/src/components/Script/DrawioViewerScript/DrawioViewerScript.tsx
  47. 56 0
      apps/app/src/features/audit-log-bulk-export/interfaces/audit-log-bulk-export.ts
  48. 55 0
      apps/app/src/features/audit-log-bulk-export/server/models/audit-log-bulk-export-job.ts
  49. 299 0
      apps/app/src/features/audit-log-bulk-export/server/routes/apiv3/audit-log-bulk-export.integ.ts
  50. 117 0
      apps/app/src/features/audit-log-bulk-export/server/routes/apiv3/audit-log-bulk-export.ts
  51. 1 0
      apps/app/src/features/audit-log-bulk-export/server/routes/apiv3/index.ts
  52. 234 0
      apps/app/src/features/audit-log-bulk-export/server/service/audit-log-bulk-export-job-clean-up-cron.integ.ts
  53. 155 0
      apps/app/src/features/audit-log-bulk-export/server/service/audit-log-bulk-export-job-clean-up-cron.ts
  54. 751 0
      apps/app/src/features/audit-log-bulk-export/server/service/audit-log-bulk-export-job-cron/audit-log-bulk-export-job-cron-service.integ.ts
  55. 11 0
      apps/app/src/features/audit-log-bulk-export/server/service/audit-log-bulk-export-job-cron/errors.ts
  56. 297 0
      apps/app/src/features/audit-log-bulk-export/server/service/audit-log-bulk-export-job-cron/index.ts
  57. 104 0
      apps/app/src/features/audit-log-bulk-export/server/service/audit-log-bulk-export-job-cron/steps/compress-and-upload.ts
  58. 139 0
      apps/app/src/features/audit-log-bulk-export/server/service/audit-log-bulk-export-job-cron/steps/exportAuditLogsToFsAsync.ts
  59. 335 0
      apps/app/src/features/audit-log-bulk-export/server/service/audit-log-bulk-export.integ.ts
  60. 135 0
      apps/app/src/features/audit-log-bulk-export/server/service/audit-log-bulk-export.ts
  61. 42 0
      apps/app/src/features/audit-log-bulk-export/server/service/check-audit-log-bulk-export-job-in-progress-cron.ts
  62. 1 2
      apps/app/src/features/openai/server/services/openai.ts
  63. 9 8
      apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-cron/steps/compress-and-upload.ts
  64. 17 0
      apps/app/src/interfaces/activity.ts
  65. 12 0
      apps/app/src/interfaces/session-config.ts
  66. 23 14
      apps/app/src/server/crowi/index.ts
  67. 2 0
      apps/app/src/server/interfaces/attachment.ts
  68. 1 2
      apps/app/src/server/models/obsolete-page.js
  69. 7 8
      apps/app/src/server/models/page.ts
  70. 2 0
      apps/app/src/server/routes/apiv3/index.js
  71. 1 2
      apps/app/src/server/routes/apiv3/users.js
  72. 31 20
      apps/app/src/server/service/file-uploader/aws/index.ts
  73. 3 2
      apps/app/src/server/service/in-app-notification.ts
  74. 4 3
      apps/app/src/server/service/in-app-notification/in-app-notification-utils.ts
  75. 1 5
      apps/app/src/server/service/page-grant.ts
  76. 9 18
      apps/app/src/server/service/page/index.ts
  77. 3 0
      apps/app/src/server/service/socket-io/socket-io.ts
  78. 61 20
      apps/app/src/server/service/yjs/create-mongodb-persistence.ts
  79. 159 0
      apps/app/src/server/service/yjs/guard-socket.spec.ts
  80. 30 0
      apps/app/src/server/service/yjs/guard-socket.ts
  81. 2 2
      apps/app/src/server/service/yjs/sync-ydoc.ts
  82. 177 0
      apps/app/src/server/service/yjs/upgrade-handler.spec.ts
  83. 131 0
      apps/app/src/server/service/yjs/upgrade-handler.ts
  84. 205 0
      apps/app/src/server/service/yjs/websocket-connection.integ.ts
  85. 39 0
      apps/app/src/server/service/yjs/y-websocket-server.d.ts
  86. 25 10
      apps/app/src/server/service/yjs/yjs.integ.ts
  87. 72 93
      apps/app/src/server/service/yjs/yjs.ts
  88. 1 1
      apps/app/src/stores-universal/use-next-themes.tsx
  89. 17 7
      apps/app/src/stores/renderer.tsx
  90. 1 1
      apps/app/tsconfig.build.client.json
  91. 2 3
      apps/slackbot-proxy/package.json
  92. 3 3
      package.json
  93. 2 4
      packages/core-styles/package.json
  94. 6 0
      packages/core/CHANGELOG.md
  95. 3 6
      packages/core/package.json
  96. 5 0
      packages/core/src/consts/ydoc-status.ts
  97. 9 0
      packages/core/src/index.ts
  98. 2 2
      packages/core/src/utils/page-path-utils/generate-children-regexp.spec.ts
  99. 1 3
      packages/core/src/utils/page-path-utils/generate-children-regexp.ts
  100. 4 9
      packages/core/src/utils/page-path-utils/index.ts

+ 2 - 0
.claude/settings.json

@@ -11,6 +11,8 @@
       "Bash(pnpm run lint:*)",
       "Bash(pnpm run lint:*)",
       "Bash(pnpm vitest run *)",
       "Bash(pnpm vitest run *)",
       "Bash(pnpm biome check *)",
       "Bash(pnpm biome check *)",
+      "Bash(pnpm ls *)",
+      "Bash(pnpm why *)",
       "Bash(cat *)",
       "Bash(cat *)",
       "Bash(echo *)",
       "Bash(echo *)",
       "Bash(find *)",
       "Bash(find *)",

+ 46 - 1
.claude/skills/monorepo-overview/SKILL.md

@@ -64,6 +64,34 @@ turbo run test --filter @growi/app
 turbo run lint --filter @growi/core
 turbo run lint --filter @growi/core
 ```
 ```
 
 
+### Build Order Management
+
+Build dependencies in this monorepo are **not** declared with `dependsOn: ["^build"]` (the automatic workspace-dependency mode). Instead, they are declared **explicitly** — either in the root `turbo.json` for legacy entries, or in per-package `turbo.json` files for newer packages.
+
+**When to update**: whenever a package gains a new workspace dependency on another buildable package (one that produces a `dist/`), declare the build-order dependency explicitly. Without it, Turborepo may build in the wrong order, causing missing `dist/` files or type errors.
+
+**Pattern — per-package `turbo.json`** (preferred for new dependencies):
+
+```json
+// packages/my-package/turbo.json
+{
+  "extends": ["//"],
+  "tasks": {
+    "build": { "dependsOn": ["@growi/some-dep#build"] },
+    "dev":   { "dependsOn": ["@growi/some-dep#dev"] }
+  }
+}
+```
+
+- `"extends": ["//"]` inherits all root task definitions; only add the extra `dependsOn`
+- Keep root `turbo.json` clean — package-level overrides live with the package that owns the dependency
+- For packages with multiple tasks (watch, lint, test), mirror the dependency in each relevant task
+
+**Existing examples**:
+- `packages/slack/turbo.json` — `build`/`dev` depend on `@growi/logger`
+- `packages/remark-attachment-refs/turbo.json` — all tasks depend on `@growi/core`, `@growi/logger`, `@growi/remark-growi-directive`, `@growi/ui`
+- Root `turbo.json` — `@growi/ui#build` depends on `@growi/core#build` (pre-dates the per-package pattern)
+
 ## Architectural Principles
 ## Architectural Principles
 
 
 ### 1. Feature-Based Architecture (Recommended)
 ### 1. Feature-Based Architecture (Recommended)
@@ -99,11 +127,28 @@ This enables better code splitting and prevents server-only code from being bund
 
 
 Common code should be extracted to `packages/`:
 Common code should be extracted to `packages/`:
 
 
-- **core**: Utilities, constants, type definitions
+- **core**: Domain hub (see below)
 - **ui**: Reusable React components
 - **ui**: Reusable React components
 - **editor**: Markdown editor
 - **editor**: Markdown editor
 - **pluginkit**: Plugin system framework
 - **pluginkit**: Plugin system framework
 
 
+#### @growi/core — Domain & Utilities Hub
+
+`@growi/core` is the foundational shared package depended on by all other packages (10 consumers). Its responsibilities:
+
+- **Domain type definitions** — Single source of truth for cross-package interfaces (`IPage`, `IUser`, `IRevision`, `Ref<T>`, `HasObjectId`, etc.)
+- **Cross-cutting utilities** — Pure functions for page path validation, ObjectId checks, serialization (e.g., `serializeUserSecurely()`)
+- **System constants** — File types, plugin configs, scope enums
+- **Global type augmentations** — Runtime/polyfill type declarations visible to all consumers (e.g., `RegExp.escape()` via `declare global` in `index.ts`)
+
+Key patterns:
+
+1. **Shared types and global augmentations go in `@growi/core`** — Not duplicated per-package. `declare global` in `index.ts` propagates to all consumers through the module graph.
+2. **Subpath exports for granular imports** — `@growi/core/dist/utils/page-path-utils` instead of barrel imports from root.
+3. **Minimal runtime dependencies** — Only `bson-objectid`; ~70% types. Safe to import from both server and client contexts.
+4. **Server-specific interfaces are namespaced** — Under `interfaces/server/`.
+5. **Dual format (ESM + CJS)** — Built via Vite with `preserveModules: true` and `vite-plugin-dts` (`copyDtsFiles: true`).
+
 ## Version Management with Changeset
 ## Version Management with Changeset
 
 
 GROWI uses **Changesets** for version management and release notes:
 GROWI uses **Changesets** for version management and release notes:

+ 2 - 2
.github/workflows/ci-app-prod.yml

@@ -40,7 +40,7 @@ concurrency:
 jobs:
 jobs:
 
 
   # test-prod-node22:
   # test-prod-node22:
-  #   uses: growilabs/growi/.github/workflows/reusable-app-prod.yml@dev/7.5.x
+  #   uses: growilabs/growi/.github/workflows/reusable-app-prod.yml@master
   #   if: |
   #   if: |
   #     ( github.event_name == 'push'
   #     ( github.event_name == 'push'
   #       || github.base_ref == 'master'
   #       || github.base_ref == 'master'
@@ -54,7 +54,7 @@ jobs:
   #     SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
   #     SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
 
 
   test-prod-node24:
   test-prod-node24:
-    uses: growilabs/growi/.github/workflows/reusable-app-prod.yml@dev/7.5.x
+    uses: growilabs/growi/.github/workflows/reusable-app-prod.yml@master
     if: |
     if: |
       ( github.event_name == 'push'
       ( github.event_name == 'push'
         || github.base_ref == 'master'
         || github.base_ref == 'master'

+ 268 - 0
.kiro/specs/collaborative-editor/design.md

@@ -0,0 +1,268 @@
+# Design Document: collaborative-editor
+
+## Overview
+
+**Purpose**: Real-time collaborative editing in GROWI, allowing multiple users to simultaneously edit the same wiki page with automatic conflict resolution via Yjs CRDT.
+
+**Users**: All GROWI users who use real-time collaborative page editing. System operators manage the WebSocket and persistence infrastructure.
+
+**Impact**: Yjs document synchronization over native WebSocket (`y-websocket`), with Socket.IO continuing to serve non-Yjs real-time events (page room broadcasts, notifications).
+
+### Goals
+- Guarantee a single server-side Y.Doc per page — no split-brain desynchronization
+- Provide real-time bidirectional sync for all connected editors
+- Authenticate and authorize WebSocket connections using existing session infrastructure
+- Persist draft state to MongoDB for durability across reconnections and restarts
+- Bridge awareness/presence events to non-editor UI via Socket.IO rooms
+
+### Non-Goals
+- Changing the Yjs document model, CodeMirror integration, or page save/revision logic
+- Migrating Socket.IO-based UI events to WebSocket
+- Changing the `yjs-writings` MongoDB collection schema or data format
+
+## Architecture
+
+### Architecture Diagram
+
+```mermaid
+graph TB
+    subgraph Client
+        CM[CodeMirror Editor]
+        WP[WebsocketProvider]
+        GS[Global Socket.IO Client]
+    end
+
+    subgraph Server
+        subgraph HTTP Server
+            Express[Express App]
+            SIO[Socket.IO Server]
+            WSS[WebSocket Server - ws]
+        end
+
+        subgraph YjsService
+            UpgradeHandler[Upgrade Handler - Auth]
+            ConnHandler[Connection Handler]
+            DocManager[Document Manager - getYDoc]
+            AwarenessBridge[Awareness Bridge]
+        end
+
+        MDB[(MongoDB - yjs-writings)]
+        SessionStore[(Session Store)]
+    end
+
+    CM --> WP
+    WP -->|ws path yjs pageId| WSS
+    GS -->|socket.io| SIO
+
+    WSS -->|upgrade auth| UpgradeHandler
+    UpgradeHandler -->|parse cookie| SessionStore
+    WSS -->|connection| ConnHandler
+    ConnHandler --> DocManager
+    DocManager --> MDB
+
+    AwarenessBridge -->|io.in room .emit| SIO
+
+    DocManager -->|awareness events| AwarenessBridge
+```
+
+**Key architectural properties**:
+- **Dual transport**: WebSocket for Yjs sync (`/yjs/{pageId}`), Socket.IO for UI events (`/socket.io/`)
+- **Singleton YjsService**: Encapsulates all Yjs document management
+- **Atomic document creation**: `map.setIfUndefined` from lib0 — synchronous get-or-create, no race condition window
+- **Session-based auth**: Cookie parsed from HTTP upgrade request, same session store as Express
+
+### Technology Stack
+
+| Layer | Choice / Version | Role |
+|-------|------------------|------|
+| Client Provider | `y-websocket@^2.x` (WebsocketProvider) | Yjs document sync over WebSocket |
+| Server WebSocket | `ws@^8.x` (WebSocket.Server) | Native WebSocket server, `noServer: true` mode |
+| Server Yjs Utils | `y-websocket@^2.x` (`bin/utils`) | `setupWSConnection`, `getYDoc`, `WSSharedDoc` |
+| Persistence | `y-mongodb-provider` (extended) | Yjs document persistence to `yjs-writings` collection |
+| Event Bridge | Socket.IO `io` instance | Awareness state broadcasting to page rooms |
+| Auth | express-session + passport | WebSocket upgrade authentication via cookie |
+
+## System Flows
+
+### Client Connection Flow
+
+```mermaid
+sequenceDiagram
+    participant C as Client Browser
+    participant WSS as WebSocket Server
+    participant UH as Upgrade Handler
+    participant SS as Session Store
+    participant DM as Document Manager
+    participant MDB as MongoDB
+
+    C->>WSS: HTTP Upgrade GET /yjs/pageId
+    WSS->>UH: upgrade event
+    UH->>SS: Parse cookie, load session
+    SS-->>UH: Session with user
+    UH->>UH: Check page access
+    alt Unauthorized
+        UH-->>C: 401/403, destroy socket
+    else Authorized
+        UH->>WSS: handleUpgrade
+        WSS->>DM: setupWSConnection
+        DM->>DM: getYDoc - atomic get or create
+        alt New document
+            DM->>MDB: bindState - load persisted state
+            MDB-->>DM: Y.Doc state
+        end
+        DM-->>C: Sync Step 1 - state vector
+        C-->>DM: Sync Step 2 - diff
+        DM-->>C: Awareness states
+    end
+```
+
+Authentication happens before `handleUpgrade` — unauthorized connections never reach the Yjs layer. Document creation uses `getYDoc`'s atomic `map.setIfUndefined` pattern.
+
+### Document Lifecycle
+
+```mermaid
+stateDiagram-v2
+    [*] --> Created: First client connects
+    Created --> Active: bindState completes
+    Active --> Active: Clients connect/disconnect
+    Active --> Flushing: Last client disconnects
+    Flushing --> [*]: writeState completes, doc destroyed
+    Flushing --> Active: New client connects before destroy
+```
+
+## Components and Interfaces
+
+| Component | Layer | Intent | Key Dependencies |
+|-----------|-------|--------|-----------------|
+| YjsService | Server / Service | Orchestrates Yjs document lifecycle, exposes public API | ws, y-websocket/bin/utils, MongodbPersistence |
+| UpgradeHandler | Server / Auth | Authenticates and authorizes WebSocket upgrade requests | express-session, passport, Page model |
+| guardSocket | Server / Util | Prevents socket closure by other upgrade handlers during async auth | — |
+| PersistenceAdapter | Server / Data | Bridges MongodbPersistence to y-websocket persistence interface | MongodbPersistence, syncYDoc, Socket.IO io |
+| AwarenessBridge | Server / Events | Bridges y-websocket awareness events to Socket.IO rooms | Socket.IO io |
+| use-collaborative-editor-mode | Client / Hook | Manages WebsocketProvider lifecycle and awareness | y-websocket, yjs |
+
+### YjsService
+
+**Intent**: Manages Yjs document lifecycle, WebSocket server setup, and public API for page save/status integration.
+
+**Responsibilities**:
+- Owns the `ws.WebSocketServer` instance and the y-websocket `docs` Map
+- Initializes persistence via y-websocket's `setPersistence`
+- Registers the HTTP `upgrade` handler (delegating auth to UpgradeHandler)
+- Exposes the same public interface as `IYjsService` for downstream consumers
+
+**Service Interface**:
+
+```typescript
+interface IYjsService {
+  getYDocStatus(pageId: string): Promise<YDocStatus>;
+  syncWithTheLatestRevisionForce(
+    pageId: string,
+    editingMarkdownLength?: number,
+  ): Promise<SyncLatestRevisionBody>;
+  getCurrentYdoc(pageId: string): Y.Doc | undefined;
+}
+```
+
+- Constructor accepts `httpServer: http.Server` and `io: Server`
+- Uses `WebSocket.Server({ noServer: true })` + y-websocket utils
+- Uses `httpServer.on('upgrade', ...)` with path check for `/yjs/`
+- **CRITICAL**: Socket.IO server must set `destroyUpgrade: false` to prevent engine.io from destroying non-Socket.IO upgrade requests
+
+### UpgradeHandler
+
+**Intent**: Authenticates WebSocket upgrade requests using session cookies and verifies page access.
+
+**Interface**:
+
+```typescript
+type UpgradeResult =
+  | { authorized: true; request: AuthenticatedRequest; pageId: string }
+  | { authorized: false; statusCode: number };
+```
+
+- Runs express-session and passport middleware via `runMiddleware` helper against raw `IncomingMessage`
+- `writeErrorResponse` writes HTTP status line only — socket cleanup deferred to caller (works with `guardSocket`)
+- Guest access: if `user` is undefined but page allows guest access, authorization proceeds
+
+### guardSocket
+
+**Intent**: Prevents other synchronous upgrade handlers from closing the socket during async auth.
+
+**Why this exists**: Node.js EventEmitter fires all `upgrade` listeners synchronously. When the Yjs async handler yields at its first `await`, Next.js's `NextCustomServer.upgradeHandler` runs and calls `socket.end()` for unrecognized paths. This destroys the socket before Yjs auth completes.
+
+**How it works**: Temporarily replaces `socket.end()` and `socket.destroy()` with no-ops before the first `await`. After auth completes, `restore()` reinstates the original methods.
+
+```typescript
+const guard = guardSocket(socket);
+const result = await handleUpgrade(request, socket, head);
+guard.restore();
+```
+
+### PersistenceAdapter
+
+**Intent**: Adapts MongodbPersistence to y-websocket's persistence interface (`bindState`, `writeState`).
+
+**Interface**:
+
+```typescript
+interface YWebsocketPersistence {
+  bindState: (docName: string, ydoc: Y.Doc) => void;
+  writeState: (docName: string, ydoc: Y.Doc) => Promise<void>;
+  provider: MongodbPersistence;
+}
+```
+
+**Key behavior**:
+- `bindState`: Loads persisted state → determines YDocStatus → calls `syncYDoc` → registers awareness event bridge
+- `writeState`: Flushes document state to MongoDB on last-client disconnect
+- Ordering within `bindState` is guaranteed (persistence load → sync → awareness registration)
+
+### AwarenessBridge
+
+**Intent**: Bridges y-websocket per-document awareness events to Socket.IO room broadcasts.
+
+**Published events** (to Socket.IO rooms):
+- `YjsAwarenessStateSizeUpdated` with `awarenessStateSize: number`
+- `YjsHasYdocsNewerThanLatestRevisionUpdated` with `hasNewerYdocs: boolean`
+
+**Subscribed events** (from y-websocket):
+- `WSSharedDoc.awareness.on('update', ...)` — per-document awareness changes
+
+### use-collaborative-editor-mode (Client Hook)
+
+**Intent**: Manages WebsocketProvider lifecycle, awareness state, and CodeMirror extensions.
+
+**Key details**:
+- WebSocket URL: `${wsProtocol}//${window.location.host}/yjs`, room name: `pageId`
+- Options: `connect: true`, `resyncInterval: 3000`
+- Awareness API: `provider.awareness.setLocalStateField`, `.on('update', ...)`
+- All side effects (provider creation, awareness setup) must be outside React state updaters to avoid render-phase violations
+
+## Data Models
+
+No custom data models. Uses the existing `yjs-writings` MongoDB collection via `MongodbPersistence` (extended `y-mongodb-provider`). Collection schema, indexes, and persistence interface (`bindState` / `writeState`) are unchanged.
+
+## Error Handling
+
+| Error Type | Scenario | Response |
+|------------|----------|----------|
+| Auth Failure | Invalid/expired session cookie | 401 on upgrade, socket destroyed |
+| Access Denied | User lacks page access | 403 on upgrade, socket destroyed |
+| Persistence Error | MongoDB read failure in bindState | Log error, serve empty doc (clients sync from each other) |
+| WebSocket Close | Client network failure | Automatic reconnect with exponential backoff (WebsocketProvider built-in) |
+| Document Not Found | getCurrentYdoc for non-active doc | Return undefined |
+
+## Requirements Traceability
+
+| Requirement | Summary | Components |
+|-------------|---------|------------|
+| 1.1, 1.2 | Single Y.Doc per page | DocumentManager (getYDoc atomic pattern) |
+| 1.3, 1.4, 1.5 | Sync integrity on reconnect | DocumentManager, WebsocketProvider |
+| 2.1, 2.2 | y-websocket transport | YjsService, use-collaborative-editor-mode |
+| 2.3 | Coexist with Socket.IO | UpgradeHandler, guardSocket |
+| 2.4 | resyncInterval | WebsocketProvider |
+| 3.1-3.4 | Auth on upgrade | UpgradeHandler |
+| 4.1-4.5 | MongoDB persistence | PersistenceAdapter |
+| 5.1-5.4 | Awareness and presence | AwarenessBridge, use-collaborative-editor-mode |
+| 6.1-6.4 | YDoc status and sync | YjsService |

+ 79 - 0
.kiro/specs/collaborative-editor/requirements.md

@@ -0,0 +1,79 @@
+# Requirements Document
+
+## Introduction
+
+GROWI provides real-time collaborative editing powered by Yjs, allowing multiple users to simultaneously edit the same wiki page with automatic conflict resolution. The collaborative editing system uses `y-websocket` as the Yjs transport layer over native WebSocket, with MongoDB persistence for draft state and Socket.IO bridging for awareness/presence events to non-editor UI components.
+
+**Scope**: Server-side Yjs document management, client-side Yjs provider, WebSocket authentication, MongoDB persistence integration, and awareness/presence tracking.
+
+**Out of Scope**: The Yjs document model itself, CodeMirror editor integration details, page save/revision logic, or the global Socket.IO infrastructure used for non-Yjs events.
+
+## Requirements
+
+### Requirement 1: Document Synchronization Integrity
+
+**Objective:** As a wiki user editing collaboratively, I want all clients editing the same page to always share a single server-side Y.Doc instance, so that edits are never lost due to document desynchronization.
+
+#### Acceptance Criteria
+
+1. When multiple clients connect to the same page simultaneously, the Yjs Service shall ensure that exactly one Y.Doc instance exists on the server for that page.
+2. When a client connects while another client's document initialization is in progress, the Yjs Service shall return the same Y.Doc instance to both clients without creating a duplicate.
+3. When a client reconnects after a brief network disconnection, the Yjs Service shall synchronize the client with the existing server-side Y.Doc containing all other clients' changes.
+4. While multiple clients are editing the same page, the Yjs Service shall propagate each client's changes to all other connected clients in real time.
+5. If a client's WebSocket connection drops and reconnects, the Yjs Service shall not destroy the server-side Y.Doc while other clients remain connected.
+
+### Requirement 2: WebSocket Transport Layer
+
+**Objective:** As a system operator, I want the collaborative editing transport to use y-websocket over native WebSocket, so that the system benefits from active maintenance and atomic document initialization.
+
+#### Acceptance Criteria
+
+1. The Yjs Service shall use `y-websocket` server utilities as the server-side Yjs transport.
+2. The Editor Client shall use `y-websocket`'s `WebsocketProvider` as the client-side Yjs provider.
+3. The WebSocket server shall coexist with the existing Socket.IO server on the same HTTP server instance without port conflicts.
+4. The Yjs Service shall support `resyncInterval` (periodic state re-synchronization) to recover from any missed updates.
+
+### Requirement 3: Authentication and Authorization
+
+**Objective:** As a system administrator, I want WebSocket connections for collaborative editing to be authenticated and authorized, so that only permitted users can access page content via the Yjs channel.
+
+#### Acceptance Criteria
+
+1. When a WebSocket upgrade request is received for collaborative editing, the Yjs Service shall authenticate the user using the existing session/passport mechanism.
+2. When an authenticated user attempts to connect to a page's Yjs document, the Yjs Service shall verify that the user has read access to that page before allowing the connection.
+3. If an unauthenticated or unauthorized WebSocket upgrade request is received, the Yjs Service shall reject the connection with an appropriate HTTP error status.
+4. Where guest access is enabled for a page, the Yjs Service shall allow guest users to connect to that page's collaborative editing session.
+
+### Requirement 4: MongoDB Persistence
+
+**Objective:** As a system operator, I want the Yjs persistence layer to use MongoDB storage, so that draft state is preserved across server restarts and client reconnections.
+
+#### Acceptance Criteria
+
+1. The Yjs Service shall use the `yjs-writings` MongoDB collection for document persistence.
+2. The Yjs Service shall use the `MongodbPersistence` implementation (extended `y-mongodb-provider`).
+3. When a Y.Doc is loaded from persistence, the Yjs Service shall apply the persisted state before sending sync messages to connecting clients.
+4. When a Y.Doc receives updates, the Yjs Service shall persist each update to MongoDB with an `updatedAt` timestamp.
+5. When all clients disconnect from a document, the Yjs Service shall flush the document state to MongoDB before destroying the in-memory instance.
+
+### Requirement 5: Awareness and Presence Tracking
+
+**Objective:** As a wiki user, I want to see which other users are currently editing the same page, so that I can coordinate edits and avoid conflicts.
+
+#### Acceptance Criteria
+
+1. While a user is editing a page, the Editor Client shall broadcast the user's presence information (name, username, avatar, cursor color) via the Yjs awareness protocol.
+2. When a user connects or disconnects from a collaborative editing session, the Yjs Service shall emit awareness state size updates to the page's Socket.IO room (`page:{pageId}`).
+3. When the last user disconnects from a document, the Yjs Service shall emit a draft status notification (`YjsHasYdocsNewerThanLatestRevisionUpdated`) to the page's Socket.IO room.
+4. The Editor Client shall display the list of active editors based on awareness state updates from the Yjs provider.
+
+### Requirement 6: YDoc Status and Sync Integration
+
+**Objective:** As a system component, I want the YDoc status detection and force-sync mechanisms to function correctly, so that draft detection, save operations, and revision synchronization work as expected.
+
+#### Acceptance Criteria
+
+1. The Yjs Service shall expose `getYDocStatus(pageId)` returning the correct status (ISOLATED, NEW, DRAFT, SYNCED, OUTDATED).
+2. The Yjs Service shall expose `getCurrentYdoc(pageId)` returning the in-memory Y.Doc instance if one exists.
+3. When a Y.Doc is loaded from persistence (within `bindState`), the Yjs Service shall call `syncYDoc` to synchronize the document with the latest revision based on YDoc status.
+4. The Yjs Service shall expose `syncWithTheLatestRevisionForce(pageId)` for API-triggered force synchronization.

+ 69 - 0
.kiro/specs/collaborative-editor/research.md

@@ -0,0 +1,69 @@
+# Research & Design Decisions
+
+## Summary
+- **Feature**: `collaborative-editor`
+- **Key Findings**:
+  - y-websocket uses atomic `map.setIfUndefined` for document creation — eliminates TOCTOU race conditions
+  - `y-websocket@2.x` bundles both client and server utils with `yjs@^13` compatibility
+  - `ws` package already installed in GROWI; Express HTTP server supports adding WebSocket upgrade alongside Socket.IO
+
+## Design Decisions
+
+### Decision: Use y-websocket@2.x for both client and server
+
+- **Context**: Need yjs v13 compatibility on both client and server sides
+- **Alternatives Considered**:
+  1. y-websocket@3.x client + custom server — more work, v3 SyncStatus not needed
+  2. y-websocket@3.x + @y/websocket-server — requires yjs v14 migration (out of scope)
+  3. y-websocket@2.x for everything — simplest path, proven code
+- **Selected**: Option 3 — `y-websocket@2.x`
+- **Rationale**: Minimizes custom code, proven server utils, compatible with yjs v13, clear upgrade path to v3 + @y/websocket-server when yjs v14 migration happens
+- **Trade-offs**: Miss v3 SyncStatus feature, but `sync` event + `resyncInterval` meets all requirements
+- **Follow-up**: Plan separate yjs v14 migration, then upgrade to y-websocket v3 + @y/websocket-server
+
+### Decision: WebSocket path prefix `/yjs/`
+
+- **Context**: Need URL pattern that doesn't conflict with Socket.IO
+- **Selected**: `/yjs/{pageId}`
+- **Rationale**: Simple, semantic, no conflict with Socket.IO's `/socket.io/` path or Express routes
+
+### Decision: Session-based authentication on WebSocket upgrade
+
+- **Context**: Must authenticate WebSocket connections without Socket.IO middleware
+- **Selected**: Parse session cookie from HTTP upgrade request, deserialize user from session store
+- **Rationale**: Reuses existing session infrastructure — same cookie, same store, same passport serialization
+- **Trade-offs**: Couples to express-session internals, but GROWI already has this coupling throughout
+
+### Decision: Keep Socket.IO for awareness event fan-out
+
+- **Context**: GROWI uses Socket.IO rooms (`page:{pageId}`) to broadcast awareness updates to non-editor components
+- **Selected**: Continue using Socket.IO `io.in(roomName).emit()` for awareness events, bridging from y-websocket awareness
+- **Rationale**: Non-editor UI components already listen on Socket.IO rooms; changing this is out of scope
+
+## Critical Implementation Constraints
+
+### engine.io `destroyUpgrade` setting
+
+Socket.IO's engine.io v6 defaults `destroyUpgrade: true` in its `attach()` method. This causes engine.io to destroy all non-Socket.IO upgrade requests after a 1-second timeout. The Socket.IO server **must** be configured with `destroyUpgrade: false` to allow `/yjs/` WebSocket handshakes to succeed.
+
+### Next.js upgradeHandler race condition (guardSocket pattern)
+
+Next.js's `NextCustomServer.upgradeHandler` registers an `upgrade` listener on the HTTP server. When the Yjs async handler yields at its first `await`, Next.js's synchronous handler runs and calls `socket.end()` for unrecognized paths. The `guardSocket` pattern temporarily replaces `socket.end()`/`socket.destroy()` with no-ops before the first `await`, restoring them after auth completes.
+
+- `prependListener` cannot solve this — it only changes listener order, cannot prevent subsequent listeners from executing
+- Removing Next.js's listener is fragile and breaks HMR
+- Synchronous auth is impossible (requires async MongoDB/session store queries)
+
+### React render-phase violation in use-collaborative-editor-mode
+
+Provider creation and awareness event handlers must be placed **outside** `setProvider(() => { ... })` functional state updaters. If inside, `awareness.setLocalStateField()` triggers synchronous awareness events that update other components during render. All side effects go in the `useEffect` body; `setProvider(_provider)` is called with a plain value.
+
+### y-websocket bindState ordering
+
+y-websocket does NOT await `bindState` before sending sync messages. However, within `bindState` itself, the ordering is guaranteed: persistence load → YDocStatus check → syncYDoc → awareness registration. This consolidation is intentional.
+
+## References
+- [y-websocket GitHub](https://github.com/yjs/y-websocket)
+- [y-websocket-server GitHub](https://github.com/yjs/y-websocket-server) (yjs v14, future migration target)
+- [ws npm](https://www.npmjs.com/package/ws)
+- [y-mongodb-provider](https://github.com/MaxNoetzold/y-mongodb-provider)

+ 22 - 0
.kiro/specs/collaborative-editor/spec.json

@@ -0,0 +1,22 @@
+{
+  "feature_name": "collaborative-editor",
+  "created_at": "2026-03-19T00:00:00.000Z",
+  "updated_at": "2026-03-24T00:00:00.000Z",
+  "language": "en",
+  "phase": "active",
+  "approvals": {
+    "requirements": {
+      "generated": true,
+      "approved": true
+    },
+    "design": {
+      "generated": true,
+      "approved": true
+    },
+    "tasks": {
+      "generated": true,
+      "approved": true
+    }
+  },
+  "ready_for_implementation": true
+}

+ 3 - 0
.kiro/specs/collaborative-editor/tasks.md

@@ -0,0 +1,3 @@
+# Implementation Plan
+
+No pending tasks. Use `/kiro:spec-tasks collaborative-editor` to generate tasks for new work.

+ 0 - 58
.kiro/specs/optimize-presentation/requirements.md

@@ -1,58 +0,0 @@
-# Requirements Document
-
-## Introduction
-
-The GROWI presentation feature (`@growi/presentation` package) statically imports `@marp-team/marp-core` (~524KB) and `@marp-team/marpit` (~372KB) whenever any slide component loads, even when Marp rendering is not needed. This is because both `MarpSlides` and `GrowiSlides` components statically import `growi-marpit.ts`, which instantiates Marp objects at module scope.
-
-The goal is to decouple heavy Marp dependencies so they are only loaded when a page explicitly uses `marp: true` in its frontmatter, reducing the async chunk size for the common non-Marp slide rendering path and improving overall bundle efficiency.
-
-## Requirements
-
-### Requirement 1: Decouple GrowiSlides from Marp Runtime Dependencies
-
-**Objective:** As a developer, I want GrowiSlides to render without loading `@marp-team/marp-core` or `@marp-team/marpit`, so that non-Marp slide pages do not incur unnecessary module loading overhead.
-
-#### Acceptance Criteria
-1. The `@growi/presentation` build output for GrowiSlides shall not contain import references to `@marp-team/marp-core` or `@marp-team/marpit`.
-2. When a slide page without `marp: true` is rendered, the Presentation module shall render GrowiSlides without loading `@marp-team/marp-core` or `@marp-team/marpit` modules.
-3. The Presentation module shall provide the Marp base CSS (previously generated by `marpit.render('')`) as pre-extracted static string constants, so that GrowiSlides can apply Marp container styling without a runtime Marp dependency.
-4. The `MARP_CONTAINER_CLASS_NAME` constant shall be defined in a shared constants module, not in `growi-marpit.ts`, to avoid transitive Marp imports.
-
-### Requirement 2: Dynamic Loading of MarpSlides
-
-**Objective:** As a developer, I want MarpSlides to be loaded dynamically (on demand), so that the Marp rendering engine is only fetched when a page actually uses Marp.
-
-#### Acceptance Criteria
-1. When a slide page with `marp: true` is rendered, the Presentation module shall dynamically load MarpSlides and render Marp content correctly.
-2. While MarpSlides is loading, the Presentation module shall display a loading indicator (Suspense fallback).
-3. When a slide page without `marp: true` is rendered, the Presentation module shall not trigger the dynamic import of MarpSlides.
-
-### Requirement 3: Build-Time CSS Extraction
-
-**Objective:** As a developer, I want the Marp base CSS to be extracted at build time via an automated script, so that the pre-extracted CSS stays synchronized with the installed `@marp-team/marp-core` version.
-
-#### Acceptance Criteria
-1. The `@growi/presentation` package shall include a build-time script that generates Marp base CSS constants by invoking `slideMarpit.render('')` and `presentationMarpit.render('')`.
-2. When `pnpm run build` is executed for `@growi/presentation`, the build pipeline shall regenerate the CSS constants before compiling source files.
-3. The generated CSS constants file shall be committed to the repository so that `dev` mode works without running the extraction script first.
-
-### Requirement 4: Functional Equivalence
-
-**Objective:** As a user, I want the presentation feature to behave identically after optimization, so that existing Marp and non-Marp presentations continue to work without regression.
-
-#### Acceptance Criteria
-1. When a page with `marp: true` frontmatter is viewed inline, the Presentation module shall render Marp slides with correct styling.
-2. When a page with `slide: true` frontmatter (without `marp: true`) is viewed inline, the Presentation module shall render GrowiSlides with correct styling.
-3. When the presentation modal is opened for a Marp page, the Presentation module shall render Marp slides in the modal with correct fullscreen behavior.
-4. When the presentation modal is opened for a non-Marp slide page, the Presentation module shall render GrowiSlides in the modal with correct fullscreen behavior.
-5. When a non-slide page is viewed, the Presentation module shall not load any slide rendering components (existing lazy-loading behavior preserved).
-
-### Requirement 5: Build Verification
-
-**Objective:** As a developer, I want to verify that the optimization achieves its goal, so that Marp module separation can be confirmed in CI and during development.
-
-#### Acceptance Criteria
-1. The `@growi/presentation` package shall build successfully with `pnpm run build`.
-2. The `@growi/app` package shall build successfully with `turbo run build --filter @growi/app`.
-3. The built `GrowiSlides.js` output shall contain no references to `@marp-team/marp-core` or `@marp-team/marpit`.
-4. The built `Slides.js` output shall contain a dynamic `import()` expression for `MarpSlides`.

+ 0 - 49
.kiro/specs/optimize-presentation/tasks.md

@@ -1,49 +0,0 @@
-# Implementation Plan
-
-- [x] 1. Set up shared constants and build-time CSS extraction infrastructure
-- [x] 1.1 Move the Marp container class name constant to the shared constants module and update growi-marpit to import from there
-  - Add the `MARP_CONTAINER_CLASS_NAME` string constant to the existing shared constants module in the presentation package
-  - Update growi-marpit to import the constant from the shared module instead of defining it locally
-  - Re-export the constant from growi-marpit for backward compatibility with MarpSlides
-  - _Requirements: 1.4_
-
-- [x] 1.2 Create the build-time CSS extraction script
-  - Write a Node.js ESM script that instantiates Marp with the same configuration as growi-marpit (container classes, inlineSVG, emoji/html/math disabled)
-  - The script renders empty strings through both slide and presentation Marp instances to extract their CSS output
-  - Write the CSS strings as exported TypeScript constants to the constants directory
-  - Include a file header comment indicating the file is auto-generated and how to regenerate it
-  - Validate that extracted CSS is non-empty before writing
-  - _Requirements: 3.1_
-
-- [x] 1.3 Wire the extraction script into the build pipeline and generate the initial CSS file
-  - Add a `pre:build:src` script entry in the presentation package's package.json that runs the extraction script before the main Vite build
-  - Execute the script once to generate the initial pre-extracted CSS constants file
-  - Commit the generated file so that dev mode works without running extraction first
-  - _Requirements: 3.2, 3.3_
-
-- [x] 2. (P) Decouple GrowiSlides from Marp runtime dependencies
-  - Replace the growi-marpit import in GrowiSlides with imports from the shared constants module and the pre-extracted CSS constants
-  - Replace the runtime `marpit.render('')` call with a lookup of the pre-extracted CSS constant based on the presentation mode flag
-  - After this change, GrowiSlides must have no import path leading to `@marp-team/marp-core` or `@marp-team/marpit`
-  - Depends on task 1 (shared constants and CSS file must exist)
-  - _Requirements: 1.1, 1.2, 1.3_
-
-- [x] 3. (P) Add dynamic import for MarpSlides in the Slides routing component
-  - Replace the static import of MarpSlides with a React.lazy dynamic import that resolves the named export
-  - Wrap the MarpSlides rendering branch in a Suspense boundary with a simple loading fallback
-  - Keep GrowiSlides as a static import (the common, lightweight path)
-  - The dynamic import ensures MarpSlides and its transitive Marp dependencies are only loaded when `hasMarpFlag` is true
-  - Depends on task 1 (shared constants must exist); parallel-safe with task 2 (different file)
-  - _Requirements: 2.1, 2.2, 2.3_
-
-- [x] 4. Build verification and functional validation
-- [x] 4.1 Build the presentation package and verify module separation in the output
-  - Run the presentation package build and confirm it succeeds
-  - Inspect the built GrowiSlides output file to confirm it contains no references to `@marp-team/marp-core` or `@marp-team/marpit`
-  - Inspect the built Slides output file to confirm it contains a dynamic `import()` expression for MarpSlides
-  - _Requirements: 5.1, 5.3, 5.4_
-
-- [x] 4.2 Build the main GROWI application and verify successful compilation
-  - Run the full app build to confirm no regressions from the presentation package changes
-  - Verify that both Marp and non-Marp slide rendering paths are intact by checking the build completes without type errors
-  - _Requirements: 4.1, 4.2, 4.3, 4.4, 4.5, 5.2_

+ 1 - 41
.kiro/specs/optimize-presentation/design.md → .kiro/specs/presentation/design.md

@@ -1,4 +1,4 @@
-# Design Document: optimize-presentation
+# Design Document: presentation
 
 
 ## Overview
 ## Overview
 
 
@@ -118,23 +118,6 @@ flowchart TD
   F --> G[vite build compiles all sources]
   F --> G[vite build compiles all sources]
 ```
 ```
 
 
-## Requirements Traceability
-
-| Requirement | Summary | Components | Interfaces | Flows |
-|-------------|---------|------------|------------|-------|
-| 1.1 | No marp-core references in GrowiSlides build output | GrowiSlides, marpit-base-css | — | — |
-| 1.2 | Non-Marp slides render without loading Marp | Slides, GrowiSlides | — | Slide Rendering Decision |
-| 1.3 | Pre-extracted CSS constants for container styling | marpit-base-css | MarpitBaseCss | CSS Extraction |
-| 1.4 | MARP_CONTAINER_CLASS_NAME in shared consts | consts/index.ts | — | — |
-| 2.1 | Dynamic load MarpSlides for marp:true pages | Slides | — | Slide Rendering Decision |
-| 2.2 | Loading indicator during MarpSlides load | Slides | — | Slide Rendering Decision |
-| 2.3 | No MarpSlides import triggered for non-Marp | Slides | — | Slide Rendering Decision |
-| 3.1 | Build-time CSS extraction script | extract-marpit-css.mjs | ExtractScript | CSS Extraction |
-| 3.2 | Extraction runs before source compilation | package.json pre:build:src | — | CSS Extraction |
-| 3.3 | Generated file committed for dev mode | marpit-base-css.ts | — | — |
-| 4.1–4.5 | Functional equivalence across all render paths | All components | — | Both flows |
-| 5.1–5.4 | Build verification of module separation | Build outputs | — | — |
-
 ## Components and Interfaces
 ## Components and Interfaces
 
 
 | Component | Domain | Intent | Req Coverage | Key Dependencies | Contracts |
 | Component | Domain | Intent | Req Coverage | Key Dependencies | Contracts |
@@ -299,26 +282,3 @@ export const presentationMarpit: Marp;
 - Validation: Script exits with error if CSS extraction produces empty output
 - Validation: Script exits with error if CSS extraction produces empty output
 - Risks: Marp options must stay synchronized with `growi-marpit.ts`
 - Risks: Marp options must stay synchronized with `growi-marpit.ts`
 
 
-## Testing Strategy
-
-### Unit Tests
-- Verify `GrowiSlides` renders correctly with pre-extracted CSS constants (no Marp imports in test)
-- Verify `Slides` renders `GrowiSlides` when `hasMarpFlag` is false/undefined
-- Verify `Slides` renders `MarpSlides` (via Suspense) when `hasMarpFlag` is true
-
-### Build Verification Tests
-- `GrowiSlides.js` build output contains no references to `@marp-team/marp-core` or `@marp-team/marpit`
-- `Slides.js` build output contains dynamic `import()` for MarpSlides
-- `@growi/presentation` builds without errors
-- `@growi/app` builds without errors
-
-### Integration Tests
-- Marp slide page (`marp: true`) renders correctly in inline view
-- Non-Marp slide page (`slide: true`) renders correctly in inline view
-- Presentation modal works for both Marp and non-Marp content
-
-## Performance & Scalability
-
-**Target**: Eliminate ~896KB of Marp-related JavaScript from the async chunk loaded for non-Marp slide rendering.
-
-**Measurement**: Compare the chunk contents before and after optimization using the existing `ChunkModuleStatsPlugin` or manual inspection of build output. The `initial` module count (primary KPI from build-optimization skill) is not directly affected since slides are already in async chunks, but the async chunk size is reduced.

+ 26 - 0
.kiro/specs/presentation/requirements.md

@@ -0,0 +1,26 @@
+# Presentation Feature — Requirements Overview
+
+## Introduction
+
+The GROWI presentation feature (`@growi/presentation` package) provides slide rendering for wiki pages using frontmatter flags. It supports two rendering modes:
+
+- **GrowiSlides** (`slide: true`): Lightweight slide rendering using ReactMarkdown with Marp container styling applied via pre-extracted CSS constants. Does not load Marp runtime dependencies.
+- **MarpSlides** (`marp: true`): Full Marp-powered slide rendering using `@marp-team/marp-core`, loaded dynamically only when needed.
+
+## Key Requirements
+
+### 1. Module Separation
+
+GrowiSlides renders without loading `@marp-team/marp-core` or `@marp-team/marpit`. Marp dependencies are isolated behind a dynamic import boundary (`React.lazy`) and only loaded for pages with `marp: true`.
+
+### 2. Build-Time CSS Extraction
+
+Marp base CSS is pre-extracted at build time via `extract-marpit-css.mjs`. The generated constants file (`consts/marpit-base-css.ts`) is committed to the repository so that dev mode works without running the extraction script first. The extraction runs automatically before source compilation via the `pre:build:src` script.
+
+### 3. Functional Equivalence
+
+Both Marp and non-Marp slide pages render correctly in inline view and presentation modal. No behavioral differences from the user's perspective.
+
+### 4. Build Integrity
+
+Both `@growi/presentation` and `@growi/app` build successfully. The GrowiSlides build output contains no Marp module references, and the Slides build output contains a dynamic `import()` for MarpSlides.

+ 0 - 0
.kiro/specs/optimize-presentation/research.md → .kiro/specs/presentation/research.md


+ 3 - 2
.kiro/specs/optimize-presentation/spec.json → .kiro/specs/presentation/spec.json

@@ -1,9 +1,10 @@
 {
 {
-  "feature_name": "optimize-presentation",
+  "feature_name": "presentation",
   "created_at": "2026-03-05T12:00:00Z",
   "created_at": "2026-03-05T12:00:00Z",
-  "updated_at": "2026-03-05T13:30:00Z",
+  "updated_at": "2026-03-23T00:00:00Z",
   "language": "en",
   "language": "en",
   "phase": "implementation-complete",
   "phase": "implementation-complete",
+  "cleanup_completed": true,
   "approvals": {
   "approvals": {
     "requirements": {
     "requirements": {
       "generated": true,
       "generated": true,

+ 262 - 0
.kiro/specs/upgrade-fixed-packages/design.md

@@ -0,0 +1,262 @@
+# Design Document: upgrade-fixed-packages
+
+## Overview
+
+**Purpose**: This feature audits and upgrades version-pinned packages in `apps/app/package.json` that were frozen due to upstream bugs, ESM-only migrations, or licensing constraints. The build environment has shifted from webpack to Turbopack, and the runtime now targets Node.js 24 with stable `require(esm)` support, invalidating several original pinning reasons.
+
+**Users**: Maintainers and developers benefit from up-to-date dependencies with bug fixes, security patches, and reduced technical debt.
+
+**Impact**: Modifies `apps/app/package.json` dependency versions and comment blocks; touches source files where `escape-string-regexp` is replaced by native `RegExp.escape()`.
+
+### Goals
+- Verify each pinning reason against current upstream status
+- Upgrade packages where the original constraint no longer applies
+- Replace `escape-string-regexp` with native `RegExp.escape()` (Node.js 24)
+- Update or remove comment blocks to reflect current state
+- Produce audit documentation for future reference
+
+### Non-Goals
+- Replacing handsontable with an alternative library (license constraint remains; replacement is a separate initiative)
+- Upgrading `@keycloak/keycloak-admin-client` to v19+ (significant API breaking changes; deferred to separate task)
+- Major version upgrades of unrelated packages
+- Modifying the build pipeline or Turbopack configuration
+
+## Architecture
+
+This is a dependency maintenance task, not a feature implementation. No new components or architectural changes are introduced.
+
+### Existing Architecture Analysis
+
+The pinned packages fall into distinct categories by their usage context:
+
+| Category | Packages | Build Context |
+|----------|----------|---------------|
+| Server-only (tsc → CJS) | `escape-string-regexp`, `@aws-sdk/*`, `@keycloak/*` | Express server compiled by tsc |
+| Client-only (Turbopack) | `string-width` (via @growi/editor), `bootstrap` | Bundled by Turbopack/Vite |
+| Client + SSR | `next-themes` | Turbopack + SSR rendering |
+| License-pinned | `handsontable`, `@handsontable/react` | Client-only |
+
+Key enabler: Node.js ^24 provides stable `require(esm)` support, removing the fundamental CJS/ESM incompatibility that caused several pins.
+
+### Technology Stack
+
+| Layer | Choice / Version | Role in Feature | Notes |
+|-------|------------------|-----------------|-------|
+| Runtime | Node.js ^24 | Enables `require(esm)` and `RegExp.escape()` | ES2025 Stage 4 features (e.g. `RegExp.escape()`) available |
+| Build (client) | Turbopack (Next.js 16) | Bundles ESM-only packages without issues | No changes needed |
+| Build (server) | tsc (CommonJS output) | `require(esm)` handles ESM-only imports | Node.js 24 native support |
+| Package manager | pnpm v10 | Manages dependency resolution | No changes needed |
+
+## System Flows
+
+### Upgrade Verification Flow
+
+```mermaid
+flowchart TD
+    Start[Select package to upgrade] --> Update[Update version in package.json]
+    Update --> Install[pnpm install]
+    Install --> Build{turbo run build}
+    Build -->|Pass| Lint{turbo run lint}
+    Build -->|Fail| Revert[Revert package change]
+    Lint -->|Pass| Test{turbo run test}
+    Lint -->|Fail| Revert
+    Test -->|Pass| Verify[Verify .next/node_modules symlinks]
+    Test -->|Fail| Revert
+    Verify --> Next[Proceed to next package]
+    Revert --> Document[Document failure reason]
+    Document --> Next
+```
+
+Each package is upgraded and verified independently. Failures are isolated and reverted without affecting other upgrades.
+
+## Requirements Traceability
+
+| Requirement | Summary | Components | Action |
+|-------------|---------|------------|--------|
+| 1.1 | Bootstrap bug investigation | PackageAudit | Verify #39798 fixed in v5.3.4 |
+| 1.2 | next-themes issue investigation | PackageAudit | Verify #122 resolved; check v0.4.x compatibility |
+| 1.3 | @aws-sdk constraint verification | PackageAudit | Confirm mongodb constraint is on different package |
+| 1.4 | Document investigation results | AuditReport | Summary table in research.md |
+| 2.1 | ESM compatibility per package | PackageAudit | Assess escape-string-regexp, string-width, @keycloak |
+| 2.2 | Server build ESM support | PackageAudit | Verify Node.js 24 require(esm) for server context |
+| 2.3 | Client build ESM support | PackageAudit | Confirm Turbopack handles ESM-only packages |
+| 2.4 | Compatibility matrix | AuditReport | Table in research.md |
+| 3.1 | Handsontable license check | PackageAudit | Confirm v7+ still non-MIT |
+| 3.2 | Document pinning requirement | AuditReport | Note in audit summary |
+| 4.1 | Update package.json versions and comments | UpgradeExecution | Modify versions and comment blocks |
+| 4.2 | Build verification | UpgradeExecution | `turbo run build --filter @growi/app` |
+| 4.3 | Lint verification | UpgradeExecution | `turbo run lint --filter @growi/app` |
+| 4.4 | Test verification | UpgradeExecution | `turbo run test --filter @growi/app` |
+| 4.5 | Revert on failure | UpgradeExecution | Git revert per package |
+| 4.6 | Update comment blocks | UpgradeExecution | Remove or update comments |
+| 5.1 | Audit summary table | AuditReport | Final summary with decisions |
+| 5.2 | Document continued pinning | AuditReport | Reasons for remaining pins |
+| 5.3 | Document upgrade rationale | AuditReport | What changed upstream |
+
+## Components and Interfaces
+
+| Component | Domain | Intent | Req Coverage | Key Dependencies |
+|-----------|--------|--------|--------------|------------------|
+| PackageAudit | Investigation | Research upstream status for each pinned package | 1.1–1.4, 2.1–2.4, 3.1–3.2 | GitHub issues, npm registry |
+| UpgradeExecution | Implementation | Apply version changes and verify build | 4.1–4.6 | pnpm, turbo, tsc |
+| SourceMigration | Implementation | Replace escape-string-regexp with RegExp.escape() | 4.1 | 9 source files |
+| AuditReport | Documentation | Produce summary of all decisions | 5.1–5.3 | research.md |
+
+### Investigation Layer
+
+#### PackageAudit
+
+| Field | Detail |
+|-------|--------|
+| Intent | Investigate upstream status of each pinned package and determine upgrade feasibility |
+| Requirements | 1.1, 1.2, 1.3, 1.4, 2.1, 2.2, 2.3, 2.4, 3.1, 3.2 |
+
+**Responsibilities & Constraints**
+- Check upstream issue trackers for bug fix status
+- Verify ESM compatibility against Node.js 24 `require(esm)` and Turbopack
+- Confirm license status for handsontable
+- Produce actionable recommendation per package
+
+**Audit Decision Matrix**
+
+| Package | Current | Action | Target | Risk | Rationale |
+|---------|---------|--------|--------|------|-----------|
+| `bootstrap` | `=5.3.2` | Upgrade | `^5.3.4` | Low | Bug #39798 fixed in v5.3.4 |
+| `next-themes` | `^0.2.1` | Upgrade | `^0.4.4` | Medium | Original issue was misattributed; v0.4.x works with Pages Router |
+| `escape-string-regexp` | `^4.0.0` | Replace | Remove dep | Low | Native `RegExp.escape()` in Node.js 24 |
+| `string-width` | `=4.2.2` | Upgrade | `^7.0.0` | Low | Used only in ESM context (@growi/editor) |
+| `@aws-sdk/client-s3` | `3.454.0` | Relax | `^3.454.0` | Low | Pinning comment was misleading |
+| `@aws-sdk/s3-request-presigner` | `3.454.0` | Relax | `^3.454.0` | Low | Same as above |
+| `@keycloak/keycloak-admin-client` | `^18.0.0` | Defer | No change | N/A | API breaking changes; separate task |
+| `handsontable` | `=6.2.2` | Keep | No change | N/A | License constraint (non-MIT since v7) |
+| `@handsontable/react` | `=2.1.0` | Keep | No change | N/A | Requires handsontable >= 7 |
+
+### Implementation Layer
+
+#### UpgradeExecution
+
+| Field | Detail |
+|-------|--------|
+| Intent | Apply version changes incrementally with build verification |
+| Requirements | 4.1, 4.2, 4.3, 4.4, 4.5, 4.6 |
+
+**Responsibilities & Constraints**
+- Upgrade one package at a time to isolate failures
+- Run full verification suite (build, lint, test) after each change
+- Revert and document any package that causes failures
+- Update `// comments for dependencies` block to reflect new state
+
+**Upgrade Order** (lowest risk first):
+1. `@aws-sdk/*` — relax version range (no code changes)
+2. `string-width` — upgrade in @growi/editor (isolated ESM package)
+3. `bootstrap` — upgrade to ^5.3.4 (verify SCSS compilation)
+4. `escape-string-regexp` → `RegExp.escape()` — source code changes across 9 files
+5. `next-themes` — upgrade to ^0.4.x (review API changes across 12 files)
+
+**Implementation Notes**
+- After each upgrade, verify `.next/node_modules/` symlinks for Turbopack externalisation compliance (per `package-dependencies` rule)
+- For bootstrap: run `pnpm run pre:styles-commons` and `pnpm run pre:styles-components` to verify SCSS compilation
+- For next-themes: review v0.3.0 and v0.4.0 changelogs for breaking API changes before modifying code
+
+#### SourceMigration
+
+| Field | Detail |
+|-------|--------|
+| Intent | Replace all `escape-string-regexp` usage with native `RegExp.escape()` |
+| Requirements | 4.1 |
+
+**Files to Modify**:
+
+`apps/app/src/` (6 files):
+- `server/models/page.ts`
+- `server/service/page/index.ts`
+- `server/service/page-grant.ts`
+- `server/routes/apiv3/users.js`
+- `server/models/obsolete-page.js`
+- `features/openai/server/services/openai.ts`
+
+`packages/` (3 files):
+- `packages/core/src/utils/page-path-utils/` (2 files)
+- `packages/remark-lsx/src/server/routes/list-pages/index.ts`
+
+**Migration Pattern**:
+```typescript
+// Before
+import escapeStringRegexp from 'escape-string-regexp';
+const pattern = new RegExp(escapeStringRegexp(input));
+
+// After
+const pattern = new RegExp(RegExp.escape(input));
+```
+
+**Implementation Notes**
+- Remove `escape-string-regexp` from `apps/app/package.json` dependencies after migration
+- Remove from `packages/core/package.json` and `packages/remark-lsx/package.json` if listed
+- Verify `RegExp.escape()` TypeScript types are available (may need `@types/node` update or lib config)
+
+### Documentation Layer
+
+#### AuditReport
+
+| Field | Detail |
+|-------|--------|
+| Intent | Document all audit decisions for future maintainers |
+| Requirements | 5.1, 5.2, 5.3 |
+
+**Deliverables**:
+- Updated `// comments for dependencies` in package.json (only retained pins with current reasons)
+- Updated `// comments for defDependencies` (handsontable entries unchanged)
+- Summary in research.md with final decision per package
+
+**Updated Comment Blocks** (target state):
+
+```json
+{
+  "// comments for dependencies": {
+    "@keycloak/keycloak-admin-client": "19.0.0 or above exports only ESM. API breaking changes require separate migration effort.",
+    "next-themes": "(if upgrade fails) Document specific failure reason here"
+  },
+  "// comments for defDependencies": {
+    "@handsontable/react": "v3 requires handsontable >= 7.0.0.",
+    "handsontable": "v7.0.0 or above is no longer MIT license."
+  }
+}
+```
+
+Note: The exact final state depends on which upgrades succeed. If all planned upgrades pass, only `@keycloak` and `handsontable` entries remain.
+
+## Testing Strategy
+
+### Build Verification (per package)
+- `turbo run build --filter @growi/app` — Turbopack client build + tsc server build
+- `ls apps/app/.next/node_modules/ | grep <package>` — Externalisation check
+- `pnpm run pre:styles-commons` — SCSS compilation (bootstrap only)
+
+### Lint Verification (per package)
+- `turbo run lint --filter @growi/app` — TypeScript type check + Biome
+
+### Unit/Integration Tests (per package)
+- `turbo run test --filter @growi/app` — Full test suite
+- For `RegExp.escape()` migration: run tests for page model, page service, page-grant service specifically
+
+### Regression Verification (final)
+- Full build + lint + test after all upgrades applied together
+- Verify `.next/node_modules/` symlink integrity via `check-next-symlinks.sh` (if available locally)
+
+## Migration Strategy
+
+```mermaid
+flowchart LR
+    Phase1[Phase 1: Low Risk] --> Phase2[Phase 2: Medium Risk]
+    Phase1 --> P1a[aws-sdk relax range]
+    Phase1 --> P1b[string-width upgrade]
+    Phase2 --> P2a[bootstrap upgrade]
+    Phase2 --> P2b[escape-string-regexp replace]
+    Phase2 --> P2c[next-themes upgrade]
+```
+
+- **Phase 1** (low risk): @aws-sdk range relaxation, string-width upgrade — minimal code changes
+- **Phase 2** (medium risk): bootstrap, escape-string-regexp replacement, next-themes — requires code review and/or source changes
+- Each upgrade is independently revertible
+- Deferred: @keycloak (high risk, separate task)
+- No change: handsontable (license constraint)

+ 75 - 0
.kiro/specs/upgrade-fixed-packages/requirements.md

@@ -0,0 +1,75 @@
+# Requirements Document
+
+## Introduction
+
+The `apps/app/package.json` file contains several packages whose versions are intentionally pinned due to ESM-only upgrades, upstream bugs, or licensing concerns. These pinning reasons were documented in `// comments for dependencies` and `// comments for defDependencies` comment blocks. Since the build environment has significantly changed (webpack → Turbopack), and upstream issues may have been resolved, a systematic audit is needed to determine which packages can now be safely upgraded.
+
+### Pinned Packages Inventory
+
+| # | Package | Current Version | Pinning Reason |
+|---|---------|----------------|----------------|
+| 1 | `@aws-sdk/client-s3`, `@aws-sdk/s3-request-presigner` | `3.454.0` | Fix version above 3.186.0 required by mongodb@4.16.0 |
+| 2 | `@keycloak/keycloak-admin-client` | `^18.0.0` | 19.0.0+ exports only ESM |
+| 3 | `bootstrap` | `=5.3.2` | v5.3.3 has a bug (twbs/bootstrap#39798) |
+| 4 | `escape-string-regexp` | `^4.0.0` | 5.0.0+ exports only ESM |
+| 5 | `next-themes` | `^0.2.1` | 0.3.0 causes type error (pacocoursey/next-themes#122) |
+| 6 | `string-width` | `=4.2.2` | 5.0.0+ exports only ESM |
+| 7 | `@handsontable/react` | `=2.1.0` | v3 requires handsontable >= 7.0.0 |
+| 8 | `handsontable` | `=6.2.2` | v7.0.0+ is no longer MIT license |
+
+## Requirements
+
+### Requirement 1: Upstream Bug and Issue Investigation
+
+**Objective:** As a maintainer, I want to verify whether upstream bugs and issues that originally caused version pinning have been resolved, so that I can make informed upgrade decisions.
+
+#### Acceptance Criteria
+
+1. When investigating the bootstrap pinning, the audit process shall check the current status of https://github.com/twbs/bootstrap/issues/39798 and determine whether v5.3.3+ has fixed the reported bug.
+2. When investigating the next-themes pinning, the audit process shall check the current status of https://github.com/pacocoursey/next-themes/issues/122 and determine whether v0.3.0+ has resolved the type error.
+3. When investigating the @aws-sdk pinning, the audit process shall verify whether the mongodb version used in GROWI still requires the `>=3.186.0` constraint and whether the latest @aws-sdk versions are compatible.
+4. The audit process shall document the investigation result for each package, including: current upstream status, whether the original issue is resolved, and the recommended action (upgrade/keep/replace).
+
+### Requirement 2: ESM-Only Package Compatibility Assessment
+
+**Objective:** As a maintainer, I want to assess whether ESM-only versions of pinned packages are now compatible with the current Turbopack-based build environment, so that outdated CJS-only constraints can be removed.
+
+#### Acceptance Criteria
+
+1. When assessing ESM compatibility, the audit process shall evaluate each ESM-pinned package (`escape-string-regexp`, `string-width`, `@keycloak/keycloak-admin-client`) against the current build pipeline (Turbopack for client, tsc for server).
+2. When a package is used in server-side code (transpiled via tsc with `tsconfig.build.server.json`), the audit process shall verify whether the server build output format (CJS or ESM) supports importing ESM-only packages.
+3. When a package is used only in client-side code (bundled via Turbopack), the audit process shall confirm that Turbopack can resolve ESM-only packages without issues.
+4. The audit process shall produce a compatibility matrix showing each ESM-pinned package, its usage context (server/client/both), and whether upgrading to the ESM-only version is feasible.
+
+### Requirement 3: License Compliance Verification
+
+**Objective:** As a maintainer, I want to confirm that the handsontable/`@handsontable/react` licensing situation has not changed, so that I can determine whether these packages must remain pinned or can be replaced.
+
+#### Acceptance Criteria
+
+1. When evaluating handsontable, the audit process shall verify the current license of handsontable v7.0.0+ and confirm whether it remains non-MIT.
+2. If handsontable v7.0.0+ is still non-MIT, the audit process shall document that `handsontable` (`=6.2.2`) and `@handsontable/react` (`=2.1.0`) must remain pinned or an alternative library must be identified.
+3. If a MIT-licensed alternative to handsontable exists, the audit process shall note it as a potential replacement candidate (out of scope for this spec but documented for future work).
+
+### Requirement 4: Safe Upgrade Execution
+
+**Objective:** As a maintainer, I want to upgrade packages that are confirmed safe to update, so that the project benefits from bug fixes, security patches, and new features.
+
+#### Acceptance Criteria
+
+1. When upgrading a pinned package, the upgrade process shall update the version specifier in `apps/app/package.json` and remove or update the corresponding entry in the `// comments for dependencies` or `// comments for defDependencies` block.
+2. When a package is upgraded, the upgrade process shall verify that `turbo run build --filter @growi/app` completes successfully.
+3. When a package is upgraded, the upgrade process shall verify that `turbo run lint --filter @growi/app` completes without new errors.
+4. When a package is upgraded, the upgrade process shall verify that `turbo run test --filter @growi/app` passes without new failures.
+5. If a package upgrade causes build, lint, or test failures, the upgrade process shall revert that specific package change and document the failure reason.
+6. When all upgrades are complete, the `// comments for dependencies` and `// comments for defDependencies` blocks shall accurately reflect only the packages that remain pinned, with updated reasons if applicable.
+
+### Requirement 5: Audit Documentation
+
+**Objective:** As a maintainer, I want a clear record of the audit results, so that future maintainers understand which packages were evaluated and why decisions were made.
+
+#### Acceptance Criteria
+
+1. The audit process shall produce a summary table documenting each pinned package with: package name, previous version, new version (or "unchanged"), and rationale for the decision.
+2. When a package remains pinned, the documentation shall include the verified reason for continued pinning.
+3. When a package is upgraded, the documentation shall note what changed upstream that made the upgrade possible.

+ 183 - 0
.kiro/specs/upgrade-fixed-packages/research.md

@@ -0,0 +1,183 @@
+# Research & Design Decisions
+
+---
+**Purpose**: Capture discovery findings for the pinned package audit and upgrade initiative.
+**Usage**: Inform design.md decisions; provide evidence for future maintainers.
+---
+
+## Summary
+- **Feature**: `upgrade-fixed-packages`
+- **Discovery Scope**: Extension (auditing existing dependency constraints)
+- **Key Findings**:
+  - Bootstrap bug (#39798) fixed in v5.3.4 — safe to upgrade to latest 5.3.x
+  - next-themes original issue (#122) was resolved long ago; upgrade to v0.4.x feasible but has Next.js 16 `cacheComponents` caveat
+  - Node.js ^24 enables stable `require(esm)`, unlocking ESM-only package upgrades for server code
+  - `escape-string-regexp` can be replaced entirely by native `RegExp.escape()` (ES2026, Node.js 24)
+  - handsontable license situation unchanged — must remain pinned at 6.2.2
+  - @aws-sdk pinning comment is misleading; packages can be freely upgraded
+
+## Research Log
+
+### Bootstrap v5.3.3 Bug (#39798)
+- **Context**: bootstrap pinned at `=5.3.2` due to modal header regression in v5.3.3
+- **Sources Consulted**: https://github.com/twbs/bootstrap/issues/39798, https://github.com/twbs/bootstrap/pull/41336
+- **Findings**:
+  - Issue CLOSED on 2025-04-03
+  - Fixed in v5.3.4 via PR #41336 (Fix modal and offcanvas header collapse)
+  - Bug: `.modal-header` lost `justify-content: space-between`, causing content collapse
+  - Latest stable: v5.3.8 (August 2025)
+- **Implications**: Safe to upgrade from `=5.3.2` to `^5.3.4`. Skip v5.3.3 entirely. Recommend `^5.3.4` or pin to latest `=5.3.8`.
+
+### next-themes Type Error (#122)
+- **Context**: next-themes pinned at `^0.2.1` due to reported type error in v0.3.0
+- **Sources Consulted**: https://github.com/pacocoursey/next-themes/issues/122, https://github.com/pacocoursey/next-themes/issues/375
+- **Findings**:
+  - Issue #122 CLOSED on 2022-06-02 — was specific to an old beta version (v0.0.13-beta.3), not v0.3.0
+  - The pinning reason was based on incomplete information; v0.2.0+ already had the fix
+  - Latest: v0.4.6 (March 2025). Peers: `react ^16.8 || ^17 || ^18 || ^19`
+  - **Caveat**: Issue #375 reports a bug with Next.js 16's `cacheComponents` feature — stale theme values when cached components reactivate
+  - PR #377 in progress to fix via `useSyncExternalStore`
+  - Without `cacheComponents`, v0.4.6 works fine with Next.js 16
+- **Implications**: Upgrade to v0.4.x is feasible. GROWI uses Pages Router (not App Router), so `cacheComponents` is likely not relevant. Breaking API changes between v0.2 → v0.4 need review. Used in 12 files across apps/app.
+
+### ESM-only Package Compatibility (escape-string-regexp, string-width, @keycloak)
+- **Context**: Three packages pinned to CJS-compatible versions because newer versions are ESM-only
+- **Sources Consulted**: Node.js v22.12.0 release notes (require(esm) enabled by default), TC39 RegExp.escape Stage 4, sindresorhus ESM guidance, npm package pages
+- **Findings**:
+
+  **escape-string-regexp** (^4.0.0):
+  - Used in 6 server-side files + 3 shared package files (all server context)
+  - Node.js 24 has stable `require(esm)` — ESM-only v5 would work
+  - **Better**: `RegExp.escape()` is ES2026 Stage 4, natively available in Node.js 24 (V8 support)
+  - Can eliminate the dependency entirely
+
+  **string-width** (=4.2.2):
+  - Used only in `packages/editor/src/models/markdown-table.js`
+  - `@growi/editor` has `"type": "module"` and builds with Vite (ESM context)
+  - No server-side value imports (only type imports in `sync-ydoc.ts`, erased at compile)
+  - Safe to upgrade to v7.x
+
+  **@keycloak/keycloak-admin-client** (^18.0.0):
+  - Used in 1 server-side file: `features/external-user-group/server/service/keycloak-user-group-sync.ts`
+  - Latest: v26.5.5 (February 2026)
+  - `require(esm)` in Node.js 24 should handle it, but API has significant breaking changes (v18 → v26)
+  - Sub-path exports need verification
+  - Higher risk upgrade — API surface changes expected
+
+- **Implications**: string-width is the easiest upgrade. escape-string-regexp should be replaced by native `RegExp.escape()`. @keycloak requires careful API migration and is higher risk.
+
+### @aws-sdk Pinning Analysis
+- **Context**: @aws-sdk/client-s3 and @aws-sdk/s3-request-presigner pinned at 3.454.0
+- **Sources Consulted**: mongodb package.json, npm registry, GROWI source code
+- **Findings**:
+  - Pinning comment says "required by mongodb@4.16.0" but is misleading
+  - mongodb@4.17.2 has `@aws-sdk/credential-providers: ^3.186.0` as **optional** dependency — a different package
+  - The S3 client packages are used directly by GROWI for file upload (server/service/file-uploader/aws/)
+  - Latest: @aws-sdk/client-s3@3.1014.0 (March 2026) — over 500 versions behind
+  - AWS SDK v3 follows semver; any 3.x should be compatible
+- **Implications**: Remove the misleading comment. Change from exact `3.454.0` to `^3.454.0` or update to latest. Low risk.
+
+### Handsontable License Status
+- **Context**: handsontable pinned at =6.2.2 (last MIT version), @handsontable/react at =2.1.0
+- **Sources Consulted**: handsontable.com/docs/software-license, npm, Hacker News discussion
+- **Findings**:
+  - v7.0.0+ (March 2019) switched from MIT to proprietary license — unchanged as of 2026
+  - Free "Hobby" license exists but restricted to non-commercial personal use
+  - Commercial use requires paid subscription
+  - MIT alternatives: AG Grid Community (most mature), Jspreadsheet CE, Univer (Apache 2.0)
+- **Implications**: Must remain pinned. No action possible without license purchase or library replacement. Library replacement is out of scope for this spec.
+
+## Design Decisions
+
+### Decision: Replace escape-string-regexp with native RegExp.escape()
+- **Context**: escape-string-regexp v5 is ESM-only; used in 9 files across server code
+- **Alternatives Considered**:
+  1. Upgrade to v5 with require(esm) support — works but adds unnecessary dependency
+  2. Replace with native `RegExp.escape()` — zero dependencies, future-proof
+- **Selected Approach**: Replace with `RegExp.escape()`
+- **Rationale**: Node.js 24 supports `RegExp.escape()` natively (ES2026 Stage 4). Eliminates a dependency entirely.
+- **Trade-offs**: Requires touching 9 files, but changes are mechanical (find-and-replace)
+- **Follow-up**: Verify `RegExp.escape()` is available in the project's Node.js 24 target
+
+### Decision: Upgrade string-width directly to v7.x
+- **Context**: Used only in @growi/editor (ESM package, Vite-bundled, client-only)
+- **Selected Approach**: Direct upgrade to latest v7.x
+- **Rationale**: Consumer is already ESM; zero CJS concern
+- **Trade-offs**: None significant; API is stable
+
+### Decision: Upgrade bootstrap to ^5.3.8
+- **Context**: Bug fixed in v5.3.4; latest is 5.3.8
+- **Selected Approach**: Change from `=5.3.2` to `^5.3.8`
+- **Rationale**: Original bug resolved in v5.3.4; skip v5.3.3 and adopt the latest stable
+- **Trade-offs**: Need to verify GROWI's custom SCSS and modal usage against 5.3.4+ changes
+
+### Decision: Upgrade next-themes to latest 0.4.x
+- **Context**: Original issue was a misunderstanding; latest is v0.4.6
+- **Selected Approach**: Upgrade to `^0.4.6` (latest)
+- **Rationale**: Issue #122 was specific to old beta, not v0.3.0. GROWI uses Pages Router, so cacheComponents bug is not relevant.
+- **Trade-offs**: Breaking API changes between v0.2 → v0.4 need review. 12 files import from next-themes.
+- **Follow-up**: Review v0.3.0 and v0.4.0 changelogs for breaking changes
+
+### Decision: Relax @aws-sdk version to caret range
+- **Context**: Pinning was based on misleading comment; packages are independent of mongodb constraint
+- **Selected Approach**: Change from `3.454.0` to `^3.1014.0` (latest)
+- **Rationale**: AWS SDK v3 follows semver; the comment conflated credential-providers with S3 client
+- **Trade-offs**: Low risk. The caret range allows future 3.x patches without re-pinning.
+
+### Decision: Defer @keycloak upgrade (high risk)
+- **Context**: v18 → v26 has significant API breaking changes; only 1 file affected
+- **Selected Approach**: Document as upgradeable but defer to a separate task
+- **Rationale**: API migration requires Keycloak server compatibility testing; out of proportion for a batch upgrade task
+- **Trade-offs**: Remains on old version longer, but isolated to one feature
+
+### Decision: Keep handsontable pinned (license constraint)
+- **Context**: v7+ is proprietary; no free alternative that's drop-in
+- **Selected Approach**: No change. Document for future reference.
+- **Rationale**: License constraint is permanent unless library is replaced entirely
+- **Trade-offs**: None — this is a business/legal decision, not technical
+
+## Risks & Mitigations
+- **Bootstrap SCSS breakage**: v5.3.4+ may have SCSS variable changes → Run `pre:styles-commons` and `pre:styles-components` builds to verify
+- **next-themes API changes**: v0.2 → v0.4 has breaking changes → Review changelog; test all 12 consuming files
+- **RegExp.escape() availability**: Ensure Node.js 24 V8 includes it → Verify with simple runtime test
+- **@aws-sdk transitive dependency changes**: Newer AWS SDK may pull different transitive deps → Monitor bundle size
+- **Build regression**: Any upgrade could break Turbopack build → Follow incremental upgrade strategy with build verification per package
+
+## Future Considerations (Out of Scope)
+
+### transpilePackages cleanup in next.config.ts
+- **Context**: `next.config.ts` defines `getTranspilePackages()` listing 60+ ESM-only packages to force Turbopack to bundle them instead of externalising. The original comment says: "listing ESM packages until experimental.esmExternals works correctly to avoid ERR_REQUIRE_ESM".
+- **Relationship to require(esm)**: `transpilePackages` and `require(esm)` solve different problems. `transpilePackages` prevents Turbopack from externalising packages during SSR; `require(esm)` allows Node.js to load ESM packages via `require()` at runtime. With Node.js 24's stable `require(esm)`, externalised ESM packages *should* load correctly in SSR, meaning some `transpilePackages` entries may become unnecessary.
+- **Why not now**: (1) Turbopack's `esmExternals` handling is still `experimental`; (2) removing entries shifts packages from bundled to externalised, which means they appear in `.next/node_modules/` and must be classified as `dependencies` per the `package-dependencies` rule; (3) 60+ packages need individual verification. This is a separate investigation with a large blast radius.
+- **Recommendation**: Track as a separate task. Test by removing a few low-risk entries (e.g., `bail`, `ccount`, `zwitch`) and checking whether SSR still works with Turbopack externalisation + Node.js 24 `require(esm)`.
+
+## References
+- [Bootstrap issue #39798](https://github.com/twbs/bootstrap/issues/39798) — modal header regression, fixed in v5.3.4
+- [next-themes issue #122](https://github.com/pacocoursey/next-themes/issues/122) — type error, resolved in v0.2.0
+- [next-themes issue #375](https://github.com/pacocoursey/next-themes/issues/375) — Next.js 16 cacheComponents bug
+- [TC39 RegExp.escape() Stage 4](https://socket.dev/blog/tc39-advances-3-proposals-to-stage-4-regexp-escaping-float16array-and-redeclarable-global-eval) — ES2026
+- [Node.js require(esm) stability](https://joyeecheung.github.io/blog/2025/12/30/require-esm-in-node-js-from-experiment-to-stability/) — stable since Node.js 22.12.0
+- [Handsontable license change](https://handsontable.com/docs/javascript-data-grid/software-license/) — proprietary since v7.0.0
+
+## Final Audit Summary (2026-03-23)
+
+| Package | Previous Version | New Version | Action | Rationale |
+|---------|-----------------|-------------|--------|-----------|
+| `@aws-sdk/client-s3` | `3.454.0` | `^3.1014.0` | Upgraded | Pinning comment was misleading; S3 client is independent of mongodb constraint |
+| `@aws-sdk/s3-request-presigner` | `3.454.0` | `^3.1014.0` | Upgraded | Same as above |
+| `bootstrap` | `=5.3.2` | `^5.3.8` | Upgraded | Bug #39798 fixed in v5.3.4; SCSS compilation verified |
+| `escape-string-regexp` | `^4.0.0` | Removed | Replaced | Native `RegExp.escape()` (ES2026, Node.js 24) eliminates the dependency |
+| `string-width` | `=4.2.2` | `^7.0.0` | Upgraded | Used only in @growi/editor (ESM context, Vite-bundled) |
+| `next-themes` | `^0.2.1` | `^0.4.6` | Upgraded | Original issue #122 was misattributed; only change needed: type import path |
+| `@keycloak/keycloak-admin-client` | `^18.0.0` | Unchanged | Deferred | API breaking changes (v18→v26) require separate migration effort |
+| `handsontable` | `=6.2.2` | Unchanged | Kept | v7.0.0+ is proprietary (non-MIT license) |
+| `@handsontable/react` | `=2.1.0` | Unchanged | Kept | Requires handsontable >= 7.0.0 |
+
+### Additional Changes
+
+- Added `RegExp.escape()` TypeScript type declarations in `apps/app/src/@types/`, `packages/core/src/@types/`, and `packages/remark-lsx/src/@types/` (awaiting TypeScript built-in support)
+- Updated `tsconfig.build.client.json` to include `src/@types/**/*.d.ts` for Next.js build compatibility
+- Updated `generate-children-regexp.spec.ts` test expectations for `RegExp.escape()` output (escapes spaces as `\x20`)
+- Removed `escape-string-regexp` from `transpilePackages` in `next.config.ts`
+- Updated `bootstrap` version across 5 packages: apps/app, packages/editor, packages/core-styles, packages/preset-themes, apps/slackbot-proxy
+- Updated `// comments for dependencies` to retain only `@keycloak` entry with updated reason

+ 22 - 0
.kiro/specs/upgrade-fixed-packages/spec.json

@@ -0,0 +1,22 @@
+{
+  "feature_name": "upgrade-fixed-packages",
+  "created_at": "2026-03-23T00:00:00Z",
+  "updated_at": "2026-03-23T00:00:00Z",
+  "language": "en",
+  "phase": "implementation-complete",
+  "approvals": {
+    "requirements": {
+      "generated": true,
+      "approved": true
+    },
+    "design": {
+      "generated": true,
+      "approved": true
+    },
+    "tasks": {
+      "generated": true,
+      "approved": true
+    }
+  },
+  "ready_for_implementation": true
+}

+ 89 - 0
.kiro/specs/upgrade-fixed-packages/tasks.md

@@ -0,0 +1,89 @@
+# Implementation Plan
+
+- [x] 1. Pre-implementation verification
+- [x] 1.1 Verify RegExp.escape() availability and TypeScript support
+  - Confirm `RegExp.escape()` is available at runtime in the project's Node.js 24 target
+  - Check whether TypeScript recognizes `RegExp.escape()` — may need `lib` config update or `@types/node` update
+  - If unavailable, fall back to upgrading `escape-string-regexp` to v5 with `require(esm)` instead
+  - _Requirements: 2.2_
+
+- [x] 1.2 Review next-themes v0.3.0 and v0.4.0 breaking API changes
+  - Read changelogs for v0.3.0 and v0.4.0 releases to identify breaking changes
+  - Map breaking changes to the 12 consuming files in apps/app
+  - Determine migration effort and document required code changes
+  - Confirm GROWI's Pages Router usage is unaffected by the cacheComponents bug (issue #375)
+  - _Requirements: 1.2_
+
+- [x] 2. Low-risk package upgrades
+- [x] 2.1 (P) Relax @aws-sdk version range
+  - Change `@aws-sdk/client-s3` from `3.454.0` to `^3.1014.0` in apps/app/package.json
+  - Change `@aws-sdk/s3-request-presigner` from `3.454.0` to `^3.1014.0`
+  - Update the misleading `"@aws-skd/*"` comment to reflect the actual reason or remove it
+  - Run `pnpm install` and verify build with `turbo run build --filter @growi/app`
+  - Run `turbo run test --filter @growi/app` to confirm no regressions
+  - _Requirements: 1.3, 4.1, 4.2, 4.4_
+
+- [x] 2.2 (P) Upgrade string-width in @growi/editor
+  - Update `string-width` from `=4.2.2` to `^7.0.0` in packages/editor/package.json
+  - Verify @growi/editor builds successfully (Vite, ESM context)
+  - Run `turbo run build --filter @growi/app` to confirm downstream build passes
+  - Run `turbo run test --filter @growi/app` to confirm no regressions
+  - Remove the `string-width` comment from apps/app/package.json `// comments for dependencies`
+  - _Requirements: 2.1, 2.3, 4.1, 4.2, 4.4_
+
+- [x] 3. Upgrade bootstrap to ^5.3.8
+  - Change `bootstrap` from `=5.3.2` to `^5.3.8` in apps/app/package.json and all other packages
+  - Run `pnpm install` to resolve the new version
+  - Run `pnpm run pre:styles-commons` and `pnpm run pre:styles-components` to verify SCSS compilation
+  - Run `turbo run build --filter @growi/app` to confirm Turbopack build passes
+  - Run `turbo run lint --filter @growi/app` to check for type or lint errors
+  - Run `turbo run test --filter @growi/app` to confirm no regressions
+  - Visually inspect modal headers if a dev server is available (original bug was modal header layout)
+  - Remove the `bootstrap` comment from `// comments for dependencies`
+  - If build or SCSS fails, revert and document the failure reason
+  - _Requirements: 1.1, 4.1, 4.2, 4.3, 4.4, 4.5_
+
+- [x] 4. Replace escape-string-regexp with native RegExp.escape()
+- [x] 4.1 Migrate all source files from escape-string-regexp to RegExp.escape()
+  - Replace `import escapeStringRegexp from 'escape-string-regexp'` and corresponding calls with `RegExp.escape()` in each file
+  - Files in apps/app/src: page.ts, page/index.ts, page-grant.ts, users.js, obsolete-page.js, openai.ts (6 files)
+  - Files in packages: core/src/utils/page-path-utils (2 files), remark-lsx/src/server/routes/list-pages/index.ts (1 file)
+  - Ensure each replacement preserves the exact same escaping behavior
+  - _Requirements: 4.1_
+
+- [x] 4.2 Remove escape-string-regexp dependency and verify
+  - Remove `escape-string-regexp` from apps/app/package.json dependencies
+  - Remove from packages/core and packages/remark-lsx package.json if listed
+  - Remove the `escape-string-regexp` comment from `// comments for dependencies`
+  - Remove `escape-string-regexp` entry from `transpilePackages` in next.config.ts
+  - Run `pnpm install` to update lockfile
+  - Run `turbo run build --filter @growi/app` to verify build
+  - Run `turbo run lint --filter @growi/app` to verify no type errors
+  - Run `turbo run test --filter @growi/app` to verify no regressions
+  - If RegExp.escape() has TypeScript issues, add type declaration or adjust lib config
+  - _Requirements: 2.1, 2.2, 4.1, 4.2, 4.3, 4.4, 4.5_
+
+- [x] 5. Upgrade next-themes to ^0.4.x
+- [x] 5.1 Update next-themes and adapt consuming code
+  - Change `next-themes` from `^0.2.1` to `^0.4.6` in apps/app/package.json
+  - Apply required API migration changes across the 12 consuming files identified in design
+  - Pay attention to any renamed exports, changed hook signatures, or provider prop changes
+  - Ensure `useTheme()` and `ThemeProvider` usage is compatible with v0.4.x API
+  - _Requirements: 1.2, 4.1_
+
+- [x] 5.2 Verify next-themes upgrade
+  - Run `turbo run build --filter @growi/app` to confirm build passes
+  - Run `turbo run lint --filter @growi/app` to check for type errors (original pinning was about types)
+  - Run `turbo run test --filter @growi/app` to confirm no regressions
+  - Remove the `next-themes` comment from `// comments for dependencies`
+  - If build or type errors occur, investigate whether the issue is the same as #122 or a new problem
+  - If upgrade fails, revert and document the reason; keep the pin with an updated comment
+  - _Requirements: 4.2, 4.3, 4.4, 4.5, 4.6_
+
+- [x] 6. Finalize audit documentation and comment blocks
+  - Verify `// comments for dependencies` block contains only packages that remain pinned (@keycloak if unchanged)
+  - Verify `// comments for defDependencies` block is accurate (handsontable entries unchanged)
+  - Update comment text to reflect current reasons where applicable
+  - Produce a final summary table in research.md documenting: package name, previous version, new version or "unchanged", and rationale
+  - Confirm all requirements are satisfied by reviewing the checklist against actual changes made
+  - _Requirements: 3.1, 3.2, 4.6, 5.1, 5.2, 5.3_

+ 5 - 1
.kiro/steering/structure.md

@@ -12,5 +12,9 @@ In full-stack packages (e.g., `apps/app`), server-side code (`src/server/`, mode
 
 
 For apps/app-specific examples and build tooling details, see `apps/app/.claude/skills/build-optimization/SKILL.md`.
 For apps/app-specific examples and build tooling details, see `apps/app/.claude/skills/build-optimization/SKILL.md`.
 
 
+### The positioning of @growi/core
+
+See: `.claude/skills/monorepo-overview/SKILL.md` — "@growi/core — Domain & Utilities Hub" section
+
 ---
 ---
-_Updated: 2026-03-03. apps/app details moved to `apps/app/.claude/skills/build-optimization/SKILL.md`._
+_Updated: 2026-03-24. @growi/core details moved to monorepo-overview SKILL.md (auto-loaded)._

+ 1 - 1
apps/app/.claude/rules/package-dependencies.md

@@ -24,7 +24,7 @@ ls apps/app/.next/node_modules/ | grep <package-name>
 |---|---|
 |---|---|
 | `import foo from 'pkg'` at module level in SSR-executed code | `dependencies` |
 | `import foo from 'pkg'` at module level in SSR-executed code | `dependencies` |
 | `import type { Foo } from 'pkg'` only | `devDependencies` (type-erased at build) |
 | `import type { Foo } from 'pkg'` only | `devDependencies` (type-erased at build) |
-| `await import('pkg')` inside `useEffect` / event handler | Check `.next/node_modules/` — may still be externalised |
+| `await import('pkg')` inside `useEffect` / event handler | Check `.next/node_modules/` — may still be externalised (see `fix-broken-next-symlinks` skill) |
 | Used only in `*.spec.ts`, build scripts, or CI | `devDependencies` |
 | Used only in `*.spec.ts`, build scripts, or CI | `devDependencies` |
 
 
 ## Common Misconceptions
 ## Common Misconceptions

+ 90 - 0
apps/app/.claude/skills/learned/fix-broken-next-symlinks/SKILL.md

@@ -0,0 +1,90 @@
+---
+name: fix-broken-next-symlinks
+description: Fix broken symlinks in .next/node_modules/ — diagnose, decide allowlist vs dependencies, and verify
+---
+
+## IMPORTANT
+
+This document is a **mandatory step-by-step procedure**. When fixing broken symlinks, execute every step in order. In particular, verification **always** requires the full 3-command sequence: `build` → `assemble-prod.sh` → `check-next-symlinks.sh`. Never skip `assemble-prod.sh` — the symlink check is only meaningful after production assembly.
+
+## Problem
+
+Turbopack externalizes packages into `.next/node_modules/` as symlinks, even for packages imported only via dynamic `import()` inside `useEffect`. After `assemble-prod.sh` runs `pnpm deploy --prod`, `devDependencies` are excluded, breaking those symlinks. `check-next-symlinks.sh` detects these and fails the build.
+
+## Diagnosis
+
+### Step 1 — Reproduce locally
+
+```bash
+turbo run build --filter @growi/app
+bash apps/app/bin/assemble-prod.sh
+bash apps/app/bin/check-next-symlinks.sh
+```
+
+If the check reports `BROKEN: apps/app/.next/node_modules/<package>-<hash>`, proceed to Step 2.
+
+### Step 2 — Determine the fix
+
+Search all import sites of the broken package:
+
+```bash
+grep -rn "from ['\"]<package-name>['\"]" apps/app/src/
+grep -rn "import(['\"]<package-name>['\"])" apps/app/src/
+```
+
+Apply the decision tree:
+
+```
+Is the package imported ONLY via:
+  - `import type { ... } from 'pkg'`  (erased at compile time)
+  - `await import('pkg')` inside useEffect / event handler  (client-side only, never SSR)
+
+  YES → Add to ALLOWED_BROKEN in check-next-symlinks.sh  (Step 3a)
+  NO  → Move from devDependencies to dependencies          (Step 3b)
+```
+
+### Step 3a — Add to allowlist
+
+Edit `apps/app/bin/check-next-symlinks.sh`:
+
+```bash
+ALLOWED_BROKEN=(
+  fslightbox-react
+  @emoji-mart/data
+  @emoji-mart/react
+  socket.io-client
+  <new-package>          # <-- add here
+)
+```
+
+Use the bare package name (e.g., `socket.io-client`), not the hashed symlink name (`socket.io-client-46e5ba4d4c848156`).
+
+### Step 3b — Move to dependencies
+
+In `apps/app/package.json`, move the package from `devDependencies` to `dependencies`, then run `pnpm install`.
+
+### Step 4 — Verify the fix
+
+Re-run the full sequence:
+
+```bash
+turbo run build --filter @growi/app
+bash apps/app/bin/assemble-prod.sh
+bash apps/app/bin/check-next-symlinks.sh
+```
+
+Expected output: `OK: All apps/app/.next/node_modules symlinks resolve correctly.`
+
+## Example
+
+`socket.io-client` is used in two files:
+- `src/states/socket-io/global-socket.ts` — `import type` + `await import()` inside `useEffect`
+- `src/features/admin/states/socket-io.ts` — `import type` + `import()` inside `useEffect`
+
+Both are client-only dynamic imports → the package was added to `ALLOWED_BROKEN` and stays in `devDependencies`.
+
+## When to Apply
+
+- CI fails at "Check for broken symlinks in .next/node_modules" step
+- `check-next-symlinks.sh` reports `BROKEN: apps/app/.next/node_modules/<package>-<hash>`
+- After adding a new package or changing import patterns in apps/app

+ 9 - 1
apps/app/bin/check-next-symlinks.sh

@@ -11,6 +11,7 @@ ALLOWED_BROKEN=(
   fslightbox-react
   fslightbox-react
   @emoji-mart/data
   @emoji-mart/data
   @emoji-mart/react
   @emoji-mart/react
+  socket.io-client
 )
 )
 
 
 # Build a grep -v pattern from the allowlist
 # Build a grep -v pattern from the allowlist
@@ -29,7 +30,14 @@ done | grep -v "${grep_args[@]}" || true)
 if [ -n "$broken" ]; then
 if [ -n "$broken" ]; then
   echo "ERROR: Broken symlinks found in $NEXT_MODULES:"
   echo "ERROR: Broken symlinks found in $NEXT_MODULES:"
   echo "$broken"
   echo "$broken"
-  echo "Move these packages from devDependencies to dependencies in apps/app/package.json."
+  echo ""
+  echo "Each broken package must be either:"
+  echo "  1. Moved from devDependencies to dependencies in apps/app/package.json"
+  echo "  2. Added to ALLOWED_BROKEN in this script (if only used via useEffect + dynamic import)"
+  echo ""
+  echo "Fix: Follow the step-by-step procedure in apps/app/.claude/skills/learned/fix-broken-next-symlinks/SKILL.md"
+  echo "     You MUST execute every step in order — do NOT skip assemble-prod.sh when verifying."
+  echo "Ref: apps/app/.claude/rules/package-dependencies.md"
   exit 1
   exit 1
 fi
 fi
 
 

+ 0 - 1
apps/app/next.config.ts

@@ -28,7 +28,6 @@ const getTranspilePackages = (): string[] => {
     'decode-named-character-reference',
     'decode-named-character-reference',
     'devlop',
     'devlop',
     'fault',
     'fault',
-    'escape-string-regexp',
     'hastscript',
     'hastscript',
     'html-void-elements',
     'html-void-elements',
     'is-absolute-url',
     'is-absolute-url',

+ 10 - 17
apps/app/package.json

@@ -56,16 +56,13 @@
     "version:premajor": "pnpm version premajor --preid=RC"
     "version:premajor": "pnpm version premajor --preid=RC"
   },
   },
   "// comments for dependencies": {
   "// comments for dependencies": {
-    "@aws-skd/*": "fix version above 3.186.0 that is required by mongodb@4.16.0",
-    "@keycloak/keycloak-admin-client": "19.0.0 or above exports only ESM.",
-    "escape-string-regexp": "5.0.0 or above exports only ESM",
-    "next-themes": "0.3.0 causes a type error: https://github.com/pacocoursey/next-themes/issues/122",
-    "string-width": "5.0.0 or above exports only ESM."
+    "@keycloak/keycloak-admin-client": "19.0.0 or above exports only ESM. API breaking changes require separate migration effort."
   },
   },
   "dependencies": {
   "dependencies": {
     "@akebifiky/remark-simple-plantuml": "^1.0.2",
     "@akebifiky/remark-simple-plantuml": "^1.0.2",
-    "@aws-sdk/client-s3": "3.454.0",
-    "@aws-sdk/s3-request-presigner": "3.454.0",
+    "@aws-sdk/client-s3": "^3.1014.0",
+    "@aws-sdk/lib-storage": "^3.1014.0",
+    "@aws-sdk/s3-request-presigner": "^3.1014.0",
     "@azure/identity": "^4.4.1",
     "@azure/identity": "^4.4.1",
     "@azure/openai": "^2.0.0",
     "@azure/openai": "^2.0.0",
     "@azure/storage-blob": "^12.16.0",
     "@azure/storage-blob": "^12.16.0",
@@ -128,7 +125,7 @@
     "axios-retry": "^3.2.4",
     "axios-retry": "^3.2.4",
     "babel-plugin-superjson-next": "^0.4.2",
     "babel-plugin-superjson-next": "^0.4.2",
     "body-parser": "^1.20.3",
     "body-parser": "^1.20.3",
-    "bootstrap": "=5.3.2",
+    "bootstrap": "^5.3.8",
     "browser-bunyan": "^1.8.0",
     "browser-bunyan": "^1.8.0",
     "bson-objectid": "^2.0.4",
     "bson-objectid": "^2.0.4",
     "bunyan": "^1.8.15",
     "bunyan": "^1.8.15",
@@ -151,7 +148,6 @@
     "dotenv-flow": "^3.2.0",
     "dotenv-flow": "^3.2.0",
     "downshift": "^8.2.3",
     "downshift": "^8.2.3",
     "ejs": "^3.1.10",
     "ejs": "^3.1.10",
-    "escape-string-regexp": "^4.0.0",
     "expose-gc": "^1.0.0",
     "expose-gc": "^1.0.0",
     "express": "^4.20.0",
     "express": "^4.20.0",
     "express-bunyan-logger": "^1.3.3",
     "express-bunyan-logger": "^1.3.3",
@@ -201,10 +197,10 @@
     "multer": "~1.4.0",
     "multer": "~1.4.0",
     "multer-autoreap": "^1.0.3",
     "multer-autoreap": "^1.0.3",
     "mustache": "^4.2.0",
     "mustache": "^4.2.0",
-    "next": "^16.1.7",
+    "next": "^16.2.1",
     "next-dynamic-loading-props": "^0.1.1",
     "next-dynamic-loading-props": "^0.1.1",
     "next-i18next": "^15.3.1",
     "next-i18next": "^15.3.1",
-    "next-themes": "^0.2.1",
+    "next-themes": "^0.4.6",
     "nocache": "^4.0.0",
     "nocache": "^4.0.0",
     "node-cron": "^3.0.2",
     "node-cron": "^3.0.2",
     "nodemailer": "^6.9.15",
     "nodemailer": "^6.9.15",
@@ -268,7 +264,7 @@
     "sanitize-filename": "^1.6.3",
     "sanitize-filename": "^1.6.3",
     "simplebar-react": "^2.3.6",
     "simplebar-react": "^2.3.6",
     "socket.io": "^4.7.5",
     "socket.io": "^4.7.5",
-    "string-width": "=4.2.2",
+    "string-width": "^7.0.0",
     "superjson": "^2.2.2",
     "superjson": "^2.2.2",
     "swagger-jsdoc": "^6.2.8",
     "swagger-jsdoc": "^6.2.8",
     "swr": "^2.3.2",
     "swr": "^2.3.2",
@@ -290,15 +286,13 @@
     "xss": "^1.0.15",
     "xss": "^1.0.15",
     "y-codemirror.next": "^0.3.5",
     "y-codemirror.next": "^0.3.5",
     "y-mongodb-provider": "^0.2.0",
     "y-mongodb-provider": "^0.2.0",
-    "y-socket.io": "^1.1.3",
+    "y-websocket": "^2.0.4",
     "yjs": "^13.6.18",
     "yjs": "^13.6.18",
     "zod": "^3.24.2"
     "zod": "^3.24.2"
   },
   },
   "// comments for defDependencies": {
   "// comments for defDependencies": {
-    "bootstrap": "v5.3.3 has a bug. refs: https://github.com/twbs/bootstrap/issues/39798",
     "@handsontable/react": "v3 requires handsontable >= 7.0.0.",
     "@handsontable/react": "v3 requires handsontable >= 7.0.0.",
-    "handsontable": "v7.0.0 or above is no loger MIT lisence.",
-    "mongodb": "mongoose which is used requires mongo@4.16.0."
+    "handsontable": "v7.0.0 or above is no longer MIT license."
   },
   },
   "devDependencies": {
   "devDependencies": {
     "@apidevtools/swagger-parser": "^10.1.1",
     "@apidevtools/swagger-parser": "^10.1.1",
@@ -354,7 +348,6 @@
     "supertest": "^7.1.4",
     "supertest": "^7.1.4",
     "swagger2openapi": "^7.0.8",
     "swagger2openapi": "^7.0.8",
     "tinykeys": "^3.0.0",
     "tinykeys": "^3.0.0",
-    "typescript": "~5.0.4",
     "unist-util-is": "^6.0.0",
     "unist-util-is": "^6.0.0",
     "unist-util-visit-parents": "^6.0.0"
     "unist-util-visit-parents": "^6.0.0"
   }
   }

+ 8 - 0
apps/app/public/static/locales/en_US/admin.json

@@ -881,6 +881,14 @@
     "available_action_list_explanation": "List of actions that can be searched/viewed in the current settings",
     "available_action_list_explanation": "List of actions that can be searched/viewed in the current settings",
     "action_list": "Action List",
     "action_list": "Action List",
     "disable_mode_explanation": "Audit log is currently disabled. To enable it, set the environment variable <code>AUDIT_LOG_ENABLED</code> to true.",
     "disable_mode_explanation": "Audit log is currently disabled. To enable it, set the environment variable <code>AUDIT_LOG_ENABLED</code> to true.",
+    "export": "Export",
+    "export_audit_log": "Export Audit Log",
+    "export_requested": "Export request accepted. You will be notified when the export is complete.",
+    "export_failed": "Failed to start export",
+    "duplicate_export_confirm": "An export with the same conditions is already in progress. Do you want to restart it?",
+    "restart_export": "Restart Export",
+    "confirm_export": "Confirm Export",
+    "disable_mode_explanation_cloud": "Audit log is currently disabled. To enable it, please update the app settings from the GROWI.cloud management screen.",
     "docs_url": {
     "docs_url": {
       "log_type": "https://docs.growi.org/en/admin-guide/admin-cookbook/audit-log-setup.html#log-types"
       "log_type": "https://docs.growi.org/en/admin-guide/admin-cookbook/audit-log-setup.html#log-types"
     }
     }

+ 5 - 0
apps/app/public/static/locales/en_US/translation.json

@@ -861,6 +861,11 @@
     "started_on": "Started on",
     "started_on": "Started on",
     "file_upload_not_configured": "File upload settings are not configured"
     "file_upload_not_configured": "File upload settings are not configured"
   },
   },
+  "audit_log_bulk_export": {
+    "download_expired": "Download period has expired",
+    "job_expired": "Export process was canceled because it took too long",
+    "no_results": "No audit logs matched the specified filters"
+  },
   "message": {
   "message": {
     "successfully_connected": "Successfully Connected!",
     "successfully_connected": "Successfully Connected!",
     "fail_to_save_access_token": "Failed to save access_token. Please try again.",
     "fail_to_save_access_token": "Failed to save access_token. Please try again.",

+ 8 - 0
apps/app/public/static/locales/fr_FR/admin.json

@@ -880,6 +880,14 @@
     "available_action_list_explanation": "Liste des actions pouvant être recherchées/vues",
     "available_action_list_explanation": "Liste des actions pouvant être recherchées/vues",
     "action_list": "Liste d'actions",
     "action_list": "Liste d'actions",
     "disable_mode_explanation": "Cette fonctionnalité est désactivée. Afin de l'activer, mettre à jour <code>AUDIT_LOG_ENABLED</code> pour true.",
     "disable_mode_explanation": "Cette fonctionnalité est désactivée. Afin de l'activer, mettre à jour <code>AUDIT_LOG_ENABLED</code> pour true.",
+    "export": "Exporter",
+    "export_audit_log": "Exporter le journal d'audit",
+    "export_requested": "Demande d'exportation acceptée. Vous serez averti lorsque l'exportation sera terminée.",
+    "export_failed": "Échec du démarrage de l'exportation",
+    "duplicate_export_confirm": "Une exportation avec les mêmes conditions est déjà en cours. Voulez-vous la redémarrer ?",
+    "restart_export": "Redémarrer l'exportation",
+    "confirm_export": "Confirmer l'exportation",
+    "disable_mode_explanation_cloud": "Le journal d'audit est actuellement désactivé. Pour l'activer, veuillez modifier les paramètres de l'application depuis l'écran de gestion GROWI.cloud.",
     "docs_url": {
     "docs_url": {
       "log_type": "https://docs.growi.org/en/admin-guide/admin-cookbook/audit-log-setup.html#log-types"
       "log_type": "https://docs.growi.org/en/admin-guide/admin-cookbook/audit-log-setup.html#log-types"
     }
     }

+ 5 - 0
apps/app/public/static/locales/fr_FR/translation.json

@@ -856,6 +856,11 @@
     "started_on": "Commencé le",
     "started_on": "Commencé le",
     "file_upload_not_configured": "Les paramètres de téléchargement de fichiers ne sont pas configurés"
     "file_upload_not_configured": "Les paramètres de téléchargement de fichiers ne sont pas configurés"
   },
   },
+  "audit_log_bulk_export": {
+    "download_expired": "La période de téléchargement a expiré",
+    "job_expired": "Le processus d'exportation a été annulé car il a pris trop de temps",
+    "no_results": "Aucun journal d'audit ne correspondait aux filtres spécifiés"
+  },
   "message": {
   "message": {
     "successfully_connected": "Connecté!",
     "successfully_connected": "Connecté!",
     "fail_to_save_access_token": "Échec de la sauvegarde de access_token.",
     "fail_to_save_access_token": "Échec de la sauvegarde de access_token.",

+ 8 - 0
apps/app/public/static/locales/ja_JP/admin.json

@@ -890,6 +890,14 @@
     "available_action_list_explanation": "現在の設定で検索 / 表示 可能なアクション一覧です",
     "available_action_list_explanation": "現在の設定で検索 / 表示 可能なアクション一覧です",
     "action_list": "アクション一覧",
     "action_list": "アクション一覧",
     "disable_mode_explanation": "現在、監査ログは無効になっています。有効にする場合は環境変数 <code>AUDIT_LOG_ENABLED</code> を true に設定してください。",
     "disable_mode_explanation": "現在、監査ログは無効になっています。有効にする場合は環境変数 <code>AUDIT_LOG_ENABLED</code> を true に設定してください。",
+    "export": "エクスポート",
+    "export_audit_log": "監査ログのエクスポート",
+    "export_requested": "エクスポートリクエストを受け付けました。完了後に通知されます。",
+    "export_failed": "エクスポートの開始に失敗しました",
+    "duplicate_export_confirm": "同じ条件のエクスポートが進行中です。やり直しますか?",
+    "restart_export": "やり直す",
+    "confirm_export": "エクスポートの確認",
+    "disable_mode_explanation_cloud": "現在、監査ログは無効になっています。有効にするには、GROWI.cloud の管理画面からアプリの設定を変更してください。",
     "docs_url": {
     "docs_url": {
       "log_type": "https://docs.growi.org/ja/admin-guide/admin-cookbook/audit-log-setup.html#log-types"
       "log_type": "https://docs.growi.org/ja/admin-guide/admin-cookbook/audit-log-setup.html#log-types"
     }
     }

+ 5 - 0
apps/app/public/static/locales/ja_JP/translation.json

@@ -894,6 +894,11 @@
     "started_on": "開始日時",
     "started_on": "開始日時",
     "file_upload_not_configured": "ファイルアップロード設定が完了していません"
     "file_upload_not_configured": "ファイルアップロード設定が完了していません"
   },
   },
+  "audit_log_bulk_export": {
+    "download_expired": "ダウンロード期限が切れました",
+    "job_expired": "エクスポート時間が長すぎるため、処理が中断されました",
+    "no_results": "指定されたフィルターに一致する監査ログはありませんでした"
+  },
   "message": {
   "message": {
     "successfully_connected": "接続に成功しました!",
     "successfully_connected": "接続に成功しました!",
     "fail_to_save_access_token": "アクセストークンの保存に失敗しました、再度お試しください。",
     "fail_to_save_access_token": "アクセストークンの保存に失敗しました、再度お試しください。",

+ 8 - 0
apps/app/public/static/locales/ko_KR/admin.json

@@ -881,6 +881,14 @@
     "available_action_list_explanation": "현재 설정에서 검색/볼 수 있는 작업 목록",
     "available_action_list_explanation": "현재 설정에서 검색/볼 수 있는 작업 목록",
     "action_list": "작업 목록",
     "action_list": "작업 목록",
     "disable_mode_explanation": "감사 로그가 현재 비활성화되어 있습니다. 활성화하려면 환경 변수 <code>AUDIT_LOG_ENABLED</code>를 true로 설정하십시오.",
     "disable_mode_explanation": "감사 로그가 현재 비활성화되어 있습니다. 활성화하려면 환경 변수 <code>AUDIT_LOG_ENABLED</code>를 true로 설정하십시오.",
+    "export": "내보내기",
+    "export_audit_log": "감사 로그 내보내기",
+    "export_requested": "내보내기 요청이 접수되었습니다. 내보내기가 완료되면 알림을 받게 됩니다.",
+    "export_failed": "내보내기 시작에 실패했습니다",
+    "duplicate_export_confirm": "동일한 조건의 내보내기가 이미 진행 중입니다. 다시 시작하시겠습니까?",
+    "restart_export": "내보내기 다시 시작",
+    "confirm_export": "내보내기 확인",
+    "disable_mode_explanation_cloud": "현재 감사 로그가 비활성화되어 있습니다. 활성화하려면 GROWI.cloud 관리 화면에서 앱 설정을 변경하십시오.",
     "docs_url": {
     "docs_url": {
       "log_type": "https://docs.growi.org/en/admin-guide/admin-cookbook/audit-log-setup.html#log-types"
       "log_type": "https://docs.growi.org/en/admin-guide/admin-cookbook/audit-log-setup.html#log-types"
     }
     }

+ 5 - 0
apps/app/public/static/locales/ko_KR/translation.json

@@ -821,6 +821,11 @@
     "started_on": "시작일",
     "started_on": "시작일",
     "file_upload_not_configured": "파일 업로드 설정이 구성되지 않았습니다."
     "file_upload_not_configured": "파일 업로드 설정이 구성되지 않았습니다."
   },
   },
+  "audit_log_bulk_export": {
+    "download_expired": "다운로드 기간이 만료되었습니다",
+    "job_expired": "내보내기 프로세스가 너무 오래 걸려 취소되었습니다",
+    "no_results": "지정된 필터에 일치하는 감사 로그가 없습니다"
+  },
   "message": {
   "message": {
     "successfully_connected": "성공적으로 연결되었습니다!",
     "successfully_connected": "성공적으로 연결되었습니다!",
     "fail_to_save_access_token": "액세스 토큰 저장 실패. 다시 시도하십시오.",
     "fail_to_save_access_token": "액세스 토큰 저장 실패. 다시 시도하십시오.",

+ 8 - 0
apps/app/public/static/locales/zh_CN/admin.json

@@ -890,6 +890,14 @@
     "available_action_list_explanation": "在当前配置中可以搜索/查看的行动列表",
     "available_action_list_explanation": "在当前配置中可以搜索/查看的行动列表",
     "action_list": "行动清单",
     "action_list": "行动清单",
     "disable_mode_explanation": "审计日志当前已禁用。 要启用它,请将环境变量 <code>AUDIT_LOG_ENABLED</code> 设置为 true。",
     "disable_mode_explanation": "审计日志当前已禁用。 要启用它,请将环境变量 <code>AUDIT_LOG_ENABLED</code> 设置为 true。",
+    "export": "导出",
+    "export_audit_log": "导出审核日志",
+    "export_requested": "导出请求已接受。导出完成后将通知您。",
+    "export_failed": "导出启动失败",
+    "duplicate_export_confirm": "已有相同条件的导出正在进行中。是否要重新启动它?",
+    "restart_export": "重新启动导出",
+    "confirm_export": "确认导出",
+    "disable_mode_explanation_cloud": "审计日志当前已禁用。要启用它,请从 GROWI.cloud 管理界面更改应用程序设置。",
     "docs_url": {
     "docs_url": {
       "log_type": "https://docs.growi.org/en/admin-guide/admin-cookbook/audit-log-setup.html#log-types"
       "log_type": "https://docs.growi.org/en/admin-guide/admin-cookbook/audit-log-setup.html#log-types"
     }
     }

+ 5 - 0
apps/app/public/static/locales/zh_CN/translation.json

@@ -866,6 +866,11 @@
     "started_on": "开始于",
     "started_on": "开始于",
     "file_upload_not_configured": "未配置文件上传设置"
     "file_upload_not_configured": "未配置文件上传设置"
   },
   },
+  "audit_log_bulk_export": {
+    "download_expired": "下载期限已过期",
+    "job_expired": "导出过程因耗时过长被取消",
+    "no_results": "没有审计日志符合指定筛选条件"
+  },
   "message": {
   "message": {
     "successfully_connected": "连接成功!",
     "successfully_connected": "连接成功!",
     "fail_to_save_access_token": "无法保存访问令牌。请再试一次。",
     "fail_to_save_access_token": "无法保存访问令牌。请再试一次。",

+ 21 - 1
apps/app/src/client/components/Admin/AuditLog/AuditLogDisableMode.tsx

@@ -2,9 +2,16 @@ import type { FC } from 'react';
 import React from 'react';
 import React from 'react';
 import { useTranslation } from 'react-i18next';
 import { useTranslation } from 'react-i18next';
 
 
+import { useGrowiAppIdForGrowiCloud, useGrowiCloudUri } from '~/states/global';
+
 export const AuditLogDisableMode: FC = () => {
 export const AuditLogDisableMode: FC = () => {
   const { t } = useTranslation('admin');
   const { t } = useTranslation('admin');
 
 
+  const growiCloudUri = useGrowiCloudUri();
+  const growiAppIdForGrowiCloud = useGrowiAppIdForGrowiCloud();
+
+  const isCloud = growiCloudUri != null && growiAppIdForGrowiCloud != null;
+
   return (
   return (
     <div className="ccontainer-lg">
     <div className="ccontainer-lg">
       <div className="container">
       <div className="container">
@@ -21,9 +28,22 @@ export const AuditLogDisableMode: FC = () => {
               <h3
               <h3
                 // biome-ignore lint/security/noDangerouslySetInnerHtml: trusted translation markup
                 // biome-ignore lint/security/noDangerouslySetInnerHtml: trusted translation markup
                 dangerouslySetInnerHTML={{
                 dangerouslySetInnerHTML={{
-                  __html: t('audit_log_management.disable_mode_explanation'),
+                  __html: t(
+                    isCloud
+                      ? 'audit_log_management.disable_mode_explanation_cloud'
+                      : 'audit_log_management.disable_mode_explanation',
+                  ),
                 }}
                 }}
               />
               />
+              {isCloud && (
+                <a
+                  href={`${growiCloudUri}/my/apps/${growiAppIdForGrowiCloud}`}
+                  className="btn btn-outline-secondary mt-3"
+                >
+                  <span className="material-symbols-outlined me-1">share</span>
+                  {t('cloud_setting_management.to_cloud_settings')}
+                </a>
+              )}
             </div>
             </div>
           </div>
           </div>
         </div>
         </div>

+ 182 - 0
apps/app/src/client/components/Admin/AuditLog/AuditLogExportModal.tsx

@@ -0,0 +1,182 @@
+import { useCallback, useState } from 'react';
+import { LoadingSpinner } from '@growi/ui/dist/components';
+import { useAtomValue } from 'jotai';
+import { useTranslation } from 'react-i18next';
+import { Modal, ModalBody, ModalFooter, ModalHeader } from 'reactstrap';
+
+import type { IAuditLogBulkExportRequestFilters } from '~/features/audit-log-bulk-export/interfaces/audit-log-bulk-export';
+import type { SupportedActionType } from '~/interfaces/activity';
+import { auditLogAvailableActionsAtom } from '~/states/server-configurations';
+
+import { DateRangePicker } from './DateRangePicker';
+import { DuplicateExportConfirmModal } from './DuplicateExportConfirmModal';
+import { SearchUsernameTypeahead } from './SearchUsernameTypeahead';
+import { SelectActionDropdown } from './SelectActionDropdown';
+import { useAuditLogExport } from './useAuditLogExport';
+
+type Props = {
+  isOpen: boolean;
+  onClose: () => void;
+};
+
+const AuditLogExportModalSubstance = ({
+  onClose,
+}: {
+  onClose: () => void;
+}): JSX.Element => {
+  const { t } = useTranslation('admin');
+
+  const auditLogAvailableActionsData = useAtomValue(
+    auditLogAvailableActionsAtom,
+  );
+
+  const [startDate, setStartDate] = useState<Date | null>(null);
+  const [endDate, setEndDate] = useState<Date | null>(null);
+  const [selectedUsernames, setSelectedUsernames] = useState<string[]>([]);
+  const [actionMap, setActionMap] = useState(
+    () =>
+      new Map<SupportedActionType, boolean>(
+        auditLogAvailableActionsData?.map((action) => [action, true]) ?? [],
+      ),
+  );
+
+  const datePickerChangedHandler = useCallback((dateList: Date[] | null[]) => {
+    setStartDate(dateList[0]);
+    setEndDate(dateList[1]);
+  }, []);
+
+  const actionCheckboxChangedHandler = useCallback(
+    (action: SupportedActionType) => {
+      setActionMap((prev) => {
+        const next = new Map(prev);
+        next.set(action, !next.get(action));
+        return next;
+      });
+    },
+    [],
+  );
+
+  const multipleActionCheckboxChangedHandler = useCallback(
+    (actions: SupportedActionType[], isChecked: boolean) => {
+      setActionMap((prev) => {
+        const next = new Map(prev);
+        actions.forEach((action) => {
+          next.set(action, isChecked);
+        });
+        return next;
+      });
+    },
+    [],
+  );
+
+  const setUsernamesHandler = useCallback((usernames: string[]) => {
+    setSelectedUsernames(usernames);
+  }, []);
+
+  const buildFilters = useCallback(() => {
+    const selectedActionList = Array.from(actionMap.entries())
+      .filter((v) => v[1])
+      .map((v) => v[0]);
+
+    const filters: IAuditLogBulkExportRequestFilters = {};
+
+    if (selectedUsernames.length > 0) {
+      filters.usernames = selectedUsernames;
+    }
+    if (selectedActionList.length > 0) {
+      filters.actions = selectedActionList;
+    }
+    if (startDate != null) {
+      filters.dateFrom = startDate;
+    }
+    if (endDate != null) {
+      const endOfDay = new Date(endDate);
+      endOfDay.setHours(23, 59, 59, 999);
+      filters.dateTo = endOfDay;
+    }
+
+    return filters;
+  }, [actionMap, selectedUsernames, startDate, endDate]);
+
+  const {
+    isExporting,
+    isDuplicateConfirmOpen,
+    exportHandler,
+    restartExportHandler,
+    closeDuplicateConfirm,
+  } = useAuditLogExport(buildFilters, onClose);
+
+  return (
+    <>
+      <ModalHeader tag="h4" toggle={onClose}>
+        {t('audit_log_management.export_audit_log')}
+      </ModalHeader>
+
+      <ModalBody>
+        <div className="mb-3">
+          <div className="form-label">{t('audit_log_management.username')}</div>
+          <SearchUsernameTypeahead onChange={setUsernamesHandler} />
+        </div>
+
+        <div className="mb-3">
+          <div className="form-label">{t('audit_log_management.date')}</div>
+          <DateRangePicker
+            startDate={startDate}
+            endDate={endDate}
+            onChange={datePickerChangedHandler}
+          />
+        </div>
+
+        <div className="mb-3">
+          <div className="form-label">{t('audit_log_management.action')}</div>
+          <SelectActionDropdown
+            actionMap={actionMap}
+            availableActions={auditLogAvailableActionsData || []}
+            onChangeAction={actionCheckboxChangedHandler}
+            onChangeMultipleAction={multipleActionCheckboxChangedHandler}
+          />
+        </div>
+      </ModalBody>
+
+      <ModalFooter>
+        <button
+          type="button"
+          className="btn btn-outline-secondary"
+          onClick={onClose}
+        >
+          {t('export_management.cancel')}
+        </button>
+        <button
+          type="button"
+          className="btn btn-primary"
+          onClick={exportHandler}
+          disabled={isExporting}
+        >
+          {isExporting ? (
+            <LoadingSpinner className="me-1 fs-3" />
+          ) : (
+            <span className="material-symbols-outlined me-1">download</span>
+          )}
+          {t('audit_log_management.export')}
+        </button>
+      </ModalFooter>
+
+      <DuplicateExportConfirmModal
+        isOpen={isDuplicateConfirmOpen}
+        onClose={closeDuplicateConfirm}
+        onRestart={restartExportHandler}
+      />
+    </>
+  );
+};
+
+export const AuditLogExportModal = ({
+  isOpen,
+  onClose,
+}: Props): JSX.Element => {
+  return (
+    <Modal isOpen={isOpen} toggle={onClose}>
+      {isOpen && <AuditLogExportModalSubstance onClose={onClose} />}
+    </Modal>
+  );
+};

+ 39 - 0
apps/app/src/client/components/Admin/AuditLog/DuplicateExportConfirmModal.tsx

@@ -0,0 +1,39 @@
+import { useTranslation } from 'react-i18next';
+import { Modal, ModalBody, ModalFooter, ModalHeader } from 'reactstrap';
+
+type Props = {
+  isOpen: boolean;
+  onClose: () => void;
+  onRestart: () => void;
+};
+
+export const DuplicateExportConfirmModal = ({
+  isOpen,
+  onClose,
+  onRestart,
+}: Props): JSX.Element => {
+  const { t } = useTranslation('admin');
+
+  return (
+    <Modal isOpen={isOpen} toggle={onClose}>
+      <ModalHeader tag="h4" toggle={onClose}>
+        {t('audit_log_management.confirm_export')}
+      </ModalHeader>
+      <ModalBody>
+        {t('audit_log_management.duplicate_export_confirm')}
+      </ModalBody>
+      <ModalFooter>
+        <button
+          type="button"
+          className="btn btn-outline-secondary"
+          onClick={onClose}
+        >
+          {t('export_management.cancel')}
+        </button>
+        <button type="button" className="btn btn-primary" onClick={onRestart}>
+          {t('audit_log_management.restart_export')}
+        </button>
+      </ModalFooter>
+    </Modal>
+  );
+};

+ 67 - 0
apps/app/src/client/components/Admin/AuditLog/useAuditLogExport.ts

@@ -0,0 +1,67 @@
+import { useCallback, useState } from 'react';
+import { useTranslation } from 'react-i18next';
+
+import { apiv3Post } from '~/client/util/apiv3-client';
+import { toastError, toastSuccess } from '~/client/util/toastr';
+import type { IAuditLogBulkExportFilters } from '~/features/audit-log-bulk-export/interfaces/audit-log-bulk-export';
+
+export const useAuditLogExport = (
+  buildFilters: () => IAuditLogBulkExportFilters,
+  onClose: () => void,
+) => {
+  const { t } = useTranslation('admin');
+
+  const [isExporting, setIsExporting] = useState(false);
+  const [isDuplicateConfirmOpen, setIsDuplicateConfirmOpen] = useState(false);
+
+  const exportHandler = useCallback(async () => {
+    setIsExporting(true);
+    try {
+      const filters = buildFilters();
+      await apiv3Post('/audit-log-bulk-export', { filters });
+      toastSuccess(t('audit_log_management.export_requested'));
+      onClose();
+    } catch (errs) {
+      const isDuplicate =
+        Array.isArray(errs) &&
+        errs.some(
+          (e) => e.code === 'audit_log_bulk_export.duplicate_export_job_error',
+        );
+
+      if (isDuplicate) {
+        setIsDuplicateConfirmOpen(true);
+      } else {
+        toastError(t('audit_log_management.export_failed'));
+      }
+    } finally {
+      setIsExporting(false);
+    }
+  }, [buildFilters, t, onClose]);
+
+  const restartExportHandler = useCallback(async () => {
+    setIsDuplicateConfirmOpen(false);
+    setIsExporting(true);
+    try {
+      const filters = buildFilters();
+      await apiv3Post('/audit-log-bulk-export', { filters, restartJob: true });
+      toastSuccess(t('audit_log_management.export_requested'));
+      onClose();
+    } catch {
+      toastError(t('audit_log_management.export_failed'));
+    } finally {
+      setIsExporting(false);
+    }
+  }, [buildFilters, t, onClose]);
+
+  const closeDuplicateConfirm = useCallback(() => {
+    setIsDuplicateConfirmOpen(false);
+  }, []);
+
+  return {
+    isExporting,
+    isDuplicateConfirmOpen,
+    exportHandler,
+    restartExportHandler,
+    closeDuplicateConfirm,
+  };
+};

+ 35 - 0
apps/app/src/client/components/Admin/AuditLogManagement.tsx

@@ -9,6 +9,7 @@ import { useTranslation } from 'react-i18next';
 import type { IClearable } from '~/client/interfaces/clearable';
 import type { IClearable } from '~/client/interfaces/clearable';
 import { toastError } from '~/client/util/toastr';
 import { toastError } from '~/client/util/toastr';
 import type { SupportedActionType } from '~/interfaces/activity';
 import type { SupportedActionType } from '~/interfaces/activity';
+import { useGrowiAppIdForGrowiCloud, useGrowiCloudUri } from '~/states/global';
 import {
 import {
   auditLogAvailableActionsAtom,
   auditLogAvailableActionsAtom,
   auditLogEnabledAtom,
   auditLogEnabledAtom,
@@ -18,6 +19,7 @@ import { useSWRxActivity } from '~/stores/activity';
 import PaginationWrapper from '../PaginationWrapper';
 import PaginationWrapper from '../PaginationWrapper';
 import { ActivityTable } from './AuditLog/ActivityTable';
 import { ActivityTable } from './AuditLog/ActivityTable';
 import { AuditLogDisableMode } from './AuditLog/AuditLogDisableMode';
 import { AuditLogDisableMode } from './AuditLog/AuditLogDisableMode';
+import { AuditLogExportModal } from './AuditLog/AuditLogExportModal';
 import { AuditLogSettings } from './AuditLog/AuditLogSettings';
 import { AuditLogSettings } from './AuditLog/AuditLogSettings';
 import { DateRangePicker } from './AuditLog/DateRangePicker';
 import { DateRangePicker } from './AuditLog/DateRangePicker';
 import { SearchUsernameTypeahead } from './AuditLog/SearchUsernameTypeahead';
 import { SearchUsernameTypeahead } from './AuditLog/SearchUsernameTypeahead';
@@ -35,6 +37,11 @@ const PAGING_LIMIT = 10;
 export const AuditLogManagement: FC = () => {
 export const AuditLogManagement: FC = () => {
   const { t } = useTranslation('admin');
   const { t } = useTranslation('admin');
 
 
+  const growiCloudUri = useGrowiCloudUri();
+  const growiAppIdForGrowiCloud = useGrowiAppIdForGrowiCloud();
+
+  const isCloud = growiCloudUri != null && growiAppIdForGrowiCloud != null;
+
   const typeaheadRef = useRef<IClearable>(null);
   const typeaheadRef = useRef<IClearable>(null);
 
 
   const auditLogAvailableActionsData = useAtomValue(
   const auditLogAvailableActionsData = useAtomValue(
@@ -185,6 +192,8 @@ export const AuditLogManagement: FC = () => {
     setActivePageNumber(jumpPageNumber);
     setActivePageNumber(jumpPageNumber);
   }, [jumpPageNumber]);
   }, [jumpPageNumber]);
 
 
+  const [isExportModalOpen, setIsExportModalOpen] = useState<boolean>(false);
+
   const startIndex = activityList.length === 0 ? 0 : offset + 1;
   const startIndex = activityList.length === 0 ? 0 : offset + 1;
   const endIndex = activityList.length === 0 ? 0 : offset + activityList.length;
   const endIndex = activityList.length === 0 ? 0 : offset + activityList.length;
 
 
@@ -212,6 +221,16 @@ export const AuditLogManagement: FC = () => {
         )}
         )}
       </button>
       </button>
 
 
+      {isCloud && (
+        <a
+          href={`${growiCloudUri}/my/apps/${growiAppIdForGrowiCloud}`}
+          className="btn btn-outline-secondary mb-4 ms-2"
+        >
+          <span className="material-symbols-outlined me-1">share</span>
+          {t('cloud_setting_management.to_cloud_settings')}
+        </a>
+      )}
+
       <h2 className="admin-setting-header mb-3">
       <h2 className="admin-setting-header mb-3">
         <span>
         <span>
           {isSettingPage
           {isSettingPage
@@ -267,6 +286,17 @@ export const AuditLogManagement: FC = () => {
                 {t('admin:audit_log_management.clear')}
                 {t('admin:audit_log_management.clear')}
               </button>
               </button>
             </div>
             </div>
+
+            <div className="col-12">
+              <button
+                type="button"
+                className="btn btn-outline-secondary"
+                onClick={() => setIsExportModalOpen(true)}
+              >
+                <span className="material-symbols-outlined me-1">download</span>
+                {t('admin:audit_log_management.export')}
+              </button>
+            </div>
           </div>
           </div>
 
 
           <p className="ms-2">
           <p className="ms-2">
@@ -315,6 +345,11 @@ export const AuditLogManagement: FC = () => {
               </button>
               </button>
             </div>
             </div>
           </div>
           </div>
+
+          <AuditLogExportModal
+            isOpen={isExportModalOpen}
+            onClose={() => setIsExportModalOpen(false)}
+          />
         </>
         </>
       )}
       )}
     </div>
     </div>

+ 11 - 0
apps/app/src/client/components/Admin/MarkdownSetting/ContentDispositionSettings.tsx

@@ -3,6 +3,8 @@ import { useCallback, useEffect, useState } from 'react';
 import { useTranslation } from 'next-i18next';
 import { useTranslation } from 'next-i18next';
 import { useForm } from 'react-hook-form';
 import { useForm } from 'react-hook-form';
 
 
+import { toastError, toastSuccess } from '~/client/util/toastr';
+
 import {
 import {
   type ContentDispositionSettings as ContentDispositionSettingsType,
   type ContentDispositionSettings as ContentDispositionSettingsType,
   useContentDisposition,
   useContentDisposition,
@@ -136,9 +138,18 @@ const ContentDispositionSettings: React.FC = () => {
     try {
     try {
       setError(null);
       setError(null);
       await updateSettings(data);
       await updateSettings(data);
+
+      toastSuccess(
+        t('toaster.update_successed', {
+          target: t('markdown_settings.content-disposition_header'),
+          ns: 'commons',
+        }),
+      );
+
       reset(data);
       reset(data);
     } catch (err) {
     } catch (err) {
       setError((err as Error).message);
       setError((err as Error).message);
+      toastError(err);
     }
     }
   };
   };
 
 

+ 98 - 0
apps/app/src/client/components/InAppNotification/ModelNotification/AuditLogBulkExportJobModelNotification.tsx

@@ -0,0 +1,98 @@
+import React from 'react';
+import { type HasObjectId, isPopulated } from '@growi/core';
+import { useTranslation } from 'react-i18next';
+
+import type { IAuditLogBulkExportJobHasId } from '~/features/audit-log-bulk-export/interfaces/audit-log-bulk-export';
+import { SupportedAction, SupportedTargetModel } from '~/interfaces/activity';
+import type { IInAppNotification } from '~/interfaces/in-app-notification';
+
+import type { ModelNotificationUtils } from '.';
+import { ModelNotification } from './ModelNotification';
+import { useActionMsgAndIconForModelNotification } from './useActionAndMsg';
+
+export const useAuditLogBulkExportJobModelNotification = (
+  notification: IInAppNotification & HasObjectId,
+): ModelNotificationUtils | null => {
+  const { t } = useTranslation();
+  const { actionMsg, actionIcon } =
+    useActionMsgAndIconForModelNotification(notification);
+
+  const isAuditLogBulkExportJobModelNotification = (
+    notification: IInAppNotification & HasObjectId,
+  ): notification is IInAppNotification<IAuditLogBulkExportJobHasId> &
+    HasObjectId => {
+    return (
+      notification.targetModel ===
+      SupportedTargetModel.MODEL_AUDIT_LOG_BULK_EXPORT_JOB
+    );
+  };
+
+  if (!isAuditLogBulkExportJobModelNotification(notification)) {
+    return null;
+  }
+
+  const actionUsers = notification.user.username;
+
+  const getSubMsg = (): JSX.Element => {
+    if (
+      notification.action ===
+        SupportedAction.ACTION_AUDIT_LOG_BULK_EXPORT_COMPLETED &&
+      notification.target == null
+    ) {
+      return (
+        <div className="text-danger">
+          <small>{t('audit_log_bulk_export.download_expired')}</small>
+        </div>
+      );
+    }
+    if (
+      notification.action ===
+      SupportedAction.ACTION_AUDIT_LOG_BULK_EXPORT_JOB_EXPIRED
+    ) {
+      return (
+        <div className="text-danger">
+          <small>{t('audit_log_bulk_export.job_expired')}</small>
+        </div>
+      );
+    }
+    if (
+      notification.action ===
+      SupportedAction.ACTION_AUDIT_LOG_BULK_EXPORT_NO_RESULTS
+    ) {
+      return (
+        <div className="text-danger">
+          <small>{t('audit_log_bulk_export.no_results')}</small>
+        </div>
+      );
+    }
+    return <></>;
+  };
+
+  const Notification = () => {
+    return (
+      <ModelNotification
+        notification={notification}
+        actionMsg={actionMsg}
+        actionIcon={actionIcon}
+        actionUsers={actionUsers}
+        hideActionUsers
+        hidePath
+        subMsg={getSubMsg()}
+      />
+    );
+  };
+
+  const clickLink =
+    notification.action ===
+      SupportedAction.ACTION_AUDIT_LOG_BULK_EXPORT_COMPLETED &&
+    notification.target?.attachment != null &&
+    isPopulated(notification.target?.attachment)
+      ? notification.target.attachment.downloadPathProxied
+      : undefined;
+
+  return {
+    Notification,
+    clickLink,
+    isDisabled: notification.target == null,
+  };
+};

+ 5 - 1
apps/app/src/client/components/InAppNotification/ModelNotification/ModelNotification.tsx

@@ -15,6 +15,7 @@ type Props = {
   actionIcon: string;
   actionIcon: string;
   actionUsers: string;
   actionUsers: string;
   hideActionUsers?: boolean;
   hideActionUsers?: boolean;
+  hidePath?: boolean;
   subMsg?: JSX.Element;
   subMsg?: JSX.Element;
 };
 };
 
 
@@ -24,6 +25,7 @@ export const ModelNotification: FC<Props> = ({
   actionIcon,
   actionIcon,
   actionUsers,
   actionUsers,
   hideActionUsers = false,
   hideActionUsers = false,
+  hidePath = false,
   subMsg,
   subMsg,
 }: Props) => {
 }: Props) => {
   return (
   return (
@@ -31,7 +33,9 @@ export const ModelNotification: FC<Props> = ({
       <div className="text-truncate page-title">
       <div className="text-truncate page-title">
         {hideActionUsers ? <></> : <b>{actionUsers}</b>}
         {hideActionUsers ? <></> : <b>{actionUsers}</b>}
         {` ${actionMsg}`}
         {` ${actionMsg}`}
-        <PagePathLabel path={notification.parsedSnapshot?.path ?? ''} />
+        {!hidePath && (
+          <PagePathLabel path={notification.parsedSnapshot?.path ?? ''} />
+        )}
       </div>
       </div>
       {subMsg}
       {subMsg}
       <span className="material-symbols-outlined me-2">{actionIcon}</span>
       <span className="material-symbols-outlined me-2">{actionIcon}</span>

+ 5 - 1
apps/app/src/client/components/InAppNotification/ModelNotification/index.tsx

@@ -3,6 +3,7 @@ import type { HasObjectId } from '@growi/core';
 
 
 import type { IInAppNotification } from '~/interfaces/in-app-notification';
 import type { IInAppNotification } from '~/interfaces/in-app-notification';
 
 
+import { useAuditLogBulkExportJobModelNotification } from './AuditLogBulkExportJobModelNotification';
 import { usePageBulkExportJobModelNotification } from './PageBulkExportJobModelNotification';
 import { usePageBulkExportJobModelNotification } from './PageBulkExportJobModelNotification';
 import { usePageModelNotification } from './PageModelNotification';
 import { usePageModelNotification } from './PageModelNotification';
 import { useUserModelNotification } from './UserModelNotification';
 import { useUserModelNotification } from './UserModelNotification';
@@ -23,11 +24,14 @@ export const useModelNotification = (
   const userModelNotificationUtils = useUserModelNotification(notification);
   const userModelNotificationUtils = useUserModelNotification(notification);
   const pageBulkExportResultModelNotificationUtils =
   const pageBulkExportResultModelNotificationUtils =
     usePageBulkExportJobModelNotification(notification);
     usePageBulkExportJobModelNotification(notification);
+  const auditLogBulkExportJobModelNotificationUtils =
+    useAuditLogBulkExportJobModelNotification(notification);
 
 
   const modelNotificationUtils =
   const modelNotificationUtils =
     pageModelNotificationUtils ??
     pageModelNotificationUtils ??
     userModelNotificationUtils ??
     userModelNotificationUtils ??
-    pageBulkExportResultModelNotificationUtils;
+    pageBulkExportResultModelNotificationUtils ??
+    auditLogBulkExportJobModelNotificationUtils;
 
 
   return modelNotificationUtils;
   return modelNotificationUtils;
 };
 };

+ 13 - 0
apps/app/src/client/components/InAppNotification/ModelNotification/useActionAndMsg.ts

@@ -81,6 +81,19 @@ export const useActionMsgAndIconForModelNotification = (
       actionMsg = 'export failed for';
       actionMsg = 'export failed for';
       actionIcon = 'error';
       actionIcon = 'error';
       break;
       break;
+    case SupportedAction.ACTION_AUDIT_LOG_BULK_EXPORT_COMPLETED:
+      actionMsg = 'audit log export completed';
+      actionIcon = 'download';
+      break;
+    case SupportedAction.ACTION_AUDIT_LOG_BULK_EXPORT_FAILED:
+    case SupportedAction.ACTION_AUDIT_LOG_BULK_EXPORT_JOB_EXPIRED:
+      actionMsg = 'audit log export failed';
+      actionIcon = 'error';
+      break;
+    case SupportedAction.ACTION_AUDIT_LOG_BULK_EXPORT_NO_RESULTS:
+      actionMsg = 'audit log export had no results';
+      actionIcon = 'error';
+      break;
     default:
     default:
       actionMsg = '';
       actionMsg = '';
       actionIcon = '';
       actionIcon = '';

+ 6 - 9
apps/app/src/components/Script/DrawioViewerScript/DrawioViewerScript.tsx

@@ -1,5 +1,5 @@
 import { type JSX, useCallback } from 'react';
 import { type JSX, useCallback } from 'react';
-import Head from 'next/head';
+import Script from 'next/script';
 import type { IGraphViewerGlobal } from '@growi/remark-drawio';
 import type { IGraphViewerGlobal } from '@growi/remark-drawio';
 
 
 import { generateViewerMinJsUrl } from './use-viewer-min-js-url';
 import { generateViewerMinJsUrl } from './use-viewer-min-js-url';
@@ -40,13 +40,10 @@ export const DrawioViewerScript = ({ drawioUri }: Props): JSX.Element => {
   const viewerMinJsSrc = generateViewerMinJsUrl(drawioUri);
   const viewerMinJsSrc = generateViewerMinJsUrl(drawioUri);
 
 
   return (
   return (
-    <Head>
-      <script
-        type="text/javascript"
-        async
-        src={viewerMinJsSrc}
-        onLoad={loadedHandler}
-      />
-    </Head>
+    <Script
+      src={viewerMinJsSrc}
+      strategy="afterInteractive"
+      onLoad={loadedHandler}
+    />
   );
   );
 };
 };

+ 56 - 0
apps/app/src/features/audit-log-bulk-export/interfaces/audit-log-bulk-export.ts

@@ -0,0 +1,56 @@
+import type { HasObjectId, IAttachment, IUser, Ref } from '@growi/core';
+
+import type { SupportedActionType } from '~/interfaces/activity';
+
+export const AuditLogBulkExportFormat = {
+  json: 'json',
+} as const;
+
+export type AuditLogBulkExportFormat =
+  (typeof AuditLogBulkExportFormat)[keyof typeof AuditLogBulkExportFormat];
+
+export const AuditLogBulkExportJobInProgressJobStatus = {
+  exporting: 'exporting',
+  uploading: 'uploading',
+} as const;
+
+export const AuditLogBulkExportJobStatus = {
+  ...AuditLogBulkExportJobInProgressJobStatus,
+  completed: 'completed',
+  failed: 'failed',
+} as const;
+
+export type AuditLogBulkExportJobStatus =
+  (typeof AuditLogBulkExportJobStatus)[keyof typeof AuditLogBulkExportJobStatus];
+
+export interface IAuditLogBulkExportRequestFilters {
+  usernames?: string[];
+  actions?: SupportedActionType[];
+  dateFrom?: Date;
+  dateTo?: Date;
+}
+export interface IAuditLogBulkExportFilters {
+  users?: Array<Ref<IUser>>;
+  actions?: SupportedActionType[];
+  dateFrom?: Date;
+  dateTo?: Date;
+}
+
+export interface IAuditLogBulkExportJob {
+  user: Ref<IUser>; // user who initiated the audit log export job
+  filters: IAuditLogBulkExportFilters; // filter conditions used for export (e.g. user, action, date range)
+  filterHash: string; // hash string generated from the filter set to detect duplicate export jobs
+  format: AuditLogBulkExportFormat; // export file format (currently only 'json' is supported)
+  status: AuditLogBulkExportJobStatus; // current status of the export job
+  lastExportedId?: string; // ID of the last exported audit log record
+  completedAt?: Date | null; // the date when the job was completed
+  restartFlag: boolean; // flag indicating whether this job is a restarted one
+  totalExportedCount?: number; // total number of exported audit log entries
+  createdAt?: Date;
+  updatedAt?: Date;
+  attachment?: Ref<IAttachment>;
+}
+
+export interface IAuditLogBulkExportJobHasId
+  extends IAuditLogBulkExportJob,
+    HasObjectId {}

+ 55 - 0
apps/app/src/features/audit-log-bulk-export/server/models/audit-log-bulk-export-job.ts

@@ -0,0 +1,55 @@
+import type { HydratedDocument } from 'mongoose';
+import { type Model, Schema } from 'mongoose';
+
+import { AllSupportedActions } from '~/interfaces/activity';
+import { getOrCreateModel } from '~/server/util/mongoose-utils';
+
+import type { IAuditLogBulkExportJob } from '../../interfaces/audit-log-bulk-export';
+import {
+  AuditLogBulkExportFormat,
+  AuditLogBulkExportJobStatus,
+} from '../../interfaces/audit-log-bulk-export';
+
+export type AuditLogBulkExportJobDocument =
+  HydratedDocument<IAuditLogBulkExportJob>;
+
+export type AuditLogBulkExportJobModel = Model<AuditLogBulkExportJobDocument>;
+
+const auditLogBulkExportJobSchema = new Schema<IAuditLogBulkExportJob>(
+  {
+    user: { type: Schema.Types.ObjectId, ref: 'User', required: true },
+    filters: {
+      type: {
+        users: [{ type: Schema.Types.ObjectId, ref: 'User' }],
+        actions: [{ type: String, enum: AllSupportedActions }],
+        dateFrom: { type: Date },
+        dateTo: { type: Date },
+      },
+      required: true,
+    },
+    filterHash: { type: String, required: true, index: true },
+    format: {
+      type: String,
+      enum: Object.values(AuditLogBulkExportFormat),
+      required: true,
+      default: AuditLogBulkExportFormat.json,
+    },
+    status: {
+      type: String,
+      enum: Object.values(AuditLogBulkExportJobStatus),
+      required: true,
+      default: AuditLogBulkExportJobStatus.exporting,
+    },
+    lastExportedId: { type: String },
+    completedAt: { type: Date },
+    restartFlag: { type: Boolean, required: true, default: false },
+    totalExportedCount: { type: Number, default: 0 },
+    attachment: { type: Schema.Types.ObjectId, ref: 'Attachment' },
+  },
+  { timestamps: true },
+);
+
+export default getOrCreateModel<
+  AuditLogBulkExportJobDocument,
+  AuditLogBulkExportJobModel
+>('AuditLogBulkExportJob', auditLogBulkExportJobSchema);

+ 299 - 0
apps/app/src/features/audit-log-bulk-export/server/routes/apiv3/audit-log-bulk-export.integ.ts

@@ -0,0 +1,299 @@
+import express, {
+  type NextFunction,
+  type Request,
+  type Response,
+} from 'express';
+import request from 'supertest';
+import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
+
+import type Crowi from '~/server/crowi';
+import type { ApiV3Response } from '~/server/routes/apiv3/interfaces/apiv3-response';
+
+import * as ServiceModule from '../../service/audit-log-bulk-export';
+import { auditLogBulkExportService } from '../../service/audit-log-bulk-export';
+import { factory } from './audit-log-bulk-export';
+
+vi.mock('~/server/middlewares/login-required', () => ({
+  default: () => (_req: Request, _res: Response, next: NextFunction) => {
+    next();
+  },
+}));
+
+vi.mock('~/server/middlewares/apiv3-form-validator', () => {
+  const { validationResult } = require('express-validator');
+  return {
+    apiV3FormValidator: (req: Request, res: Response, next: NextFunction) => {
+      const errors = validationResult(req);
+      if (!errors.isEmpty()) {
+        const validationErrors = errors
+          .array()
+          .map((err: { param: string; msg: string }) => ({
+            message: `${err.param}: ${err.msg}`,
+            code: 'validation_failed',
+          }));
+        return (res as ApiV3Response).apiv3Err(validationErrors, 400);
+      }
+      return next();
+    },
+  };
+});
+
+vi.mock('../../service/audit-log-bulk-export', async () => {
+  const actual = await import('../../service/audit-log-bulk-export');
+  return {
+    ...actual,
+    auditLogBulkExportService: {
+      createOrResetExportJob: vi.fn(),
+    },
+  };
+});
+
+function buildCrowi(): Crowi {
+  const accessTokenParser =
+    () =>
+    (
+      req: Request & { user?: { _id: string } },
+      _res: Response,
+      next: NextFunction,
+    ) => {
+      req.user = { _id: '6561a1a1a1a1a1a1a1a1a1a1' };
+      next();
+    };
+
+  return { accessTokenParser } as unknown as Crowi;
+}
+
+function withApiV3Helpers(app: express.Express) {
+  app.use((_req, res, next) => {
+    (res as ApiV3Response).apiv3 = (body: unknown, status = 200) =>
+      res.status(status).json(body);
+
+    (res as ApiV3Response).apiv3Err = (
+      _err: unknown,
+      status = 500,
+      info?: unknown,
+    ) => {
+      let errors = Array.isArray(_err) ? _err : [_err];
+
+      errors = errors.map((e: unknown) => {
+        if (e && typeof e === 'object' && 'message' in e && 'code' in e) {
+          return e;
+        }
+        return e;
+      });
+
+      return res.status(status).json({ errors, info });
+    };
+
+    next();
+  });
+}
+
+function buildApp() {
+  const app = express();
+  app.use(express.json());
+  withApiV3Helpers(app);
+  const crowi = buildCrowi();
+  const router = factory(crowi);
+  app.use('/_api/v3/audit-log-bulk-export', router);
+  return app;
+}
+
+describe('POST /_api/v3/audit-log-bulk-export', () => {
+  const createOrReset =
+    auditLogBulkExportService.createOrResetExportJob as unknown as ReturnType<
+      typeof vi.fn
+    >;
+
+  beforeEach(() => {
+    vi.clearAllMocks();
+  });
+
+  afterEach(() => {
+    vi.restoreAllMocks();
+  });
+
+  it('returns 201 with jobId on success', async () => {
+    createOrReset.mockResolvedValueOnce('job-123');
+
+    const app = buildApp();
+    const res = await request(app)
+      .post('/_api/v3/audit-log-bulk-export')
+      .send({
+        filters: { actions: ['PAGE_VIEW'] },
+        restartJob: false,
+      });
+
+    expect(res.status).toBe(201);
+    expect(res.body).toEqual({ jobId: 'job-123' });
+
+    expect(createOrReset).toHaveBeenCalledTimes(1);
+    const [filters, format, userId, restartJob] = createOrReset.mock.calls[0];
+
+    expect(filters).toEqual({ actions: ['PAGE_VIEW'] });
+    expect(format).toBe('json');
+    expect(userId).toBeDefined();
+    expect(restartJob).toBe(false);
+  });
+
+  it('returns 409 with proper error code when DuplicateAuditLogBulkExportJobError is thrown', async () => {
+    const DuplicateErrCtor =
+      (
+        ServiceModule as {
+          DuplicateAuditLogBulkExportJobError?: new (
+            ...args: unknown[]
+          ) => Error;
+        }
+      ).DuplicateAuditLogBulkExportJobError ?? (() => {});
+    const err = Object.create(DuplicateErrCtor.prototype);
+    err.message = 'Duplicate audit-log bulk export job is in progress';
+    err.code = 'audit_log_bulk_export.duplicate_export_job_error';
+    err.duplicateJob = { createdAt: new Date('2025-10-01T00:00:00Z') };
+
+    createOrReset.mockRejectedValueOnce(err);
+
+    const app = buildApp();
+    const res = await request(app)
+      .post('/_api/v3/audit-log-bulk-export')
+      .send({
+        filters: { actions: ['PAGE_VIEW'] },
+      });
+
+    expect(res.status).toBe(409);
+    expect(res.body?.errors).toBeDefined();
+    expect(res.body?.errors?.[0]?.code).toBe(
+      'audit_log_bulk_export.duplicate_export_job_error',
+    );
+    expect(res.body?.errors?.[0]?.args?.duplicateJob?.createdAt).toBeDefined();
+  });
+
+  it('returns 500 with proper error code when unexpected error occurs', async () => {
+    createOrReset.mockRejectedValueOnce(new Error('boom'));
+
+    const app = buildApp();
+    const res = await request(app)
+      .post('/_api/v3/audit-log-bulk-export')
+      .send({
+        filters: { actions: ['PAGE_VIEW'] },
+      });
+
+    expect(res.status).toBe(500);
+    expect(res.body?.errors).toBeDefined();
+    expect(res.body?.errors?.[0]?.code).toBe(
+      'audit_log_bulk_export.failed_to_export',
+    );
+  });
+
+  describe('validation tests', () => {
+    it('returns 400 when filters is missing', async () => {
+      const app = buildApp();
+      const res = await request(app)
+        .post('/_api/v3/audit-log-bulk-export')
+        .send({});
+
+      expect(res.status).toBe(400);
+      expect(res.body?.errors).toBeDefined();
+    });
+
+    it('returns 400 when filters is not an object', async () => {
+      const app = buildApp();
+      const res = await request(app)
+        .post('/_api/v3/audit-log-bulk-export')
+        .send({
+          filters: 'invalid',
+        });
+
+      expect(res.status).toBe(400);
+      expect(res.body?.errors).toBeDefined();
+    });
+
+    it('returns 400 when usernames contains non-string values', async () => {
+      const app = buildApp();
+      const res = await request(app)
+        .post('/_api/v3/audit-log-bulk-export')
+        .send({
+          filters: {
+            usernames: [123, 456],
+          },
+        });
+
+      expect(res.status).toBe(400);
+      expect(res.body?.errors).toBeDefined();
+    });
+
+    it('returns 400 when actions contains invalid action', async () => {
+      const app = buildApp();
+      const res = await request(app)
+        .post('/_api/v3/audit-log-bulk-export')
+        .send({
+          filters: {
+            actions: ['invalid-action'],
+          },
+        });
+
+      expect(res.status).toBe(400);
+      expect(res.body?.errors).toBeDefined();
+    });
+
+    it('returns 400 when dateFrom is not a valid ISO date', async () => {
+      const app = buildApp();
+      const res = await request(app)
+        .post('/_api/v3/audit-log-bulk-export')
+        .send({
+          filters: {
+            dateFrom: 'invalid-date',
+          },
+        });
+
+      expect(res.status).toBe(400);
+      expect(res.body?.errors).toBeDefined();
+    });
+
+    it('returns 400 when format is invalid', async () => {
+      const app = buildApp();
+      const res = await request(app)
+        .post('/_api/v3/audit-log-bulk-export')
+        .send({
+          filters: { actions: ['PAGE_VIEW'] },
+          format: 'invalid-format',
+        });
+
+      expect(res.status).toBe(400);
+      expect(res.body?.errors).toBeDefined();
+    });
+
+    it('returns 400 when restartJob is not boolean', async () => {
+      const app = buildApp();
+      const res = await request(app)
+        .post('/_api/v3/audit-log-bulk-export')
+        .send({
+          filters: { actions: ['PAGE_VIEW'] },
+          restartJob: 'not-boolean',
+        });
+
+      expect(res.status).toBe(400);
+      expect(res.body?.errors).toBeDefined();
+    });
+
+    it('accepts valid request with all optional fields', async () => {
+      createOrReset.mockResolvedValueOnce('job-456');
+
+      const app = buildApp();
+      const res = await request(app)
+        .post('/_api/v3/audit-log-bulk-export')
+        .send({
+          filters: {
+            users: ['6561a1a1a1a1a1a1a1a1a1a1'],
+            actions: ['PAGE_VIEW', 'PAGE_CREATE'],
+            dateFrom: '2023-01-01T00:00:00Z',
+            dateTo: '2023-12-31T23:59:59Z',
+          },
+          format: 'json',
+          restartJob: true,
+        });
+
+      expect(res.status).toBe(201);
+      expect(res.body?.jobId).toBe('job-456');
+    });
+  });
+});

+ 117 - 0
apps/app/src/features/audit-log-bulk-export/server/routes/apiv3/audit-log-bulk-export.ts

@@ -0,0 +1,117 @@
+import type { IUserHasId } from '@growi/core';
+import { SCOPE } from '@growi/core/dist/interfaces';
+import { ErrorV3 } from '@growi/core/dist/models';
+import type { Request } from 'express';
+import { Router } from 'express';
+import { body } from 'express-validator';
+
+import { AuditLogBulkExportFormat } from '~/features/audit-log-bulk-export/interfaces/audit-log-bulk-export';
+import type { SupportedActionType } from '~/interfaces/activity';
+import { AllSupportedActions } from '~/interfaces/activity';
+import type Crowi from '~/server/crowi';
+import { apiV3FormValidator } from '~/server/middlewares/apiv3-form-validator';
+import loginRequiredFactory from '~/server/middlewares/login-required';
+import type { ApiV3Response } from '~/server/routes/apiv3/interfaces/apiv3-response';
+import loggerFactory from '~/utils/logger';
+
+import {
+  auditLogBulkExportService,
+  DuplicateAuditLogBulkExportJobError,
+} from '../../service/audit-log-bulk-export';
+
+const logger = loggerFactory('growi:routes:apiv3:audit-log-bulk-export');
+
+const router = Router();
+
+interface AuditLogExportReqBody {
+  filters: {
+    usernames?: string[];
+    actions?: SupportedActionType[];
+    dateFrom?: Date;
+    dateTo?: Date;
+  };
+  format?: (typeof AuditLogBulkExportFormat)[keyof typeof AuditLogBulkExportFormat];
+  restartJob?: boolean;
+}
+interface AuthorizedRequest
+  extends Request<undefined, ApiV3Response, AuditLogExportReqBody> {
+  user?: IUserHasId;
+}
+
+export const factory = (crowi: Crowi): Router => {
+  const accessTokenParser = crowi.accessTokenParser;
+  const loginRequiredStrictly = loginRequiredFactory(crowi);
+
+  const validators = {
+    auditLogBulkExport: [
+      body('filters').exists({ checkFalsy: true }).isObject(),
+      body('filters.usernames').optional({ nullable: true }).isArray(),
+      body('filters.usernames.*').optional({ nullable: true }).isString(),
+      body('filters.actions').optional({ nullable: true }).isArray(),
+      body('filters.actions.*')
+        .optional({ nullable: true })
+        .isString()
+        .isIn(AllSupportedActions),
+      body('filters.dateFrom')
+        .optional({ nullable: true })
+        .isISO8601()
+        .toDate(),
+      body('filters.dateTo').optional({ nullable: true }).isISO8601().toDate(),
+      body('format')
+        .optional({ nullable: true })
+        .isString()
+        .isIn(Object.values(AuditLogBulkExportFormat)),
+      body('restartJob').isBoolean().optional(),
+    ],
+  };
+  router.post(
+    '/',
+    accessTokenParser([SCOPE.WRITE.ADMIN.AUDIT_LOG]),
+    loginRequiredStrictly,
+    validators.auditLogBulkExport,
+    apiV3FormValidator,
+    async (req: AuthorizedRequest, res: ApiV3Response) => {
+      const {
+        filters,
+        format = AuditLogBulkExportFormat.json,
+        restartJob,
+      } = req.body;
+
+      try {
+        const jobId = await auditLogBulkExportService.createOrResetExportJob(
+          filters,
+          format,
+          req.user?._id,
+          restartJob,
+        );
+        return res.apiv3({ jobId }, 201);
+      } catch (err) {
+        logger.error(err);
+
+        if (err instanceof DuplicateAuditLogBulkExportJobError) {
+          return res.apiv3Err(
+            new ErrorV3(
+              'Duplicate audit-log bulk export job is in progress',
+              'audit_log_bulk_export.duplicate_export_job_error',
+              undefined,
+              {
+                duplicateJob: {
+                  createdAt: err.duplicateJob.createdAt,
+                },
+              },
+            ),
+            409,
+          );
+        }
+
+        return res.apiv3Err(
+          new ErrorV3(
+            'Failed to start audit-log bulk export',
+            'audit_log_bulk_export.failed_to_export',
+          ),
+        );
+      }
+    },
+  );
+  return router;
+};

+ 1 - 0
apps/app/src/features/audit-log-bulk-export/server/routes/apiv3/index.ts

@@ -0,0 +1 @@
+export { factory } from './audit-log-bulk-export';

+ 234 - 0
apps/app/src/features/audit-log-bulk-export/server/service/audit-log-bulk-export-job-clean-up-cron.integ.ts

@@ -0,0 +1,234 @@
+import type { IUser } from '@growi/core';
+import mongoose from 'mongoose';
+
+import type Crowi from '~/server/crowi';
+import { configManager } from '~/server/service/config-manager';
+
+import {
+  AuditLogBulkExportFormat,
+  AuditLogBulkExportJobStatus,
+} from '../../interfaces/audit-log-bulk-export';
+import AuditLogBulkExportJob from '../models/audit-log-bulk-export-job';
+import instantiateAuditLogBulkExportJobCleanUpCronService, {
+  auditLogBulkExportJobCleanUpCronService,
+} from './audit-log-bulk-export-job-clean-up-cron';
+
+const userSchema = new mongoose.Schema(
+  {
+    name: { type: String },
+    username: { type: String, required: true, unique: true },
+    email: { type: String, unique: true, sparse: true },
+  },
+  {
+    timestamps: true,
+  },
+);
+const User = mongoose.model<IUser>('User', userSchema);
+
+vi.mock('./audit-log-bulk-export-job-cron', () => {
+  return {
+    auditLogBulkExportJobCronService: {
+      cleanUpExportJobResources: vi.fn(() => Promise.resolve()),
+      notifyExportResultAndCleanUp: vi.fn(() => Promise.resolve()),
+    },
+  };
+});
+
+describe('AuditLogBulkExportJobCleanUpCronService', () => {
+  const crowi = {} as Crowi;
+  let user: IUser;
+
+  beforeAll(async () => {
+    await configManager.loadConfigs();
+    user = await User.create({
+      name: 'Example for AuditLogBulkExportJobCleanUpCronService Test',
+      username: 'audit log bulk export job cleanup cron test user',
+      email: 'auditLogBulkExportCleanUpCronTestUser@example.com',
+    });
+    instantiateAuditLogBulkExportJobCleanUpCronService(crowi);
+  });
+
+  beforeEach(async () => {
+    await AuditLogBulkExportJob.deleteMany();
+  });
+
+  describe('deleteExpiredExportJobs', () => {
+    const jobId1 = new mongoose.Types.ObjectId();
+    const jobId2 = new mongoose.Types.ObjectId();
+    const jobId3 = new mongoose.Types.ObjectId();
+    const jobId4 = new mongoose.Types.ObjectId();
+    beforeEach(async () => {
+      await configManager.updateConfig(
+        'app:bulkExportJobExpirationSeconds',
+        86400,
+      );
+
+      await AuditLogBulkExportJob.insertMany([
+        {
+          _id: jobId1,
+          user,
+          filters: {},
+          filterHash: 'hash1',
+          format: AuditLogBulkExportFormat.json,
+          status: AuditLogBulkExportJobStatus.exporting,
+          restartFlag: false,
+          createdAt: new Date(Date.now()),
+        },
+        {
+          _id: jobId2,
+          user,
+          filters: {},
+          filterHash: 'hash2',
+          format: AuditLogBulkExportFormat.json,
+          status: AuditLogBulkExportJobStatus.exporting,
+          restartFlag: false,
+          createdAt: new Date(Date.now() - 86400 * 1000 - 1),
+        },
+        {
+          _id: jobId3,
+          user,
+          filters: {},
+          filterHash: 'hash3',
+          format: AuditLogBulkExportFormat.json,
+          status: AuditLogBulkExportJobStatus.uploading,
+          restartFlag: false,
+          createdAt: new Date(Date.now() - 86400 * 1000 - 2),
+        },
+        {
+          _id: jobId4,
+          user,
+          filters: {},
+          filterHash: 'hash4',
+          format: AuditLogBulkExportFormat.json,
+          status: AuditLogBulkExportJobStatus.failed,
+          restartFlag: false,
+        },
+      ]);
+    });
+
+    test('should delete expired jobs', async () => {
+      expect(await AuditLogBulkExportJob.find()).toHaveLength(4);
+
+      await auditLogBulkExportJobCleanUpCronService?.deleteExpiredExportJobs();
+      const jobs = await AuditLogBulkExportJob.find();
+
+      expect(jobs).toHaveLength(2);
+      expect(jobs.map((job) => job._id).sort()).toStrictEqual(
+        [jobId1, jobId4].sort(),
+      );
+    });
+  });
+
+  describe('deleteDownloadExpiredExportJobs', () => {
+    const jobId1 = new mongoose.Types.ObjectId();
+    const jobId2 = new mongoose.Types.ObjectId();
+    const jobId3 = new mongoose.Types.ObjectId();
+    const jobId4 = new mongoose.Types.ObjectId();
+    beforeEach(async () => {
+      await configManager.updateConfig(
+        'app:bulkExportDownloadExpirationSeconds',
+        86400,
+      );
+
+      await AuditLogBulkExportJob.insertMany([
+        {
+          _id: jobId1,
+          user,
+          filters: {},
+          filterHash: 'hash1',
+          format: AuditLogBulkExportFormat.json,
+          status: AuditLogBulkExportJobStatus.completed,
+          restartFlag: false,
+          completedAt: new Date(Date.now()),
+        },
+        {
+          _id: jobId2,
+          user,
+          filters: {},
+          filterHash: 'hash2',
+          format: AuditLogBulkExportFormat.json,
+          status: AuditLogBulkExportJobStatus.completed,
+          restartFlag: false,
+          completedAt: new Date(Date.now() - 86400 * 1000 - 1),
+        },
+        {
+          _id: jobId3,
+          user,
+          filters: {},
+          filterHash: 'hash3',
+          format: AuditLogBulkExportFormat.json,
+          status: AuditLogBulkExportJobStatus.exporting,
+          restartFlag: false,
+        },
+        {
+          _id: jobId4,
+          user,
+          filters: {},
+          filterHash: 'hash4',
+          format: AuditLogBulkExportFormat.json,
+          status: AuditLogBulkExportJobStatus.failed,
+          restartFlag: false,
+        },
+      ]);
+    });
+
+    test('should delete download expired jobs', async () => {
+      expect(await AuditLogBulkExportJob.find()).toHaveLength(4);
+
+      await auditLogBulkExportJobCleanUpCronService?.deleteDownloadExpiredExportJobs();
+      const jobs = await AuditLogBulkExportJob.find();
+
+      expect(jobs).toHaveLength(3);
+      expect(jobs.map((job) => job._id).sort()).toStrictEqual(
+        [jobId1, jobId3, jobId4].sort(),
+      );
+    });
+  });
+
+  describe('deleteFailedExportJobs', () => {
+    const jobId1 = new mongoose.Types.ObjectId();
+    const jobId2 = new mongoose.Types.ObjectId();
+    const jobId3 = new mongoose.Types.ObjectId();
+    beforeEach(async () => {
+      await AuditLogBulkExportJob.insertMany([
+        {
+          _id: jobId1,
+          user,
+          filters: {},
+          filterHash: 'hash1',
+          format: AuditLogBulkExportFormat.json,
+          status: AuditLogBulkExportJobStatus.failed,
+          restartFlag: false,
+        },
+        {
+          _id: jobId2,
+          user,
+          filters: {},
+          filterHash: 'hash2',
+          format: AuditLogBulkExportFormat.json,
+          status: AuditLogBulkExportJobStatus.exporting,
+          restartFlag: false,
+        },
+        {
+          _id: jobId3,
+          user,
+          filters: {},
+          filterHash: 'hash3',
+          format: AuditLogBulkExportFormat.json,
+          status: AuditLogBulkExportJobStatus.failed,
+          restartFlag: false,
+        },
+      ]);
+    });
+
+    test('should delete failed export jobs', async () => {
+      expect(await AuditLogBulkExportJob.find()).toHaveLength(3);
+
+      await auditLogBulkExportJobCleanUpCronService?.deleteFailedExportJobs();
+      const jobs = await AuditLogBulkExportJob.find();
+
+      expect(jobs).toHaveLength(1);
+      expect(jobs.map((job) => job._id)).toStrictEqual([jobId2]);
+    });
+  });
+});

+ 155 - 0
apps/app/src/features/audit-log-bulk-export/server/service/audit-log-bulk-export-job-clean-up-cron.ts

@@ -0,0 +1,155 @@
+import type { HydratedDocument } from 'mongoose';
+
+import type Crowi from '~/server/crowi';
+import { configManager } from '~/server/service/config-manager';
+import CronService from '~/server/service/cron';
+import loggerFactory from '~/utils/logger';
+
+import {
+  AuditLogBulkExportJobInProgressJobStatus,
+  AuditLogBulkExportJobStatus,
+} from '../../interfaces/audit-log-bulk-export';
+import type { AuditLogBulkExportJobDocument } from '../models/audit-log-bulk-export-job';
+import AuditLogBulkExportJob from '../models/audit-log-bulk-export-job';
+import { auditLogBulkExportJobCronService } from './audit-log-bulk-export-job-cron';
+
+const logger = loggerFactory(
+  'growi:service:audit-log-bulk-export-job-clean-up-cron',
+);
+
+/**
+ * Manages cronjob which deletes unnecessary audit log bulk export jobs
+ */
+class AuditLogBulkExportJobCleanUpCronService extends CronService {
+  crowi: Crowi;
+
+  constructor(crowi: Crowi) {
+    super();
+    this.crowi = crowi;
+  }
+
+  override getCronSchedule(): string {
+    return '0 */6 * * *';
+  }
+
+  override async executeJob(): Promise<void> {
+    await this.deleteExpiredExportJobs();
+    await this.deleteDownloadExpiredExportJobs();
+    await this.deleteFailedExportJobs();
+  }
+
+  /**
+   * Delete audit log bulk export jobs which are on-going and has passed the limit time for execution
+   */
+  async deleteExpiredExportJobs() {
+    const exportJobExpirationSeconds = configManager.getConfig(
+      'app:bulkExportJobExpirationSeconds',
+    );
+
+    const thresholdDate = new Date(
+      Date.now() - exportJobExpirationSeconds * 1000,
+    );
+
+    const expiredExportJobs = await AuditLogBulkExportJob.find({
+      $or: Object.values(AuditLogBulkExportJobInProgressJobStatus).map(
+        (status) => ({
+          status,
+        }),
+      ),
+      createdAt: {
+        $lt: thresholdDate,
+      },
+    });
+
+    if (auditLogBulkExportJobCronService != null) {
+      await this.cleanUpAndDeleteBulkExportJobs(
+        expiredExportJobs,
+        auditLogBulkExportJobCronService.cleanUpExportJobResources.bind(
+          auditLogBulkExportJobCronService,
+        ),
+      );
+    }
+  }
+
+  /**
+   * Delete audit log bulk export jobs which have completed but the due time for downloading has passed
+   */
+  async deleteDownloadExpiredExportJobs() {
+    const downloadExpirationSeconds = configManager.getConfig(
+      'app:bulkExportDownloadExpirationSeconds',
+    );
+    const thresholdDate = new Date(
+      Date.now() - downloadExpirationSeconds * 1000,
+    );
+
+    const downloadExpiredExportJobs = await AuditLogBulkExportJob.find({
+      status: AuditLogBulkExportJobStatus.completed,
+      completedAt: { $lt: thresholdDate },
+    });
+
+    const cleanUp = async (job: AuditLogBulkExportJobDocument) => {
+      await auditLogBulkExportJobCronService?.cleanUpExportJobResources(job);
+
+      const hasSameAttachmentAndDownloadNotExpired =
+        await AuditLogBulkExportJob.findOne({
+          attachment: job.attachment,
+          _id: { $ne: job._id },
+          completedAt: { $gte: thresholdDate },
+        });
+      if (hasSameAttachmentAndDownloadNotExpired == null) {
+        await this.crowi.attachmentService?.removeAttachment(job.attachment);
+      }
+    };
+
+    await this.cleanUpAndDeleteBulkExportJobs(
+      downloadExpiredExportJobs,
+      cleanUp,
+    );
+  }
+
+  /**
+   * Delete audit log bulk export jobs which have failed
+   */
+  async deleteFailedExportJobs() {
+    const failedExportJobs = await AuditLogBulkExportJob.find({
+      status: AuditLogBulkExportJobStatus.failed,
+    });
+
+    if (auditLogBulkExportJobCronService != null) {
+      await this.cleanUpAndDeleteBulkExportJobs(
+        failedExportJobs,
+        auditLogBulkExportJobCronService.cleanUpExportJobResources.bind(
+          auditLogBulkExportJobCronService,
+        ),
+      );
+    }
+  }
+
+  async cleanUpAndDeleteBulkExportJobs(
+    auditLogBulkExportJobs: HydratedDocument<AuditLogBulkExportJobDocument>[],
+    cleanUp: (job: AuditLogBulkExportJobDocument) => Promise<void>,
+  ): Promise<void> {
+    const results = await Promise.allSettled(
+      auditLogBulkExportJobs.map((job) => cleanUp(job)),
+    );
+    results.forEach((result) => {
+      if (result.status === 'rejected') logger.error(result.reason);
+    });
+
+    const cleanedUpJobs = auditLogBulkExportJobs.filter(
+      (_, index) => results[index].status === 'fulfilled',
+    );
+    if (cleanedUpJobs.length > 0) {
+      const cleanedUpJobIds = cleanedUpJobs.map((job) => job._id);
+      await AuditLogBulkExportJob.deleteMany({ _id: { $in: cleanedUpJobIds } });
+    }
+  }
+}
+
// Module-level singleton; remains undefined until instantiate() runs at startup
export let auditLogBulkExportJobCleanUpCronService:
  | AuditLogBulkExportJobCleanUpCronService
  | undefined;
// Called once by the server bootstrap to create the singleton instance
export default function instantiate(crowi: Crowi): void {
  auditLogBulkExportJobCleanUpCronService =
    new AuditLogBulkExportJobCleanUpCronService(crowi);
}

+ 751 - 0
apps/app/src/features/audit-log-bulk-export/server/service/audit-log-bulk-export-job-cron/audit-log-bulk-export-job-cron-service.integ.ts

@@ -0,0 +1,751 @@
+import fs from 'node:fs';
+import path from 'node:path';
+import { PassThrough } from 'node:stream';
+import { pipeline } from 'node:stream/promises';
+import type { IUser } from '@growi/core';
+import mongoose from 'mongoose';
+import type { MockedFunction } from 'vitest';
+import {
+  afterAll,
+  afterEach,
+  beforeAll,
+  beforeEach,
+  describe,
+  expect,
+  it,
+  vi,
+} from 'vitest';
+
+import { SupportedAction } from '~/interfaces/activity';
+import type Crowi from '~/server/crowi';
+import { ResponseMode } from '~/server/interfaces/attachment';
+import Activity, { type ActivityDocument } from '~/server/models/activity';
+import type { IAttachmentDocument } from '~/server/models/attachment';
+import { Attachment } from '~/server/models/attachment';
+import { configManager } from '~/server/service/config-manager';
+import type { FileUploader } from '~/server/service/file-uploader/file-uploader';
+import { MultipartUploader } from '~/server/service/file-uploader/multipart-uploader';
+
+import {
+  AuditLogBulkExportFormat,
+  AuditLogBulkExportJobStatus,
+} from '../../../interfaces/audit-log-bulk-export';
+import AuditLogBulkExportJob, {
+  type AuditLogBulkExportJobDocument,
+} from '../../models/audit-log-bulk-export-job';
+import {
+  AuditLogBulkExportJobExpiredError,
+  AuditLogBulkExportJobRestartedError,
+} from './errors';
+import instanciateAuditLogBulkExportJobCronService, {
+  auditLogBulkExportJobCronService,
+} from './index';
+
// Shape of one activity record as it appears in the exported JSON files
type ExportedActivityData = Pick<
  ActivityDocument,
  '_id' | 'action' | 'user'
> & {
  createdAt: Date;
};

// Minimal stand-in for the app's User model — only the fields this test needs
const userSchema = new mongoose.Schema(
  {
    name: { type: String },
    username: { type: String, required: true, unique: true },
    email: { type: String, unique: true, sparse: true },
  },
  {
    timestamps: true,
  },
);
const User = mongoose.model<IUser>('User', userSchema);
+
+async function waitForCondition(
+  condition: () => boolean | Promise<boolean>,
+  {
+    timeoutMs = 2000,
+    intervalMs = 50,
+  }: { timeoutMs?: number; intervalMs?: number } = {},
+): Promise<void> {
+  const start = Date.now();
+
+  while (true) {
+    if (await condition()) return;
+
+    if (Date.now() - start > timeoutMs) {
+      throw new Error('waitForCondition: timeout exceeded');
+    }
+
+    await new Promise((resolve) => setTimeout(resolve, intervalMs));
+  }
+}
+
+async function waitForJobStatus(
+  jobId: mongoose.Types.ObjectId,
+  status: AuditLogBulkExportJobStatus,
+): Promise<AuditLogBulkExportJobDocument> {
+  let latest: AuditLogBulkExportJobDocument | null = null;
+
+  await waitForCondition(async () => {
+    latest = await AuditLogBulkExportJob.findById(jobId);
+    return latest?.status === status;
+  });
+
+  if (!latest) {
+    throw new Error('Job not found after waitForCondition succeeded');
+  }
+  return latest;
+}
+
// Multipart uploader whose operations are all no-ops, so tests never touch
// real storage; reported uploaded size is always 0
class MockMultipartUploader extends MultipartUploader {
  override get uploadId(): string {
    return 'mock-upload-id';
  }

  override async initUpload(): Promise<void> {}
  override async uploadPart(
    _part: Buffer,
    _partNumber: number,
  ): Promise<void> {}
  override async completeUpload(): Promise<void> {}
  override async abortUpload(): Promise<void> {}
  override async getUploadedFileSize(): Promise<number> {
    return 0;
  }
}
+
// File-upload service stub: every capability check succeeds and nothing is
// persisted; uploadAttachment is replaced per-test with uploadAttachmentSpy
const mockFileUploadService: FileUploader = {
  uploadAttachment: vi.fn(),
  getIsUploadable: vi.fn(() => true),
  isWritable: vi.fn(() => Promise.resolve(true)),
  getIsReadable: vi.fn(() => true),
  isValidUploadSettings: vi.fn(() => true),
  getFileUploadEnabled: vi.fn(() => true),
  listFiles: vi.fn(() => []),
  saveFile: vi.fn(() => Promise.resolve()),
  deleteFile: vi.fn(),
  deleteFiles: vi.fn(),
  getFileUploadTotalLimit: vi.fn(() => 1024 * 1024 * 1024),
  getTotalFileSize: vi.fn(() => Promise.resolve(0)),
  checkLimit: vi.fn(() => Promise.resolve({ isUploadable: true })),
  determineResponseMode: vi.fn(() => ResponseMode.REDIRECT),
  respond: vi.fn(),
  findDeliveryFile: vi.fn(() => Promise.resolve(new PassThrough())),
  generateTemporaryUrl: vi.fn(() =>
    Promise.resolve({ url: 'mock-url', lifetimeSec: 3600 }),
  ),
  createMultipartUploader: vi.fn(
    (uploadKey: string, maxPartSize: number) =>
      new MockMultipartUploader(uploadKey, maxPartSize),
  ),
  abortPreviousMultipartUpload: vi.fn(() => Promise.resolve()),
};

const mockActivityService = {
  createActivity: vi.fn(() => Promise.resolve({ _id: 'mock-activity-id' })),
};

const mockEventEmitter = {
  emit: vi.fn(),
};

// Only the crowi members the cron service actually touches are mocked
type MockCrowi = Pick<Crowi, 'fileUploadService'> & {
  events: { activity: typeof mockEventEmitter };
  activityService: typeof mockActivityService;
};

const createMockCrowi = (): MockCrowi => ({
  fileUploadService: mockFileUploadService,
  events: { activity: mockEventEmitter },
  activityService: mockActivityService,
});
+
+describe('AuditLogBulkExportJobCronService Integration Test', () => {
  let cronService: NonNullable<typeof auditLogBulkExportJobCronService>;
  let crowi: MockCrowi;
  let testUser: IUser & mongoose.Document;
  let testTmpDir: string;
  // Captures the stream + attachment passed to uploadAttachment for inspection
  let uploadAttachmentSpy: MockedFunction<
    (
      readable: NodeJS.ReadableStream,
      attachment: IAttachmentDocument,
    ) => Promise<void>
  >;

  // Fixture data: 3 distinct page actions on fixed dates plus 50 page views
  // spaced one minute apart — enough rows to span multiple output files with
  // maxLogsPerFile = 10 (set in beforeEach). `user` is filled in by beforeAll.
  const testActivities = [
    {
      action: SupportedAction.ACTION_PAGE_CREATE,
      user: null,
      createdAt: new Date('2023-01-01T10:00:00Z'),
      snapshot: { username: 'testuser' },
    },
    {
      action: SupportedAction.ACTION_PAGE_UPDATE,
      user: null,
      createdAt: new Date('2023-01-02T10:00:00Z'),
      snapshot: { username: 'testuser' },
    },
    {
      action: SupportedAction.ACTION_PAGE_DELETE,
      user: null,
      createdAt: new Date('2023-01-03T10:00:00Z'),
      snapshot: { username: 'testuser' },
    },
    ...Array.from({ length: 50 }, (_, i) => {
      const baseDate = new Date('2023-01-04T10:00:00Z');
      const activityDate = new Date(baseDate.getTime() + i * 60000);
      return {
        action: SupportedAction.ACTION_PAGE_VIEW,
        user: null,
        createdAt: activityDate,
        snapshot: { username: 'testuser' },
      };
    }),
  ];
+
  beforeAll(async () => {
    await configManager.loadConfigs();

    testUser = await User.create({
      name: 'Test User for Audit Log Export',
      username: 'auditlogexportcrontest',
      email: 'auditlogexportcrontest@example.com',
    });

    // Point every fixture activity at the user created above
    testActivities.forEach((activity) => {
      activity.user = testUser._id;
    });
  });

  beforeEach(async () => {
    // Fresh mock crowi + cron service instance per test
    crowi = createMockCrowi();
    instanciateAuditLogBulkExportJobCronService(crowi as unknown as Crowi);
    if (!auditLogBulkExportJobCronService) {
      throw new Error('auditLogBulkExportJobCronService was not initialized');
    }
    cronService = auditLogBulkExportJobCronService;

    // Isolated temp dir per test; removed again in afterEach
    testTmpDir = fs.mkdtempSync(path.join('/tmp', 'audit-log-export-test-'));
    cronService.tmpOutputRootDir = testTmpDir;

    // Small limits so the fixture activities span several files/batches
    cronService.maxLogsPerFile = 10;
    cronService.pageBatchSize = 5;

    // Drain the uploaded stream and record its size on the attachment,
    // mimicking a successful upload without touching storage
    uploadAttachmentSpy = vi
      .fn()
      .mockImplementation(
        async (
          readable: NodeJS.ReadableStream,
          attachment: IAttachmentDocument,
        ) => {
          const passThrough = new PassThrough();
          let totalSize = 0;

          passThrough.on('data', (chunk) => {
            totalSize += chunk.length;
          });

          await pipeline(readable, passThrough);

          attachment.fileSize = totalSize;
        },
      );
    mockFileUploadService.uploadAttachment = uploadAttachmentSpy;

    await Activity.insertMany(testActivities);
  });

  afterEach(async () => {
    await Activity.deleteMany({});
    await AuditLogBulkExportJob.deleteMany({});
    await Attachment.deleteMany({});

    if (fs.existsSync(testTmpDir)) {
      fs.rmSync(testTmpDir, { recursive: true, force: true });
    }

    vi.clearAllMocks();
  });

  afterAll(async () => {
    await User.deleteOne({ _id: testUser._id });
  });
+
  // Full happy-path coverage: export phase, file layout, upload phase
  describe('1. Basic Operations (Happy Path)', () => {
    describe('1-1. No Filter → Export → ZIP → Upload', () => {
      it('should export all activities, create JSON files, and upload ZIP', async () => {
        const job = await AuditLogBulkExportJob.create({
          user: testUser._id,
          filters: {},
          format: AuditLogBulkExportFormat.json,
          status: AuditLogBulkExportJobStatus.exporting,
          filterHash: 'test-hash',
          restartFlag: false,
          totalExportedCount: 0,
        });

        // First pass runs the export phase (exporting → uploading)
        await cronService.proceedBulkExportJob(job);
        const afterExport = await waitForJobStatus(
          job._id,
          AuditLogBulkExportJobStatus.uploading,
        );

        const outputDir = cronService.getTmpOutputDir(afterExport);
        let hasFiles = false;
        let jsonFiles: string[] = [];

        // NOTE(review): checks are conditional because the temp dir may already
        // have been cleaned up by the time we inspect it — confirm if the files
        // are expected to always exist at this point
        if (fs.existsSync(outputDir)) {
          const files = fs.readdirSync(outputDir);
          jsonFiles = files.filter((file) => file.endsWith('.json'));
          hasFiles = jsonFiles.length > 0;
        }

        if (hasFiles) {
          expect(jsonFiles.length).toBeGreaterThan(0);

          // Each file holds a JSON array capped at maxLogsPerFile entries
          const firstFile = path.join(outputDir, jsonFiles[0]);
          const content = JSON.parse(fs.readFileSync(firstFile, 'utf8'));
          expect(Array.isArray(content)).toBe(true);
          expect(content.length).toBeLessThanOrEqual(
            cronService.maxLogsPerFile,
          );
        }

        // Second pass runs the upload phase
        await cronService.proceedBulkExportJob(afterExport);
        await waitForCondition(() => uploadAttachmentSpy.mock.calls.length > 0);

        expect(uploadAttachmentSpy).toHaveBeenCalledTimes(1);
        const [readable, attachment] = uploadAttachmentSpy.mock.calls[0];
        expect(readable).toBeDefined();
        expect(attachment.originalName).toMatch(/audit-logs-.*\.zip$/);

        const updatedJob = await AuditLogBulkExportJob.findById(job._id);
        expect([
          AuditLogBulkExportJobStatus.uploading,
          AuditLogBulkExportJobStatus.completed,
        ]).toContain(updatedJob?.status);
        expect(updatedJob?.totalExportedCount).toBeGreaterThan(0);
      });
    });

    describe('1-2. With Filters (actions / dateFrom / dateTo / users)', () => {
      it('should export only filtered activities', async () => {
        // Only the CREATE and UPDATE fixtures fall inside this filter window
        const job = await AuditLogBulkExportJob.create({
          user: testUser._id,
          filters: {
            actions: [
              SupportedAction.ACTION_PAGE_CREATE,
              SupportedAction.ACTION_PAGE_UPDATE,
            ],
            dateFrom: new Date('2023-01-01T00:00:00Z'),
            dateTo: new Date('2023-01-02T23:59:59Z'),
            users: [testUser._id.toString()],
          },
          format: AuditLogBulkExportFormat.json,
          status: AuditLogBulkExportJobStatus.exporting,
          filterHash: 'filtered-hash',
          restartFlag: false,
          totalExportedCount: 0,
        });

        await cronService.proceedBulkExportJob(job);
        const afterExport = await waitForJobStatus(
          job._id,
          AuditLogBulkExportJobStatus.uploading,
        );

        const outputDir = cronService.getTmpOutputDir(afterExport);
        const files = fs.readdirSync(outputDir);
        const jsonFiles = files.filter((file) => file.endsWith('.json'));

        if (jsonFiles.length > 0) {
          const content = JSON.parse(
            fs.readFileSync(path.join(outputDir, jsonFiles[0]), 'utf8'),
          );

          // Every exported record must satisfy the filters
          content.forEach((activity: ExportedActivityData) => {
            expect([
              SupportedAction.ACTION_PAGE_CREATE,
              SupportedAction.ACTION_PAGE_UPDATE,
            ]).toContain(activity.action);
            expect(new Date(activity.createdAt)).toBeInstanceOf(Date);
            expect(activity.user).toBe(testUser._id.toString());
          });
        }

        const updatedJob = await AuditLogBulkExportJob.findById(job._id);
        expect(updatedJob?.totalExportedCount).toBeLessThanOrEqual(2);
      });
    });

    describe('1-3. Zero Results', () => {
      it('should handle cases with no matching activities', async () => {
        // No fixture activity has the LOGOUT action
        const job = await AuditLogBulkExportJob.create({
          user: testUser._id,
          filters: {
            actions: [SupportedAction.ACTION_USER_LOGOUT],
          },
          format: AuditLogBulkExportFormat.json,
          status: AuditLogBulkExportJobStatus.exporting,
          filterHash: 'no-match-hash',
          restartFlag: false,
          totalExportedCount: 0,
        });

        const notifySpy = vi.spyOn(cronService, 'notifyExportResultAndCleanUp');

        await cronService.proceedBulkExportJob(job);
        await waitForCondition(async () => {
          const updatedJob = await AuditLogBulkExportJob.findById(job._id);
          return updatedJob?.status !== AuditLogBulkExportJobStatus.exporting;
        });

        const afterExport = await AuditLogBulkExportJob.findById(job._id);
        if (!afterExport) {
          throw new Error('Job not found after export phase');
        }

        const outputDir = cronService.getTmpOutputDir(afterExport);
        const files = fs.existsSync(outputDir) ? fs.readdirSync(outputDir) : [];
        const jsonFiles = files.filter((file) => file.endsWith('.json'));

        expect(jsonFiles.length).toBeLessThanOrEqual(1);

        expect(afterExport.totalExportedCount).toBe(0);

        // The user is notified that the export matched nothing
        expect(notifySpy).toHaveBeenCalledWith(
          SupportedAction.ACTION_AUDIT_LOG_BULK_EXPORT_NO_RESULTS,
          expect.objectContaining({ _id: job._id }),
        );
      });
    });
  });
+
  // Verifies that an interrupted export resumes from lastExportedId
  describe('2. Resumability', () => {
    describe('2-1. Resume from lastExportedId', () => {
      it('should resume export from the last exported ID without duplicates', async () => {
        // Pretend the first half of the activities was already exported
        const activities = await Activity.find({}).sort({ _id: 1 });
        const middleIndex = Math.floor(activities.length / 2);
        const lastExportedId = activities[middleIndex]._id.toString();

        const job = await AuditLogBulkExportJob.create({
          user: testUser._id,
          filters: {},
          format: AuditLogBulkExportFormat.json,
          status: AuditLogBulkExportJobStatus.exporting,
          filterHash: 'resume-hash',
          restartFlag: false,
          totalExportedCount: middleIndex,
          lastExportedId: lastExportedId,
        });

        await cronService.proceedBulkExportJob(job);
        const afterExport = await waitForJobStatus(
          job._id,
          AuditLogBulkExportJobStatus.uploading,
        );

        const outputDir = cronService.getTmpOutputDir(afterExport);
        const files = fs.readdirSync(outputDir);
        const jsonFiles = files.filter((file) => file.endsWith('.json'));

        if (jsonFiles.length > 0) {
          const allExportedActivities: ExportedActivityData[] = [];

          for (const file of jsonFiles) {
            const content = JSON.parse(
              fs.readFileSync(path.join(outputDir, file), 'utf8'),
            );
            allExportedActivities.push(...content);
          }

          // The resume point itself must not be re-exported
          allExportedActivities.forEach((activity) => {
            expect(activity._id).not.toBe(lastExportedId);
            expect(
              new mongoose.Types.ObjectId(activity._id).getTimestamp(),
            ).toBeInstanceOf(Date);
          });
        }

        const updatedJob = await AuditLogBulkExportJob.findById(job._id);
        expect(updatedJob?.totalExportedCount).toBeGreaterThan(middleIndex);
      });
    });

    describe('2-2. totalExportedCount and lastExportedId Updates', () => {
      it('should properly update totalExportedCount and lastExportedId', async () => {
        const job = await AuditLogBulkExportJob.create({
          user: testUser._id,
          filters: {},
          format: AuditLogBulkExportFormat.json,
          status: AuditLogBulkExportJobStatus.exporting,
          filterHash: 'count-test-hash',
          restartFlag: false,
          totalExportedCount: 0,
        });

        const initialCount = job.totalExportedCount ?? 0;

        await cronService.proceedBulkExportJob(job);
        const updatedJob = await waitForJobStatus(
          job._id,
          AuditLogBulkExportJobStatus.uploading,
        );
        expect(updatedJob?.totalExportedCount).toBeGreaterThan(initialCount);
        expect(updatedJob?.lastExportedId).toBeDefined();

        // The exported count can never exceed the number of stored activities
        const totalActivities = await Activity.countDocuments({});
        expect(updatedJob?.totalExportedCount).toBeLessThanOrEqual(
          totalActivities,
        );
      });
    });
  });
+
  // ZIP creation and upload behaviour, including the failure path
  describe('3. Upload and Compression', () => {
    describe('3-1. ZIP Content Validity', () => {
      it('should create valid ZIP with JSON files in root', async () => {
        const job = await AuditLogBulkExportJob.create({
          user: testUser._id,
          filters: {},
          format: AuditLogBulkExportFormat.json,
          status: AuditLogBulkExportJobStatus.exporting,
          filterHash: 'zip-test-hash',
          restartFlag: false,
          totalExportedCount: 0,
        });

        await cronService.proceedBulkExportJob(job);
        const afterExport = await waitForJobStatus(
          job._id,
          AuditLogBulkExportJobStatus.uploading,
        );

        await cronService.proceedBulkExportJob(afterExport);
        await waitForCondition(() => uploadAttachmentSpy.mock.calls.length > 0);

        expect(uploadAttachmentSpy).toHaveBeenCalledTimes(1);
        const [readable, attachment] = uploadAttachmentSpy.mock.calls[0];
        expect(readable).toBeDefined();
        expect(attachment.fileName).toMatch(/\.zip$/);
      });
    });

    describe('3-2. Upload Failure Handling', () => {
      it('should handle upload failures gracefully', async () => {
        // Simulate an upload that drains the stream, then fails
        uploadAttachmentSpy.mockImplementationOnce(async (readable) => {
          readable.on('error', () => {});
          readable.resume();
          throw new Error('Upload failed');
        });

        const job = await AuditLogBulkExportJob.create({
          user: testUser._id,
          filters: {},
          format: AuditLogBulkExportFormat.json,
          status: AuditLogBulkExportJobStatus.uploading,
          filterHash: 'upload-fail-hash',
          restartFlag: false,
          totalExportedCount: 10,
        });

        const notifySpy = vi.spyOn(cronService, 'notifyExportResultAndCleanUp');
        const cleanSpy = vi.spyOn(cronService, 'cleanUpExportJobResources');
        const handleSpy = vi.spyOn(cronService, 'handleError');

        // The service swallows the error internally, so the promise resolves
        await expect(
          cronService.proceedBulkExportJob(job),
        ).resolves.toBeUndefined();

        expect(uploadAttachmentSpy).toHaveBeenCalledTimes(1);
        expect(handleSpy).toHaveBeenCalledTimes(1);
        expect(notifySpy).toHaveBeenCalledWith(
          expect.anything(),
          expect.objectContaining({ _id: job._id }),
        );
        expect(cleanSpy).toHaveBeenCalledWith(
          expect.objectContaining({ _id: job._id }),
        );

        // Job is marked failed and its in-flight stream is released
        const reloaded = await AuditLogBulkExportJob.findById(job._id).lean();
        expect(reloaded?.status).toBe(AuditLogBulkExportJobStatus.failed);

        const s = cronService.getStreamInExecution(job._id);
        expect(s).toBeUndefined();
      });
    });
  });
+
+  describe('4. Error Handling', () => {
+    describe('4-1. Nonexistent Users Filter', () => {
+      it('should fail with no results for nonexistent usernames', async () => {
+        const job = await AuditLogBulkExportJob.create({
+          user: testUser._id,
+          filters: {
+            users: [new mongoose.Types.ObjectId()],
+          },
+          format: AuditLogBulkExportFormat.json,
+          status: AuditLogBulkExportJobStatus.exporting,
+          filterHash: 'bad-user-hash',
+          restartFlag: false,
+          totalExportedCount: 0,
+        });
+
+        await cronService.proceedBulkExportJob(job);
+        await waitForCondition(async () => {
+          const updatedJob = await AuditLogBulkExportJob.findById(job._id);
+          return updatedJob?.status === AuditLogBulkExportJobStatus.failed;
+        });
+
+        const updatedJob = await AuditLogBulkExportJob.findById(job._id);
+        expect(updatedJob?.status).toBe(AuditLogBulkExportJobStatus.failed);
+      });
+    });
+
+    describe('4-2. Stream/FS Errors', () => {
+      it('should handle filesystem errors', async () => {
+        cronService.tmpOutputRootDir = '/invalid/path/that/does/not/exist';
+
+        const job = await AuditLogBulkExportJob.create({
+          user: testUser._id,
+          filters: {},
+          format: AuditLogBulkExportFormat.json,
+          status: AuditLogBulkExportJobStatus.exporting,
+          filterHash: 'fs-error-hash',
+          restartFlag: false,
+          totalExportedCount: 0,
+        });
+
+        await expect(async () => {
+          await cronService.proceedBulkExportJob(job);
+        }).not.toThrow();
+      });
+    });
+
+    describe('4-3. Job Expiry and Restart Errors', () => {
+      it('should handle AuditLogBulkExportJobExpiredError', async () => {
+        const job = await AuditLogBulkExportJob.create({
+          user: testUser._id,
+          filters: {},
+          format: AuditLogBulkExportFormat.json,
+          status: AuditLogBulkExportJobStatus.exporting,
+          filterHash: 'expired-error-hash',
+          restartFlag: false,
+          totalExportedCount: 0,
+        });
+
+        const expiredError = new AuditLogBulkExportJobExpiredError();
+
+        await cronService.handleError(expiredError, job);
+
+        const updatedJob = await AuditLogBulkExportJob.findById(job._id);
+        expect(updatedJob?.status).toBe(AuditLogBulkExportJobStatus.failed);
+      });
+
+      it('should handle AuditLogBulkExportJobRestartedError', async () => {
+        const job = await AuditLogBulkExportJob.create({
+          user: testUser._id,
+          filters: {},
+          format: AuditLogBulkExportFormat.json,
+          status: AuditLogBulkExportJobStatus.exporting,
+          filterHash: 'restarted-error-hash',
+          restartFlag: false,
+          totalExportedCount: 0,
+        });
+
+        const restartedError = new AuditLogBulkExportJobRestartedError();
+
+        await cronService.handleError(restartedError, job);
+      });
+    });
+  });
+
  // Job status lifecycle, in-flight stream bookkeeping, and restart handling
  describe('5. State Transitions and Execution Control', () => {
    describe('5-1. State Flow', () => {
      it('should follow correct state transitions: exporting → uploading → completed', async () => {
        const job = await AuditLogBulkExportJob.create({
          user: testUser._id,
          filters: {},
          format: AuditLogBulkExportFormat.json,
          status: AuditLogBulkExportJobStatus.exporting,
          filterHash: 'state-flow-hash',
          restartFlag: false,
          totalExportedCount: 0,
        });

        expect(job.status).toBe(AuditLogBulkExportJobStatus.exporting);

        await cronService.proceedBulkExportJob(job);
        const afterExport = await waitForJobStatus(
          job._id,
          AuditLogBulkExportJobStatus.uploading,
        );

        expect(afterExport?.status).toBe(AuditLogBulkExportJobStatus.uploading);

        await cronService.proceedBulkExportJob(afterExport);
        await waitForCondition(() => uploadAttachmentSpy.mock.calls.length > 0);

        // Notifying completion is what moves the job to `completed`
        await cronService.notifyExportResultAndCleanUp(
          SupportedAction.ACTION_AUDIT_LOG_BULK_EXPORT_COMPLETED,
          afterExport,
        );

        const finalJob = await AuditLogBulkExportJob.findById(job._id);
        expect(finalJob?.status).toBe(AuditLogBulkExportJobStatus.completed);
      });
    });

    describe('5-2. Stream Lifecycle', () => {
      it('should properly manage stream execution lifecycle', async () => {
        const job = await AuditLogBulkExportJob.create({
          user: testUser._id,
          filters: {},
          format: AuditLogBulkExportFormat.json,
          status: AuditLogBulkExportJobStatus.exporting,
          filterHash: 'stream-lifecycle-hash',
          restartFlag: false,
          totalExportedCount: 0,
        });

        await cronService.proceedBulkExportJob(job);
        const afterExport = await waitForJobStatus(
          job._id,
          AuditLogBulkExportJobStatus.uploading,
        );

        // Cleaning up must also deregister the job's in-flight stream
        await cronService.cleanUpExportJobResources(afterExport);
        const streamAfterCleanup = cronService.getStreamInExecution(job._id);
        expect(streamAfterCleanup).toBeUndefined();
      });
    });

    describe('5-3. Restart Flag Handling', () => {
      it('should handle restartFlag correctly', async () => {
        // A job flagged for restart should have its progress reset
        const job = await AuditLogBulkExportJob.create({
          user: testUser._id,
          filters: {},
          format: AuditLogBulkExportFormat.json,
          status: AuditLogBulkExportJobStatus.exporting,
          filterHash: 'restart-flag-hash',
          restartFlag: true,
          totalExportedCount: 50,
          lastExportedId: 'some-previous-id',
        });

        await cronService.proceedBulkExportJob(job);
        await waitForCondition(async () => {
          const updatedJob = await AuditLogBulkExportJob.findById(job._id);
          return updatedJob?.restartFlag === false;
        });

        const updatedJob = await AuditLogBulkExportJob.findById(job._id);

        expect(updatedJob?.restartFlag).toBe(false);
        expect(updatedJob?.totalExportedCount).toBe(0);
        expect(updatedJob?.lastExportedId).toBeUndefined();
        expect(updatedJob?.status).toBe(AuditLogBulkExportJobStatus.exporting);
      });
    });
  });
+});

+ 11 - 0
apps/app/src/features/audit-log-bulk-export/server/service/audit-log-bulk-export-job-cron/errors.ts

@@ -0,0 +1,11 @@
+export class AuditLogBulkExportJobExpiredError extends Error {
+  constructor() {
+    super('Audit-log-bulk-export job has expired');
+  }
+}
+
+export class AuditLogBulkExportJobRestartedError extends Error {
+  constructor() {
+    super('Audit-log-bulk-export job has restarted');
+  }
+}

+ 297 - 0
apps/app/src/features/audit-log-bulk-export/server/service/audit-log-bulk-export-job-cron/index.ts

@@ -0,0 +1,297 @@
+import fs from 'node:fs';
+import path from 'node:path';
+import type { Readable } from 'node:stream';
+import type { IUser } from '@growi/core';
+import { getIdForRef, isPopulated } from '@growi/core';
+import type archiver from 'archiver';
+import mongoose from 'mongoose';
+
+import type { SupportedActionType } from '~/interfaces/activity';
+import { SupportedAction, SupportedTargetModel } from '~/interfaces/activity';
+import type Crowi from '~/server/crowi';
+import type { ObjectIdLike } from '~/server/interfaces/mongoose-utils';
+import CronService from '~/server/service/cron';
+import loggerFactory from '~/utils/logger';
+
+import {
+  AuditLogBulkExportJobInProgressJobStatus,
+  AuditLogBulkExportJobStatus,
+} from '../../../interfaces/audit-log-bulk-export';
+import type { AuditLogBulkExportJobDocument } from '../../models/audit-log-bulk-export-job';
+import AuditLogBulkExportJob from '../../models/audit-log-bulk-export-job';
+import {
+  AuditLogBulkExportJobExpiredError,
+  AuditLogBulkExportJobRestartedError,
+} from './errors';
+
+const logger = loggerFactory('growi:service:audit-log-export-job-cron');
+
/**
 * Contract implemented by AuditLogBulkExportJobCronService. The step modules
 * (export-to-fs / compress-and-upload) are invoked with an instance of this
 * interface bound as `this`.
 */
export interface IAuditLogBulkExportJobCronService {
  crowi: Crowi;
  activityEvent: NodeJS.EventEmitter;
  /** Root directory on the local fs where per-job export files are staged */
  tmpOutputRootDir: string;
  /** Cursor batch size used when streaming activities from MongoDB */
  pageBatchSize: number;
  /** Maximum number of log documents written into a single JSON file */
  maxLogsPerFile: number;
  /** Archive format used by the compress step (e.g. 'zip') */
  compressFormat: archiver.Format;
  /** zlib compression level passed to archiver */
  compressLevel: number;
  /** Advance the given in-progress job by one step (export or upload) */
  proceedBulkExportJob(
    auditLogBulkExportJob: AuditLogBulkExportJobDocument,
  ): Promise<void>;
  /** Temp output directory dedicated to the given job */
  getTmpOutputDir(auditLogBulkExportJob: AuditLogBulkExportJobDocument): string;
  /** Stream currently executing for the job, if any */
  getStreamInExecution(jobId: ObjectIdLike): Readable | undefined;
  setStreamInExecution(jobId: ObjectIdLike, stream: Readable): void;
  removeStreamInExecution(jobId: ObjectIdLike): void;
  /** Mark the job completed/failed per `action`, notify the user, then clean up */
  notifyExportResultAndCleanUp(
    action: SupportedActionType,
    auditLogBulkExportJob: AuditLogBulkExportJobDocument,
  ): Promise<void>;
  /** Route an error to the matching notification/cleanup path; no-op when err is null */
  handleError(
    err: Error | null,
    auditLogBulkExportJob: AuditLogBulkExportJobDocument,
  ): Promise<void>;
  /** Destroy any in-flight stream and remove the job's temp directory */
  cleanUpExportJobResources(
    auditLogBulkExportJob: AuditLogBulkExportJobDocument,
    restarted?: boolean,
  ): Promise<void>;
}
+
+import type { ActivityDocument } from '~/server/models/activity';
+import { preNotifyService } from '~/server/service/pre-notify';
+
+import { compressAndUpload } from './steps/compress-and-upload';
+import { exportAuditLogsToFsAsync } from './steps/exportAuditLogsToFsAsync';
+
/**
 * Manages cronjob which proceeds AuditLogBulkExportJobs in progress.
 * If AuditLogBulkExportJob finishes the current step, the next step will be started on the next cron execution.
 */
class AuditLogBulkExportJobCronService
  extends CronService
  implements IAuditLogBulkExportJobCronService
{
  crowi: Crowi;

  activityEvent: NodeJS.EventEmitter;

  // Max number of jobs advanced concurrently per cron tick
  private parallelExecLimit: number;

  // Root directory on the local fs where per-job export files are staged
  tmpOutputRootDir = '/tmp/audit-log-bulk-export';

  // Cursor batch size used when streaming activities from MongoDB
  pageBatchSize = 100;

  // Maximum number of log documents written into a single JSON file
  maxLogsPerFile = 50;

  compressFormat: archiver.Format = 'zip';

  compressLevel = 6;

  // Streams currently executing per job id; kept so cleanup/restart can destroy them
  private streamInExecutionMemo: { [key: string]: Readable } = {};

  constructor(crowi: Crowi) {
    super();
    this.crowi = crowi;
    this.activityEvent = crowi.events.activity;
    this.parallelExecLimit = 1;
  }

  /** Runs every 10 seconds. */
  override getCronSchedule(): string {
    return '*/10 * * * * *';
  }

  /**
   * Pick up the oldest in-progress jobs (up to parallelExecLimit) and
   * advance each one by a single step.
   */
  override async executeJob(): Promise<void> {
    const auditLogBulkExportJobInProgress = await AuditLogBulkExportJob.find({
      $or: Object.values(AuditLogBulkExportJobInProgressJobStatus).map(
        (status) => ({
          status,
        }),
      ),
    })
      .sort({ createdAt: 1 })
      .limit(this.parallelExecLimit);
    await Promise.all(
      auditLogBulkExportJobInProgress.map((auditLogBulkExportJob) =>
        this.proceedBulkExportJob(auditLogBulkExportJob),
      ),
    );
  }

  /**
   * Advance the given job by one step:
   * - if flagged for restart, clean up and reset progress (next tick re-exports)
   * - otherwise dispatch to the step matching the current status
   */
  async proceedBulkExportJob(
    auditLogBulkExportJob: AuditLogBulkExportJobDocument,
  ) {
    try {
      if (auditLogBulkExportJob.restartFlag) {
        // Destroy streams / temp files of the previous run, then reset progress
        await this.cleanUpExportJobResources(auditLogBulkExportJob, true);
        auditLogBulkExportJob.restartFlag = false;
        auditLogBulkExportJob.status = AuditLogBulkExportJobStatus.exporting;
        auditLogBulkExportJob.lastExportedId = undefined;
        auditLogBulkExportJob.totalExportedCount = 0;
        await auditLogBulkExportJob.save();
        return;
      }
      const User = mongoose.model<IUser>('User');
      const user = await User.findById(getIdForRef(auditLogBulkExportJob.user));

      if (!user) {
        throw new Error(
          `User not found for audit log export job: ${auditLogBulkExportJob._id}`,
        );
      }

      if (
        auditLogBulkExportJob.status === AuditLogBulkExportJobStatus.exporting
      ) {
        await exportAuditLogsToFsAsync.bind(this)(auditLogBulkExportJob);
      } else if (
        auditLogBulkExportJob.status === AuditLogBulkExportJobStatus.uploading
      ) {
        await compressAndUpload.bind(this)(user, auditLogBulkExportJob);
      }
    } catch (err) {
      // NOTE(review): errors here are only logged; the job keeps its current
      // status and is retried on the next tick. Confirm this is intended —
      // handleError() exists to mark the job failed and clean up.
      logger.error(err);
    }
  }

  // Temp output directory dedicated to the given job
  getTmpOutputDir(
    auditLogBulkExportJob: AuditLogBulkExportJobDocument,
  ): string {
    const jobId = auditLogBulkExportJob._id.toString();
    return path.join(this.tmpOutputRootDir, jobId);
  }

  /**
   * Get the stream in execution for a job.
   * A getter method that includes "undefined" in the return type
   */
  getStreamInExecution(jobId: ObjectIdLike): Readable | undefined {
    return this.streamInExecutionMemo[jobId.toString()];
  }

  /**
   * Set the stream in execution for a job
   */
  setStreamInExecution(jobId: ObjectIdLike, stream: Readable) {
    this.streamInExecutionMemo[jobId.toString()] = stream;
  }

  /**
   * Remove the stream in execution for a job
   */
  removeStreamInExecution(jobId: ObjectIdLike) {
    delete this.streamInExecutionMemo[jobId.toString()];
  }

  /**
   * Persist the terminal status implied by `action`, notify the requester,
   * then release all of the job's resources. Save/notification failures are
   * logged but never prevent the cleanup from running.
   */
  async notifyExportResultAndCleanUp(
    action: SupportedActionType,
    auditLogBulkExportJob: AuditLogBulkExportJobDocument,
  ): Promise<void> {
    auditLogBulkExportJob.status =
      action === SupportedAction.ACTION_AUDIT_LOG_BULK_EXPORT_COMPLETED
        ? AuditLogBulkExportJobStatus.completed
        : AuditLogBulkExportJobStatus.failed;

    try {
      await auditLogBulkExportJob.save();
      await this.notifyExportResult(auditLogBulkExportJob, action);
    } catch (err) {
      logger.error(err);
    }
    await this.cleanUpExportJobResources(auditLogBulkExportJob);
  }

  // Create an Activity record for the result and emit it so the requesting
  // user receives an in-app notification.
  private async notifyExportResult(
    auditLogBulkExportJob: AuditLogBulkExportJobDocument,
    action: SupportedActionType,
  ) {
    logger.debug(
      'Creating activity with targetModel:',
      SupportedTargetModel.MODEL_AUDIT_LOG_BULK_EXPORT_JOB,
    );
    const activity = await this.crowi.activityService.createActivity({
      action,
      targetModel: SupportedTargetModel.MODEL_AUDIT_LOG_BULK_EXPORT_JOB,
      target: auditLogBulkExportJob,
      user: auditLogBulkExportJob.user,
      snapshot: {
        username: isPopulated(auditLogBulkExportJob.user)
          ? auditLogBulkExportJob.user.username
          : '',
      },
    });
    // Notify the job owner in addition to the default audience
    const getAdditionalTargetUsers = async (activity: ActivityDocument) => [
      activity.user,
    ];
    const preNotify = preNotifyService.generatePreNotify(
      activity,
      getAdditionalTargetUsers,
    );
    this.activityEvent.emit(
      'updated',
      activity,
      auditLogBulkExportJob,
      preNotify,
    );
  }

  /**
   * Route an error to the matching notification/cleanup path.
   * - expired job: notify "expired" and clean up
   * - restarted job: clean up quietly (restart is not a failure)
   * - anything else: notify "failed" and clean up
   * No-op when err is null.
   */
  async handleError(
    err: Error | null,
    auditLogBulkExportJob: AuditLogBulkExportJobDocument,
  ) {
    if (err == null) return;

    if (err instanceof AuditLogBulkExportJobExpiredError) {
      logger.error(err);
      await this.notifyExportResultAndCleanUp(
        SupportedAction.ACTION_AUDIT_LOG_BULK_EXPORT_JOB_EXPIRED,
        auditLogBulkExportJob,
      );
    } else if (err instanceof AuditLogBulkExportJobRestartedError) {
      logger.info(err.message);
      await this.cleanUpExportJobResources(auditLogBulkExportJob);
    } else {
      logger.error(err);
      await this.notifyExportResultAndCleanUp(
        SupportedAction.ACTION_AUDIT_LOG_BULK_EXPORT_FAILED,
        auditLogBulkExportJob,
      );
    }
  }

  /**
   * Destroy the job's in-flight stream (with an error that signals why) and
   * remove its temp output directory. Removal failures are logged only.
   */
  async cleanUpExportJobResources(
    auditLogBulkExportJob: AuditLogBulkExportJobDocument,
    restarted = false,
  ) {
    const streamInExecution = this.getStreamInExecution(
      auditLogBulkExportJob._id,
    );
    if (streamInExecution != null) {
      // Destroying with a typed error lets pipeline callbacks distinguish
      // restart from expiration via handleError()
      if (restarted) {
        streamInExecution.destroy(new AuditLogBulkExportJobRestartedError());
      } else {
        streamInExecution.destroy(new AuditLogBulkExportJobExpiredError());
      }
      this.removeStreamInExecution(auditLogBulkExportJob._id);
    }

    const promises = [
      fs.promises.rm(this.getTmpOutputDir(auditLogBulkExportJob), {
        recursive: true,
        force: true,
      }),
    ];

    const results = await Promise.allSettled(promises);
    results.forEach((result) => {
      if (result.status === 'rejected') logger.error(result.reason);
    });
  }
}
+
// Module-level singleton; assigned by instantiate() during server bootstrap
// and undefined until then.
// eslint-disable-next-line import/no-mutable-exports
export let auditLogBulkExportJobCronService:
  | AuditLogBulkExportJobCronService
  | undefined;
/**
 * Create the singleton cron service. Intended to be called exactly once at
 * application startup.
 */
export default function instantiate(crowi: Crowi): void {
  auditLogBulkExportJobCronService = new AuditLogBulkExportJobCronService(
    crowi,
  );
}

+ 104 - 0
apps/app/src/features/audit-log-bulk-export/server/service/audit-log-bulk-export-job-cron/steps/compress-and-upload.ts

@@ -0,0 +1,104 @@
+import type { IUser } from '@growi/core';
+import type { Archiver } from 'archiver';
+import archiver from 'archiver';
+
+import { AuditLogBulkExportJobStatus } from '~/features/audit-log-bulk-export/interfaces/audit-log-bulk-export';
+import { SupportedAction } from '~/interfaces/activity';
+import { AttachmentType } from '~/server/interfaces/attachment';
+import {
+  Attachment,
+  type IAttachmentDocument,
+} from '~/server/models/attachment';
+import type { FileUploader } from '~/server/service/file-uploader';
+import loggerFactory from '~/utils/logger';
+
+import type { AuditLogBulkExportJobDocument } from '../../../models/audit-log-bulk-export-job';
+import type { IAuditLogBulkExportJobCronService } from '..';
+
+const logger = loggerFactory(
+  'growi:service:audit-log-export-job-cron:compress-and-upload-async',
+);
+
+function setUpAuditLogArchiver(
+  this: IAuditLogBulkExportJobCronService,
+): Archiver {
+  const auditLogArchiver = archiver(this.compressFormat, {
+    zlib: { level: this.compressLevel },
+  });
+
+  // good practice to catch warnings (ie stat failures and other non-blocking errors)
+  auditLogArchiver.on('warning', (err) => {
+    if (err.code === 'ENOENT') {
+      logger.error(err);
+    } else {
+      auditLogArchiver.emit('error', err);
+    }
+  });
+
+  return auditLogArchiver;
+}
+
+async function postProcess(
+  this: IAuditLogBulkExportJobCronService,
+  auditLogBulkExportJob: AuditLogBulkExportJobDocument,
+  attachment: IAttachmentDocument,
+  fileSize: number,
+): Promise<void> {
+  attachment.fileSize = fileSize;
+  await attachment.save();
+
+  auditLogBulkExportJob.completedAt = new Date();
+  auditLogBulkExportJob.attachment = attachment._id;
+  auditLogBulkExportJob.status = AuditLogBulkExportJobStatus.completed;
+  await auditLogBulkExportJob.save();
+
+  this.removeStreamInExecution(auditLogBulkExportJob._id);
+  await this.notifyExportResultAndCleanUp(
+    SupportedAction.ACTION_AUDIT_LOG_BULK_EXPORT_COMPLETED,
+    auditLogBulkExportJob,
+  );
+}
+
+/**
+ * Execute a pipeline that reads the audit log files from the temporal fs directory,
+ * compresses them into a zip file, and uploads to the cloud storage.
+ */
+export async function compressAndUpload(
+  this: IAuditLogBulkExportJobCronService,
+  user: IUser,
+  job: AuditLogBulkExportJobDocument,
+): Promise<void> {
+  const auditLogArchiver = setUpAuditLogArchiver.bind(this)();
+
+  if (job.filterHash == null) throw new Error('filterHash is not set');
+
+  const originalName = `audit-logs-${job.filterHash}.zip`;
+  const attachment = Attachment.createWithoutSave(
+    null,
+    user,
+    originalName,
+    this.compressFormat,
+    0,
+    AttachmentType.AUDIT_LOG_BULK_EXPORT,
+  );
+  const fileUploadService: FileUploader = this.crowi.fileUploadService;
+
+  auditLogArchiver.directory(this.getTmpOutputDir(job), false);
+  auditLogArchiver.finalize();
+
+  this.setStreamInExecution(job._id, auditLogArchiver);
+  try {
+    await fileUploadService.uploadAttachment(auditLogArchiver, attachment);
+  } catch (e) {
+    logger.error(e);
+    try {
+      await this.handleError(e as Error, job);
+    } catch (handleErrorErr) {
+      logger.error('Error in handleError:', handleErrorErr);
+    }
+    job.status = AuditLogBulkExportJobStatus.failed;
+    await job.save();
+    return;
+  }
+  await postProcess.bind(this)(job, attachment, auditLogArchiver.pointer());
+}

+ 139 - 0
apps/app/src/features/audit-log-bulk-export/server/service/audit-log-bulk-export-job-cron/steps/exportAuditLogsToFsAsync.ts

@@ -0,0 +1,139 @@
+import fs from 'node:fs';
+import path from 'node:path';
+import type { Readable } from 'node:stream';
+import { pipeline, Writable } from 'node:stream';
+import type { FilterQuery } from 'mongoose';
+
+import { AuditLogBulkExportJobStatus } from '~/features/audit-log-bulk-export/interfaces/audit-log-bulk-export';
+import { SupportedAction } from '~/interfaces/activity';
+import Activity, { type ActivityDocument } from '~/server/models/activity';
+
+import type { AuditLogBulkExportJobDocument } from '../../../models/audit-log-bulk-export-job';
+import type { IAuditLogBulkExportJobCronService } from '..';
+
+/**
+ * Get a Writable that writes audit logs to JSON files
+ */
+function getAuditLogWritable(
+  this: IAuditLogBulkExportJobCronService,
+  job: AuditLogBulkExportJobDocument,
+): Writable {
+  const outputDir = this.getTmpOutputDir(job);
+  let buffer: ActivityDocument[] = [];
+  let fileIndex = 0;
+  return new Writable({
+    objectMode: true,
+    write: async (log: ActivityDocument, _encoding, callback) => {
+      try {
+        buffer.push(log);
+
+        // Update lastExportedId for resumability
+        job.lastExportedId = log._id.toString();
+        job.totalExportedCount = (job.totalExportedCount || 0) + 1;
+
+        if (buffer.length >= this.maxLogsPerFile) {
+          const filePath = path.join(
+            outputDir,
+            `audit-logs-${job._id.toString()}-${String(fileIndex).padStart(2, '0')}.json`,
+          );
+          await fs.promises.mkdir(path.dirname(filePath), { recursive: true });
+          await fs.promises.writeFile(
+            filePath,
+            JSON.stringify(buffer, null, 2),
+          );
+
+          await job.save();
+
+          buffer = [];
+          fileIndex++;
+        }
+      } catch (err) {
+        callback(err as Error);
+        return;
+      }
+      callback();
+    },
+    final: async (callback) => {
+      try {
+        if (buffer.length > 0) {
+          const filePath = path.join(
+            outputDir,
+            `audit-logs-${job._id.toString()}-${String(fileIndex).padStart(2, '0')}.json`,
+          );
+          await fs.promises.mkdir(path.dirname(filePath), { recursive: true });
+          await fs.promises.writeFile(
+            filePath,
+            JSON.stringify(buffer, null, 2),
+          );
+        }
+        job.status = AuditLogBulkExportJobStatus.uploading;
+        await job.save();
+      } catch (err) {
+        callback(err as Error);
+        return;
+      }
+      callback();
+    },
+  });
+}
+
+/**
+ * Export audit logs to the file system before compressing and uploading.
+ */
+export async function exportAuditLogsToFsAsync(
+  this: IAuditLogBulkExportJobCronService,
+  job: AuditLogBulkExportJobDocument,
+): Promise<void> {
+  const filters = job.filters ?? {};
+  const query: FilterQuery<ActivityDocument> = {};
+
+  // Build query filters for searching activity logs based on user-defined filters
+  if (filters.actions && filters.actions.length > 0) {
+    query.action = { $in: filters.actions };
+  }
+  if (filters.dateFrom || filters.dateTo) {
+    query.createdAt = {};
+    if (filters.dateFrom) {
+      query.createdAt.$gte = new Date(filters.dateFrom);
+    }
+    if (filters.dateTo) {
+      query.createdAt.$lte = new Date(filters.dateTo);
+    }
+  }
+  if (filters.users && filters.users.length > 0) {
+    query.user = { $in: filters.users };
+  }
+
+  // If the previous export was incomplete, resume from the last exported ID by adding it to the query filter
+  if (job.lastExportedId) {
+    query._id = { $gt: job.lastExportedId };
+  }
+
+  const hasAny = await Activity.exists(query);
+  if (!hasAny) {
+    job.totalExportedCount = 0;
+    job.status = AuditLogBulkExportJobStatus.completed;
+    job.lastExportedId = undefined;
+    await job.save();
+
+    await this.notifyExportResultAndCleanUp(
+      SupportedAction.ACTION_AUDIT_LOG_BULK_EXPORT_NO_RESULTS,
+      job,
+    );
+    return;
+  }
+
+  const logsCursor = Activity.find(query)
+
+    .sort({ _id: 1 })
+    .lean()
+    .cursor({ batchSize: this.pageBatchSize });
+
+  const writable = getAuditLogWritable.bind(this)(job);
+
+  this.setStreamInExecution(job._id, logsCursor as unknown as Readable);
+
+  pipeline(logsCursor, writable, (err) => {
+    this.handleError(err, job);
+  });
+}

+ 335 - 0
apps/app/src/features/audit-log-bulk-export/server/service/audit-log-bulk-export.integ.ts

@@ -0,0 +1,335 @@
+import mongoose from 'mongoose';
+
+import type { SupportedActionType } from '~/interfaces/activity';
+import { configManager } from '~/server/service/config-manager';
+
+import {
+  AuditLogBulkExportFormat,
+  AuditLogBulkExportJobStatus,
+} from '../../interfaces/audit-log-bulk-export';
+import AuditLogBulkExportJob from '../models/audit-log-bulk-export-job';
+import {
+  auditLogBulkExportService,
+  DuplicateAuditLogBulkExportJobError,
+} from './audit-log-bulk-export';
+
// Minimal stand-in User schema/model for this integration test; only the
// fields the export service reads (username for filter resolution) plus
// name/email for fixture readability.
const userSchema = new mongoose.Schema(
  {
    name: { type: String },
    username: { type: String, required: true, unique: true },
    email: { type: String, unique: true, sparse: true },
  },
  {
    timestamps: true,
  },
);
const User = mongoose.model('User', userSchema);
+
describe('AuditLogBulkExportService', () => {
  // biome-ignore lint/suspicious/noImplicitAnyLet: ignore
  let user;

  // Load configs once and create the user that owns the export jobs under test
  beforeAll(async () => {
    await configManager.loadConfigs();
    user = await User.create({
      name: 'Example for AuditLogBulkExportService Test',
      username: 'audit bulk export test user',
      email: 'auditBulkExportTestUser@example.com',
    });
  });

  // Isolate tests from each other: drop every job created by the previous test
  afterEach(async () => {
    await AuditLogBulkExportJob.deleteMany({});
  });

  afterAll(async () => {
    await User.deleteOne({ _id: user._id });
  });

  describe('createOrResetExportJob', () => {
    describe('normal cases', () => {
      it('should create a new export job with valid parameters', async () => {
        const filters: {
          actions: SupportedActionType[];
          dateFrom: Date;
          dateTo: Date;
        } = {
          actions: ['PAGE_VIEW', 'PAGE_CREATE'],
          dateFrom: new Date('2023-01-01'),
          dateTo: new Date('2023-12-31'),
        };

        const jobId = await auditLogBulkExportService.createOrResetExportJob(
          filters,
          AuditLogBulkExportFormat.json,
          user._id,
        );

        // Returned id is a 24-char hex ObjectId string
        expect(jobId).toMatch(/^[0-9a-fA-F]{24}$/);

        const createdJob = await AuditLogBulkExportJob.findById(jobId);
        expect(createdJob).toBeDefined();
        expect(createdJob?.user).toEqual(user._id);
        expect(createdJob?.format).toBe(AuditLogBulkExportFormat.json);
        expect(createdJob?.status).toBe(AuditLogBulkExportJobStatus.exporting);
        expect(createdJob?.totalExportedCount).toBe(0);
        expect(createdJob?.filters).toMatchObject({
          actions: ['PAGE_VIEW', 'PAGE_CREATE'],
          dateFrom: new Date('2023-01-01T00:00:00.000Z'),
          dateTo: new Date('2023-12-31T00:00:00.000Z'),
        });
      });

      it('should create a job with minimal filters', async () => {
        const filters: { actions: SupportedActionType[] } = {
          actions: ['PAGE_VIEW'],
        };

        const jobId = await auditLogBulkExportService.createOrResetExportJob(
          filters,
          AuditLogBulkExportFormat.json,
          user._id,
        );

        const createdJob = await AuditLogBulkExportJob.findById(jobId);
        expect(createdJob).toBeDefined();
        expect(createdJob?.format).toBe(AuditLogBulkExportFormat.json);
        expect(createdJob?.filters).toMatchObject({
          actions: ['PAGE_VIEW'],
        });
      });

      it('should create a job with user filters', async () => {
        const filters: { usernames: string[]; actions: SupportedActionType[] } =
          {
            usernames: [user.username],
            actions: ['PAGE_CREATE'],
          };

        const jobId = await auditLogBulkExportService.createOrResetExportJob(
          filters,
          AuditLogBulkExportFormat.json,
          user._id,
        );

        // Usernames in the request must be resolved to user ids on the stored job
        const createdJob = await AuditLogBulkExportJob.findById(jobId);
        expect(createdJob?.filters.actions).toEqual(['PAGE_CREATE']);
        expect(createdJob?.filters.users?.map(String)).toContain(
          user._id.toString(),
        );
      });

      it('should reset existing job when restartJob is true', async () => {
        const filters: { actions: SupportedActionType[] } = {
          actions: ['PAGE_VIEW'],
        };

        const firstJobId =
          await auditLogBulkExportService.createOrResetExportJob(
            filters,
            AuditLogBulkExportFormat.json,
            user._id,
          );

        const secondJobId =
          await auditLogBulkExportService.createOrResetExportJob(
            filters,
            AuditLogBulkExportFormat.json,
            user._id,
            true,
          );

        // The same job is reused; it is only flagged for restart
        expect(secondJobId).toBe(firstJobId);

        const job = await AuditLogBulkExportJob.findById(firstJobId);
        expect(job?.restartFlag).toBe(true);
      });
    });

    describe('error cases', () => {
      it('should throw DuplicateAuditLogBulkExportJobError when duplicate job exists', async () => {
        const filters: { actions: SupportedActionType[] } = {
          actions: ['PAGE_VIEW'],
        };

        await auditLogBulkExportService.createOrResetExportJob(
          filters,
          AuditLogBulkExportFormat.json,
          user._id,
        );

        await expect(
          auditLogBulkExportService.createOrResetExportJob(
            filters,
            AuditLogBulkExportFormat.json,
            user._id,
          ),
        ).rejects.toThrow(DuplicateAuditLogBulkExportJobError);
      });

      it('should allow creating job with same filters for different user', async () => {
        const anotherUser = await User.create({
          name: 'Another User',
          username: 'another user',
          email: 'another@example.com',
        });

        const filters: { actions: SupportedActionType[] } = {
          actions: ['PAGE_VIEW'],
        };

        const firstJobId =
          await auditLogBulkExportService.createOrResetExportJob(
            filters,
            AuditLogBulkExportFormat.json,
            user._id,
          );

        const secondJobId =
          await auditLogBulkExportService.createOrResetExportJob(
            filters,
            AuditLogBulkExportFormat.json,
            anotherUser._id,
          );

        // Duplicate detection is scoped per user
        expect(firstJobId).not.toBe(secondJobId);

        await User.deleteOne({ _id: anotherUser._id });
      });

      it('should allow creating job with different filters for same user', async () => {
        const firstFilters: { actions: SupportedActionType[] } = {
          actions: ['PAGE_VIEW'],
        };
        const secondFilters: { actions: SupportedActionType[] } = {
          actions: ['PAGE_CREATE'],
        };

        const firstJobId =
          await auditLogBulkExportService.createOrResetExportJob(
            firstFilters,
            AuditLogBulkExportFormat.json,
            user._id,
          );

        const secondJobId =
          await auditLogBulkExportService.createOrResetExportJob(
            secondFilters,
            AuditLogBulkExportFormat.json,
            user._id,
          );

        expect(firstJobId).not.toBe(secondJobId);
      });

      it('should not throw error if previous job is completed', async () => {
        const filters: { actions: SupportedActionType[] } = {
          actions: ['PAGE_VIEW'],
        };

        const firstJobId =
          await auditLogBulkExportService.createOrResetExportJob(
            filters,
            AuditLogBulkExportFormat.json,
            user._id,
          );

        // Finish the first job; only in-progress jobs count as duplicates
        const firstJob = await AuditLogBulkExportJob.findById(firstJobId);
        if (firstJob) {
          firstJob.status = AuditLogBulkExportJobStatus.completed;
          await firstJob.save();
        }

        const secondJobId =
          await auditLogBulkExportService.createOrResetExportJob(
            filters,
            AuditLogBulkExportFormat.json,
            user._id,
          );

        expect(secondJobId).not.toBe(firstJobId);
      });
    });
  });

  describe('resetExportJob', () => {
    it('should set restartFlag to true', async () => {
      const filters = { actions: ['PAGE_VIEW'] as SupportedActionType[] };

      const jobId = await auditLogBulkExportService.createOrResetExportJob(
        filters,
        AuditLogBulkExportFormat.json,
        user._id,
      );

      const job = await AuditLogBulkExportJob.findById(jobId);
      expect(job?.restartFlag).toBeFalsy();

      if (job) {
        await auditLogBulkExportService.resetExportJob(job);
      }

      const updatedJob = await AuditLogBulkExportJob.findById(jobId);
      expect(updatedJob?.restartFlag).toBe(true);
    });
  });

  // Logically equivalent filters must hash identically, so a second request
  // is detected as a duplicate of the first
  describe('filter canonicalization', () => {
    it('should generate same job for logically equivalent filters', async () => {
      // Same sets, different element order
      const filters1: { actions: SupportedActionType[]; usernames: string[] } =
        {
          actions: ['PAGE_VIEW', 'PAGE_CREATE'],
          usernames: ['alice', 'bob'],
        };

      const filters2: { actions: SupportedActionType[]; usernames: string[] } =
        {
          actions: ['PAGE_CREATE', 'PAGE_VIEW'],
          usernames: ['bob', 'alice'],
        };

      await auditLogBulkExportService.createOrResetExportJob(
        filters1,
        AuditLogBulkExportFormat.json,
        user._id,
      );

      await expect(
        auditLogBulkExportService.createOrResetExportJob(
          filters2,
          AuditLogBulkExportFormat.json,
          user._id,
        ),
      ).rejects.toThrow(DuplicateAuditLogBulkExportJobError);
    });

    it('should normalize date formats consistently', async () => {
      const dateString = '2023-01-01T00:00:00.000Z';
      const dateObject = new Date(dateString);

      const filters1: { actions: SupportedActionType[]; dateFrom: Date } = {
        actions: ['PAGE_VIEW'],
        dateFrom: new Date(dateString),
      };

      const filters2: { actions: SupportedActionType[]; dateFrom: Date } = {
        actions: ['PAGE_VIEW'],
        dateFrom: dateObject,
      };

      await auditLogBulkExportService.createOrResetExportJob(
        filters1,
        AuditLogBulkExportFormat.json,
        user._id,
      );

      await expect(
        auditLogBulkExportService.createOrResetExportJob(
          filters2,
          AuditLogBulkExportFormat.json,
          user._id,
        ),
      ).rejects.toThrow(DuplicateAuditLogBulkExportJobError);
    });
  });
});

+ 135 - 0
apps/app/src/features/audit-log-bulk-export/server/service/audit-log-bulk-export.ts

@@ -0,0 +1,135 @@
+import { createHash } from 'node:crypto';
+import mongoose from 'mongoose';
+
+import type {
+  AuditLogBulkExportFormat,
+  IAuditLogBulkExportFilters,
+  IAuditLogBulkExportRequestFilters,
+} from '../../interfaces/audit-log-bulk-export';
+import {
+  AuditLogBulkExportJobInProgressJobStatus,
+  AuditLogBulkExportJobStatus,
+} from '../../interfaces/audit-log-bulk-export';
+import type { AuditLogBulkExportJobDocument } from '../models/audit-log-bulk-export-job';
+import AuditLogBulkExportJob from '../models/audit-log-bulk-export-job';
+
/**
 * Public contract of the audit-log bulk export service.
 */
export interface IAuditLogBulkExportService {
  /**
   * Create an export job for the given filters, or — when `restartJob` is
   * true — flag the equivalent in-progress job for restart.
   * @returns the job id as a hex string
   */
  createOrResetExportJob: (
    requestFilters: IAuditLogBulkExportRequestFilters,
    format: AuditLogBulkExportFormat,
    currentUser,
    restartJob?: boolean,
  ) => Promise<string>;
  /** Flag an in-progress job so the cron worker restarts it from scratch */
  resetExportJob: (job: AuditLogBulkExportJobDocument) => Promise<void>;
}
+
+/** ============================== utils ============================== */
+
+/**
+ * Normalizes filter values to ensure that logically equivalent filters,
+ * regardless of order or formatting differences, generate the same hash.
+ */
+function canonicalizeFilters(filters: IAuditLogBulkExportFilters) {
+  const normalized: Record<string, unknown> = {};
+
+  if (filters.users?.length) {
+    normalized.users = filters.users.map(String).sort();
+  }
+  if (filters.actions?.length) {
+    normalized.actions = [...filters.actions].sort();
+  }
+  if (filters.dateFrom) {
+    normalized.dateFrom = new Date(filters.dateFrom).toISOString();
+  }
+  if (filters.dateTo) {
+    normalized.dateTo = new Date(filters.dateTo).toISOString();
+  }
+  return normalized;
+}
+
+/**
+ * Generates a SHA-256 hash used to uniquely identify a set of filters.
+ * Requests with the same input produce the same hash value,
+ * preventing duplicate audit-log export jobs from being executed.
+ */
+function sha256(input: string): string {
+  return createHash('sha256').update(input).digest('hex');
+}
+
+/** ============================== error ============================== */
+
+export class DuplicateAuditLogBulkExportJobError extends Error {
+  duplicateJob: AuditLogBulkExportJobDocument;
+
+  constructor(duplicateJob: AuditLogBulkExportJobDocument) {
+    super('Duplicate audit-log bulk export job is in progress');
+    this.duplicateJob = duplicateJob;
+  }
+}
+
+/** ============================== service ============================== */
+
+class AuditLogBulkExportService implements IAuditLogBulkExportService {
+  /**
+   * Create a new audit-log bulk export job or reset the existing one
+   */
+  async createOrResetExportJob(
+    requestFilters: IAuditLogBulkExportRequestFilters,
+    format: AuditLogBulkExportFormat,
+    currentUser,
+    restartJob?: boolean,
+  ): Promise<string> {
+    const filters: IAuditLogBulkExportFilters = {
+      actions: requestFilters.actions,
+      dateFrom: requestFilters.dateFrom,
+      dateTo: requestFilters.dateTo,
+    };
+    if (requestFilters.usernames?.length) {
+      const User = mongoose.model('User');
+      const userIds = await User.find({
+        username: { $in: requestFilters.usernames },
+      }).distinct('_id');
+      filters.users = userIds;
+    }
+
+    const normalizedFilters = canonicalizeFilters(filters);
+    const filterHash = sha256(JSON.stringify(normalizedFilters));
+
+    const duplicateInProgress: AuditLogBulkExportJobDocument | null =
+      await AuditLogBulkExportJob.findOne({
+        user: { $eq: currentUser },
+        filterHash,
+        $or: Object.values(AuditLogBulkExportJobInProgressJobStatus).map(
+          (status) => ({ status }),
+        ),
+      });
+
+    if (duplicateInProgress != null) {
+      if (restartJob) {
+        await this.resetExportJob(duplicateInProgress);
+        return duplicateInProgress._id.toString();
+      }
+      throw new DuplicateAuditLogBulkExportJobError(duplicateInProgress);
+    }
+
+    const createdJob = await AuditLogBulkExportJob.create({
+      user: currentUser,
+      filters,
+      filterHash,
+      format,
+      status: AuditLogBulkExportJobStatus.exporting,
+      totalExportedCount: 0,
+    });
+    return createdJob._id.toString();
+  }
+
+  /**
+   * Reset audit-log export job in progress
+   */
+  async resetExportJob(job: AuditLogBulkExportJobDocument): Promise<void> {
+    job.restartFlag = true;
+    await job.save();
+  }
+}
+
+export const auditLogBulkExportService = new AuditLogBulkExportService(); // singleton

+ 42 - 0
apps/app/src/features/audit-log-bulk-export/server/service/check-audit-log-bulk-export-job-in-progress-cron.ts

@@ -0,0 +1,42 @@
+import { configManager } from '~/server/service/config-manager';
+import CronService from '~/server/service/cron';
+
+import { AuditLogBulkExportJobInProgressJobStatus } from '../../interfaces/audit-log-bulk-export';
+import AuditLogExportJob from '../models/audit-log-bulk-export-job';
+import { auditLogBulkExportJobCronService } from './audit-log-bulk-export-job-cron';
+
+/**
+ * Manages cronjob which checks if AuditLogExportJob in progress exists.
+ * If it does, and AuditLogExportJobCronService is not running, start AuditLogExportJobCronService
+ */
+/**
+ * Cron service that watches for in-progress AuditLogBulkExportJob documents.
+ *
+ * When at least one job is in progress and auditLogBulkExportJobCronService
+ * is not running, it is started; when no job is in progress, it is stopped.
+ */
+class CheckAuditLogBulkExportJobInProgressCronService extends CronService {
+  /** Runs every 3 minutes */
+  override getCronSchedule(): string {
+    return '*/3 * * * *';
+  }
+
+  override async executeJob(): Promise<void> {
+    // Do nothing while the audit-log feature is disabled
+    const isAuditLogEnabled = configManager.getConfig('app:auditLogEnabled');
+    if (!isAuditLogEnabled) return;
+
+    // $in over the in-progress statuses is equivalent to an $or of equalities
+    const auditLogExportJobInProgress = await AuditLogExportJob.findOne({
+      status: {
+        $in: Object.values(AuditLogBulkExportJobInProgressJobStatus),
+      },
+    });
+    const auditLogExportInProgressExists = auditLogExportJobInProgress != null;
+
+    if (
+      auditLogExportInProgressExists &&
+      !auditLogBulkExportJobCronService?.isJobRunning()
+    ) {
+      auditLogBulkExportJobCronService?.startCron();
+    } else if (!auditLogExportInProgressExists) {
+      auditLogBulkExportJobCronService?.stopCron();
+    }
+  }
+}
+
+// NOTE(review): the export name drops "Bulk" (cf. the class name and
+// checkPageBulkExportJobInProgressCronService); consider renaming together
+// with its importers (e.g. server/crowi/index.ts).
+export const checkAuditLogExportJobInProgressCronService =
+  new CheckAuditLogBulkExportJobInProgressCronService();

+ 1 - 2
apps/app/src/features/openai/server/services/openai.ts

@@ -13,7 +13,6 @@ import {
 } from '@growi/core';
 } from '@growi/core';
 import { deepEquals } from '@growi/core/dist/utils';
 import { deepEquals } from '@growi/core/dist/utils';
 import { isGlobPatternPath } from '@growi/core/dist/utils/page-path-utils';
 import { isGlobPatternPath } from '@growi/core/dist/utils/page-path-utils';
-import escapeStringRegexp from 'escape-string-regexp';
 import createError from 'http-errors';
 import createError from 'http-errors';
 import mongoose, { type HydratedDocument, type Types } from 'mongoose';
 import mongoose, { type HydratedDocument, type Types } from 'mongoose';
 import type { OpenAI } from 'openai';
 import type { OpenAI } from 'openai';
@@ -78,7 +77,7 @@ const convertPathPatternsToRegExp = (
   return pagePathPatterns.map((pagePathPattern) => {
   return pagePathPatterns.map((pagePathPattern) => {
     if (isGlobPatternPath(pagePathPattern)) {
     if (isGlobPatternPath(pagePathPattern)) {
       const trimedPagePathPattern = pagePathPattern.replace('/*', '');
       const trimedPagePathPattern = pagePathPattern.replace('/*', '');
-      const escapedPagePathPattern = escapeStringRegexp(trimedPagePathPattern);
+      const escapedPagePathPattern = RegExp.escape(trimedPagePathPattern);
       // https://regex101.com/r/x5KIZL/1
       // https://regex101.com/r/x5KIZL/1
       return new RegExp(`^${escapedPagePathPattern}($|/)`);
       return new RegExp(`^${escapedPagePathPattern}($|/)`);
     }
     }

+ 9 - 8
apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-cron/steps/compress-and-upload.ts

@@ -53,7 +53,12 @@ async function postProcess(
 }
 }
 
 
 /**
 /**
- * Execute a pipeline that reads the page files from the temporal fs directory, compresses them, and uploads to the cloud storage
+ * Compress page files into a tar.gz archive and upload to cloud storage.
+ *
+ * Wraps archiver output with PassThrough to provide a Node.js native Readable,
+ * since archiver uses npm's readable-stream which fails AWS SDK's instanceof check.
+ * The Content-Length / Transfer-Encoding issue is resolved by aws/index.ts using
+ * the Upload class from @aws-sdk/lib-storage.
  */
  */
 export async function compressAndUpload(
 export async function compressAndUpload(
   this: IPageBulkExportJobCronService,
   this: IPageBulkExportJobCronService,
@@ -78,12 +83,11 @@ export async function compressAndUpload(
 
 
   // Wrap with Node.js native PassThrough so that AWS SDK recognizes the stream as a native Readable
   // Wrap with Node.js native PassThrough so that AWS SDK recognizes the stream as a native Readable
   const uploadStream = new PassThrough();
   const uploadStream = new PassThrough();
-
-  // Establish pipe before finalize to ensure data flows correctly
   pageArchiver.pipe(uploadStream);
   pageArchiver.pipe(uploadStream);
+
   pageArchiver.on('error', (err) => {
   pageArchiver.on('error', (err) => {
+    logger.error('pageArchiver error', err);
     uploadStream.destroy(err);
     uploadStream.destroy(err);
-    pageArchiver.destroy();
   });
   });
 
 
   pageArchiver.directory(this.getTmpOutputDir(pageBulkExportJob), false);
   pageArchiver.directory(this.getTmpOutputDir(pageBulkExportJob), false);
@@ -100,9 +104,6 @@ export async function compressAndUpload(
     );
     );
   } catch (e) {
   } catch (e) {
     logger.error(e);
     logger.error(e);
-    this.handleError(e, pageBulkExportJob);
-  } finally {
-    pageArchiver.destroy();
-    uploadStream.destroy();
+    await this.handleError(e, pageBulkExportJob);
   }
   }
 }
 }

+ 17 - 0
apps/app/src/interfaces/activity.ts

@@ -13,6 +13,7 @@ const MODEL_PAGE = 'Page';
 const MODEL_USER = 'User';
 const MODEL_USER = 'User';
 const MODEL_COMMENT = 'Comment';
 const MODEL_COMMENT = 'Comment';
 const MODEL_PAGE_BULK_EXPORT_JOB = 'PageBulkExportJob';
 const MODEL_PAGE_BULK_EXPORT_JOB = 'PageBulkExportJob';
+const MODEL_AUDIT_LOG_BULK_EXPORT_JOB = 'AuditLogBulkExportJob';
 
 
 // Action
 // Action
 const ACTION_UNSETTLED = 'UNSETTLED';
 const ACTION_UNSETTLED = 'UNSETTLED';
@@ -67,6 +68,13 @@ const ACTION_PAGE_EXPORT = 'PAGE_EXPORT';
 const ACTION_PAGE_BULK_EXPORT_COMPLETED = 'PAGE_BULK_EXPORT_COMPLETED';
 const ACTION_PAGE_BULK_EXPORT_COMPLETED = 'PAGE_BULK_EXPORT_COMPLETED';
 const ACTION_PAGE_BULK_EXPORT_FAILED = 'PAGE_BULK_EXPORT_FAILED';
 const ACTION_PAGE_BULK_EXPORT_FAILED = 'PAGE_BULK_EXPORT_FAILED';
 const ACTION_PAGE_BULK_EXPORT_JOB_EXPIRED = 'PAGE_BULK_EXPORT_JOB_EXPIRED';
 const ACTION_PAGE_BULK_EXPORT_JOB_EXPIRED = 'PAGE_BULK_EXPORT_JOB_EXPIRED';
+const ACTION_AUDIT_LOG_BULK_EXPORT_COMPLETED =
+  'AUDIT_LOG_BULK_EXPORT_COMPLETED';
+const ACTION_AUDIT_LOG_BULK_EXPORT_FAILED = 'AUDIT_LOG_BULK_EXPORT_FAILED';
+const ACTION_AUDIT_LOG_BULK_EXPORT_JOB_EXPIRED =
+  'AUDIT_LOG_BULK_EXPORT_JOB_EXPIRED';
+// Fix: value previously carried a stray 'ACTION_' prefix, unlike every sibling constant
+const ACTION_AUDIT_LOG_BULK_EXPORT_NO_RESULTS =
+  'AUDIT_LOG_BULK_EXPORT_NO_RESULTS';
 const ACTION_TAG_UPDATE = 'TAG_UPDATE';
 const ACTION_TAG_UPDATE = 'TAG_UPDATE';
 const ACTION_IN_APP_NOTIFICATION_ALL_STATUSES_OPEN =
 const ACTION_IN_APP_NOTIFICATION_ALL_STATUSES_OPEN =
   'IN_APP_NOTIFICATION_ALL_STATUSES_OPEN';
   'IN_APP_NOTIFICATION_ALL_STATUSES_OPEN';
@@ -198,6 +206,7 @@ export const SupportedTargetModel = {
   MODEL_PAGE,
   MODEL_PAGE,
   MODEL_USER,
   MODEL_USER,
   MODEL_PAGE_BULK_EXPORT_JOB,
   MODEL_PAGE_BULK_EXPORT_JOB,
+  MODEL_AUDIT_LOG_BULK_EXPORT_JOB,
 } as const;
 } as const;
 
 
 export const SupportedEventModel = {
 export const SupportedEventModel = {
@@ -372,6 +381,10 @@ export const SupportedAction = {
   ACTION_PAGE_BULK_EXPORT_COMPLETED,
   ACTION_PAGE_BULK_EXPORT_COMPLETED,
   ACTION_PAGE_BULK_EXPORT_FAILED,
   ACTION_PAGE_BULK_EXPORT_FAILED,
   ACTION_PAGE_BULK_EXPORT_JOB_EXPIRED,
   ACTION_PAGE_BULK_EXPORT_JOB_EXPIRED,
+  ACTION_AUDIT_LOG_BULK_EXPORT_COMPLETED,
+  ACTION_AUDIT_LOG_BULK_EXPORT_FAILED,
+  ACTION_AUDIT_LOG_BULK_EXPORT_JOB_EXPIRED,
+  ACTION_AUDIT_LOG_BULK_EXPORT_NO_RESULTS,
 } as const;
 } as const;
 
 
 // Action required for notification
 // Action required for notification
@@ -394,6 +407,10 @@ export const EssentialActionGroup = {
   ACTION_PAGE_BULK_EXPORT_COMPLETED,
   ACTION_PAGE_BULK_EXPORT_COMPLETED,
   ACTION_PAGE_BULK_EXPORT_FAILED,
   ACTION_PAGE_BULK_EXPORT_FAILED,
   ACTION_PAGE_BULK_EXPORT_JOB_EXPIRED,
   ACTION_PAGE_BULK_EXPORT_JOB_EXPIRED,
+  ACTION_AUDIT_LOG_BULK_EXPORT_COMPLETED,
+  ACTION_AUDIT_LOG_BULK_EXPORT_FAILED,
+  ACTION_AUDIT_LOG_BULK_EXPORT_JOB_EXPIRED,
+  ACTION_AUDIT_LOG_BULK_EXPORT_NO_RESULTS,
 } as const;
 } as const;
 
 
 export const ActionGroupSize = {
 export const ActionGroupSize = {

+ 12 - 0
apps/app/src/interfaces/session-config.ts

@@ -0,0 +1,12 @@
+/**
+ * Shape of the HTTP session configuration object.
+ *
+ * Field names mirror express-session's options — verify against its docs
+ * when extending.
+ */
+export interface SessionConfig {
+  rolling: boolean;
+  // Secret used to sign the session cookie
+  secret: string;
+  resave: boolean;
+  saveUninitialized: boolean;
+  cookie: {
+    // Cookie lifetime in milliseconds (assumed — confirm against usage)
+    maxAge: number;
+  };
+  // Generates a session id; only req.path is required by the signature
+  genid: (req: { path: string }) => string;
+  // Optional session cookie name
+  name?: string;
+  // Session store instance; type intentionally open — TODO narrow
+  store?: unknown;
+}

+ 23 - 14
apps/app/src/server/crowi/index.ts

@@ -7,6 +7,9 @@ import lsxRoutes from '@growi/remark-lsx/dist/server/index.cjs';
 import type { Express } from 'express';
 import type { Express } from 'express';
 import mongoose from 'mongoose';
 import mongoose from 'mongoose';
 
 
+import instantiateAuditLogBulkExportJobCleanUpCronService from '~/features/audit-log-bulk-export/server/service/audit-log-bulk-export-job-clean-up-cron';
+import instantiateAuditLogBulkExportJobCronService from '~/features/audit-log-bulk-export/server/service/audit-log-bulk-export-job-cron';
+import { checkAuditLogExportJobInProgressCronService } from '~/features/audit-log-bulk-export/server/service/check-audit-log-bulk-export-job-in-progress-cron';
 import { KeycloakUserGroupSyncService } from '~/features/external-user-group/server/service/keycloak-user-group-sync';
 import { KeycloakUserGroupSyncService } from '~/features/external-user-group/server/service/keycloak-user-group-sync';
 import { LdapUserGroupSyncService } from '~/features/external-user-group/server/service/ldap-user-group-sync';
 import { LdapUserGroupSyncService } from '~/features/external-user-group/server/service/ldap-user-group-sync';
 import { startCronIfEnabled as startOpenaiCronIfEnabled } from '~/features/openai/server/services/cron';
 import { startCronIfEnabled as startOpenaiCronIfEnabled } from '~/features/openai/server/services/cron';
@@ -14,6 +17,7 @@ import { initializeOpenaiService } from '~/features/openai/server/services/opena
 import { checkPageBulkExportJobInProgressCronService } from '~/features/page-bulk-export/server/service/check-page-bulk-export-job-in-progress-cron';
 import { checkPageBulkExportJobInProgressCronService } from '~/features/page-bulk-export/server/service/check-page-bulk-export-job-in-progress-cron';
 import instanciatePageBulkExportJobCleanUpCronService from '~/features/page-bulk-export/server/service/page-bulk-export-job-clean-up-cron';
 import instanciatePageBulkExportJobCleanUpCronService from '~/features/page-bulk-export/server/service/page-bulk-export-job-clean-up-cron';
 import instanciatePageBulkExportJobCronService from '~/features/page-bulk-export/server/service/page-bulk-export-job-cron';
 import instanciatePageBulkExportJobCronService from '~/features/page-bulk-export/server/service/page-bulk-export-job-cron';
+import type { SessionConfig } from '~/interfaces/session-config';
 import { startCron as startAccessTokenCron } from '~/server/service/access-token';
 import { startCron as startAccessTokenCron } from '~/server/service/access-token';
 import { projectRoot } from '~/server/util/project-dir-utils';
 import { projectRoot } from '~/server/util/project-dir-utils';
 import { getGrowiVersion } from '~/utils/growi-version';
 import { getGrowiVersion } from '~/utils/growi-version';
@@ -84,19 +88,6 @@ type CommentServiceType = any;
 type SyncPageStatusServiceType = any;
 type SyncPageStatusServiceType = any;
 type CrowiDevType = any;
 type CrowiDevType = any;
 
 
-interface SessionConfig {
-  rolling: boolean;
-  secret: string;
-  resave: boolean;
-  saveUninitialized: boolean;
-  cookie: {
-    maxAge: number;
-  };
-  genid: (req: { path: string }) => string;
-  name?: string;
-  store?: unknown;
-}
-
 interface CrowiEvents {
 interface CrowiEvents {
   user: UserEvent;
   user: UserEvent;
   page: PageEventType;
   page: PageEventType;
@@ -448,6 +439,20 @@ class Crowi {
     }
     }
     pageBulkExportJobCleanUpCronService.startCron();
     pageBulkExportJobCleanUpCronService.startCron();
 
 
+    instantiateAuditLogBulkExportJobCronService(this);
+    checkAuditLogExportJobInProgressCronService.startCron();
+
+    instantiateAuditLogBulkExportJobCleanUpCronService(this);
+    const { auditLogBulkExportJobCleanUpCronService } = await import(
+      '~/features/audit-log-bulk-export/server/service/audit-log-bulk-export-job-clean-up-cron'
+    );
+    if (auditLogBulkExportJobCleanUpCronService == null) {
+      throw new Error(
+        'auditLogBulkExportJobCleanUpCronService is not initialized',
+      );
+    }
+    auditLogBulkExportJobCleanUpCronService.startCron();
+
     startOpenaiCronIfEnabled();
     startOpenaiCronIfEnabled();
     startAccessTokenCron();
     startAccessTokenCron();
   }
   }
@@ -588,7 +593,11 @@ class Crowi {
     this.socketIoService.attachServer(httpServer);
     this.socketIoService.attachServer(httpServer);
 
 
     // Initialization YjsService
     // Initialization YjsService
-    initializeYjsService(this.socketIoService.io);
+    initializeYjsService(
+      httpServer,
+      this.socketIoService.io,
+      this.sessionConfig,
+    );
 
 
     await this.autoInstall();
     await this.autoInstall();
 
 

+ 2 - 0
apps/app/src/server/interfaces/attachment.ts

@@ -3,6 +3,7 @@ export const AttachmentType = {
   WIKI_PAGE: 'WIKI_PAGE',
   WIKI_PAGE: 'WIKI_PAGE',
   PROFILE_IMAGE: 'PROFILE_IMAGE',
   PROFILE_IMAGE: 'PROFILE_IMAGE',
   PAGE_BULK_EXPORT: 'PAGE_BULK_EXPORT',
   PAGE_BULK_EXPORT: 'PAGE_BULK_EXPORT',
+  AUDIT_LOG_BULK_EXPORT: 'AUDIT_LOG_BULK_EXPORT',
 } as const;
 } as const;
 
 
 export type AttachmentType =
 export type AttachmentType =
@@ -35,4 +36,5 @@ export const FilePathOnStoragePrefix = {
   attachment: 'attachment',
   attachment: 'attachment',
   user: 'user',
   user: 'user',
   pageBulkExport: 'page-bulk-export',
   pageBulkExport: 'page-bulk-export',
+  auditLogBulkExport: 'audit-log-bulk-export',
 } as const;
 } as const;

+ 1 - 2
apps/app/src/server/models/obsolete-page.js

@@ -7,7 +7,6 @@ import {
 import { isUserPage } from '@growi/core/dist/utils/page-path-utils';
 import { isUserPage } from '@growi/core/dist/utils/page-path-utils';
 import { removeHeadingSlash } from '@growi/core/dist/utils/path-utils';
 import { removeHeadingSlash } from '@growi/core/dist/utils/path-utils';
 import { differenceInYears } from 'date-fns/differenceInYears';
 import { differenceInYears } from 'date-fns/differenceInYears';
-import escapeStringRegexp from 'escape-string-regexp';
 
 
 import { Comment } from '~/features/comment/server/models/comment';
 import { Comment } from '~/features/comment/server/models/comment';
 import ExternalUserGroup from '~/features/external-user-group/server/models/external-user-group';
 import ExternalUserGroup from '~/features/external-user-group/server/models/external-user-group';
@@ -688,7 +687,7 @@ export const getPageSchema = (crowi) => {
     const regexpList = pathList.map((path) => {
     const regexpList = pathList.map((path) => {
       const pathWithTrailingSlash = pathUtils.addTrailingSlash(path);
       const pathWithTrailingSlash = pathUtils.addTrailingSlash(path);
       return new RegExp(
       return new RegExp(
-        `^${escapeStringRegexp(pathWithTrailingSlash)}_{1,2}template$`,
+        `^${RegExp.escape(pathWithTrailingSlash)}_{1,2}template$`,
       );
       );
     });
     });
 
 

+ 7 - 8
apps/app/src/server/models/page.ts

@@ -10,7 +10,6 @@ import {
   normalizePath,
   normalizePath,
 } from '@growi/core/dist/utils/path-utils';
 } from '@growi/core/dist/utils/path-utils';
 import assert from 'assert';
 import assert from 'assert';
-import escapeStringRegexp from 'escape-string-regexp';
 import type mongoose from 'mongoose';
 import type mongoose from 'mongoose';
 import type {
 import type {
   AnyObject,
   AnyObject,
@@ -348,7 +347,7 @@ export class PageQueryBuilder {
     const pathNormalized = normalizePath(path);
     const pathNormalized = normalizePath(path);
     const pathWithTrailingSlash = addTrailingSlash(path);
     const pathWithTrailingSlash = addTrailingSlash(path);
 
 
-    const startsPattern = escapeStringRegexp(pathWithTrailingSlash);
+    const startsPattern = RegExp.escape(pathWithTrailingSlash);
 
 
     this.query = this.query.and({
     this.query = this.query.and({
       $or: [
       $or: [
@@ -373,7 +372,7 @@ export class PageQueryBuilder {
 
 
     const pathWithTrailingSlash = addTrailingSlash(path);
     const pathWithTrailingSlash = addTrailingSlash(path);
 
 
-    const startsPattern = escapeStringRegexp(pathWithTrailingSlash);
+    const startsPattern = RegExp.escape(pathWithTrailingSlash);
 
 
     this.query = this.query.and({ path: new RegExp(`^${startsPattern}`) });
     this.query = this.query.and({ path: new RegExp(`^${startsPattern}`) });
 
 
@@ -409,7 +408,7 @@ export class PageQueryBuilder {
       return this;
       return this;
     }
     }
 
 
-    const startsPattern = escapeStringRegexp(path);
+    const startsPattern = RegExp.escape(path);
 
 
     this.query = this.query.and({ path: new RegExp(`^${startsPattern}`) });
     this.query = this.query.and({ path: new RegExp(`^${startsPattern}`) });
 
 
@@ -424,7 +423,7 @@ export class PageQueryBuilder {
       return this;
       return this;
     }
     }
 
 
-    const startsPattern = escapeStringRegexp(str);
+    const startsPattern = RegExp.escape(str);
 
 
     this.query = this.query.and({
     this.query = this.query.and({
       path: new RegExp(`^(?!${startsPattern}).*$`),
       path: new RegExp(`^(?!${startsPattern}).*$`),
@@ -440,7 +439,7 @@ export class PageQueryBuilder {
       return this;
       return this;
     }
     }
 
 
-    const startsPattern = escapeStringRegexp(path);
+    const startsPattern = RegExp.escape(path);
 
 
     this.query = this.query.and({
     this.query = this.query.and({
       path: { $not: new RegExp(`^${startsPattern}(/|$)`) },
       path: { $not: new RegExp(`^${startsPattern}(/|$)`) },
@@ -455,7 +454,7 @@ export class PageQueryBuilder {
       return this;
       return this;
     }
     }
 
 
-    const match = escapeStringRegexp(str);
+    const match = RegExp.escape(str);
 
 
     this.query = this.query.and({ path: new RegExp(`^(?=.*${match}).*$`) });
     this.query = this.query.and({ path: new RegExp(`^(?=.*${match}).*$`) });
 
 
@@ -468,7 +467,7 @@ export class PageQueryBuilder {
       return this;
       return this;
     }
     }
 
 
-    const match = escapeStringRegexp(str);
+    const match = RegExp.escape(str);
 
 
     this.query = this.query.and({ path: new RegExp(`^(?!.*${match}).*$`) });
     this.query = this.query.and({ path: new RegExp(`^(?!.*${match}).*$`) });
 
 

+ 2 - 0
apps/app/src/server/routes/apiv3/index.js

@@ -1,4 +1,5 @@
 import { factory as aiToolsRouteFactory } from '~/features/ai-tools/server/routes/apiv3';
 import { factory as aiToolsRouteFactory } from '~/features/ai-tools/server/routes/apiv3';
+import { factory as auditLogBulkExportRouteFactory } from '~/features/audit-log-bulk-export/server/routes/apiv3';
 import growiPlugin from '~/features/growi-plugin/server/routes/apiv3/admin';
 import growiPlugin from '~/features/growi-plugin/server/routes/apiv3/admin';
 import { factory as openaiRouteFactory } from '~/features/openai/server/routes';
 import { factory as openaiRouteFactory } from '~/features/openai/server/routes';
 import { allreadyInstalledMiddleware } from '~/server/middlewares/application-not-installed';
 import { allreadyInstalledMiddleware } from '~/server/middlewares/application-not-installed';
@@ -186,6 +187,7 @@ module.exports = (crowi, app) => {
       crowi,
       crowi,
     ),
     ),
   );
   );
+  router.use('/audit-log-bulk-export', auditLogBulkExportRouteFactory(crowi));
 
 
   router.use('/openai', openaiRouteFactory(crowi));
   router.use('/openai', openaiRouteFactory(crowi));
 
 

+ 1 - 2
apps/app/src/server/routes/apiv3/users.js

@@ -2,7 +2,6 @@ import { SCOPE } from '@growi/core/dist/interfaces';
 import { ErrorV3 } from '@growi/core/dist/models';
 import { ErrorV3 } from '@growi/core/dist/models';
 import { serializeUserSecurely } from '@growi/core/dist/models/serializers';
 import { serializeUserSecurely } from '@growi/core/dist/models/serializers';
 import { userHomepagePath } from '@growi/core/dist/utils/page-path-utils';
 import { userHomepagePath } from '@growi/core/dist/utils/page-path-utils';
-import escapeStringRegexp from 'escape-string-regexp';
 import express from 'express';
 import express from 'express';
 import { body, query } from 'express-validator';
 import { body, query } from 'express-validator';
 import path from 'pathe';
 import path from 'pathe';
@@ -336,7 +335,7 @@ module.exports = (crowi) => {
 
 
       // Search from input
       // Search from input
       const searchText = req.query.searchText || '';
       const searchText = req.query.searchText || '';
-      const searchWord = new RegExp(escapeStringRegexp(searchText));
+      const searchWord = new RegExp(RegExp.escape(searchText));
       // Sort
       // Sort
       const { sort, sortOrder } = req.query;
       const { sort, sortOrder } = req.query;
       const sortOutput = {
       const sortOutput = {

+ 31 - 20
apps/app/src/server/service/file-uploader/aws/index.ts

@@ -13,6 +13,7 @@ import {
   PutObjectCommand,
   PutObjectCommand,
   S3Client,
   S3Client,
 } from '@aws-sdk/client-s3';
 } from '@aws-sdk/client-s3';
+import { Upload } from '@aws-sdk/lib-storage';
 import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
 import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
 import type { NonBlankString } from '@growi/core/dist/interfaces';
 import type { NonBlankString } from '@growi/core/dist/interfaces';
 import { toNonBlankStringOrUndefined } from '@growi/core/dist/interfaces';
 import { toNonBlankStringOrUndefined } from '@growi/core/dist/interfaces';
@@ -252,30 +253,40 @@ class AwsFileUploader extends AbstractFileUploader {
     const filePath = getFilePathOnStorage(attachment);
     const filePath = getFilePathOnStorage(attachment);
     const contentHeaders = createContentHeaders(attachment);
     const contentHeaders = createContentHeaders(attachment);
 
 
-    try {
-      const uploadTimeout = configManager.getConfig('app:fileUploadTimeout');
+    const uploadTimeout = configManager.getConfig('app:fileUploadTimeout');
 
 
-      await s3.send(
-        new PutObjectCommand({
-          Bucket: getS3Bucket(),
-          Key: filePath,
-          Body: readable,
-          ACL: getS3PutObjectCannedAcl(),
-          // put type and the file name for reference information when uploading
-          ContentType: getContentHeaderValue(contentHeaders, 'Content-Type'),
-          ContentDisposition: getContentHeaderValue(
-            contentHeaders,
-            'Content-Disposition',
-          ),
-        }),
-        { abortSignal: AbortSignal.timeout(uploadTimeout) },
-      );
+    // Use @aws-sdk/lib-storage Upload to handle streaming uploads:
+    // - Resolves archiver's readable-stream (npm) failing AWS SDK's instanceof Readable check
+    // - Avoids Transfer-Encoding: chunked which S3 rejects with 501 (PutObjectCommand issue)
+    // - Under 5MB: falls back to PutObjectCommand internally
+    // - Over 5MB: uses multipart upload (requires s3:AbortMultipartUpload permission)
+    const upload = new Upload({
+      client: s3,
+      params: {
+        Bucket: getS3Bucket(),
+        Key: filePath,
+        Body: readable,
+        ACL: getS3PutObjectCannedAcl(),
+        ContentType: getContentHeaderValue(contentHeaders, 'Content-Type'),
+        ContentDisposition: getContentHeaderValue(
+          contentHeaders,
+          'Content-Disposition',
+        ),
+      },
+    });
+
+    const timeoutId = setTimeout(() => {
+      logger.warn(`Upload timeout: fileName=${attachment.fileName}`);
+      upload.abort();
+    }, uploadTimeout);
+
+    try {
+      await upload.done();
 
 
       logger.debug(
       logger.debug(
         `File upload completed successfully: fileName=${attachment.fileName}`,
         `File upload completed successfully: fileName=${attachment.fileName}`,
       );
       );
     } catch (error) {
     } catch (error) {
-      // Handle timeout error specifically
       if (error.name === 'AbortError') {
       if (error.name === 'AbortError') {
         logger.warn(`Upload timeout: fileName=${attachment.fileName}`, error);
         logger.warn(`Upload timeout: fileName=${attachment.fileName}`, error);
       } else {
       } else {
@@ -284,9 +295,9 @@ class AwsFileUploader extends AbstractFileUploader {
           error,
           error,
         );
         );
       }
       }
-      // Re-throw the error to be handled by the caller.
-      // The pipeline automatically handles stream cleanup on error.
       throw error;
       throw error;
+    } finally {
+      clearTimeout(timeoutId);
     }
     }
   }
   }
 
 

+ 3 - 2
apps/app/src/server/service/in-app-notification.ts

@@ -3,6 +3,7 @@ import { SubscriptionStatusType } from '@growi/core';
 import { subDays } from 'date-fns/subDays';
 import { subDays } from 'date-fns/subDays';
 import type { FilterQuery, Types, UpdateQuery } from 'mongoose';
 import type { FilterQuery, Types, UpdateQuery } from 'mongoose';
 
 
+import type { IAuditLogBulkExportJob } from '~/features/audit-log-bulk-export/interfaces/audit-log-bulk-export';
 import type { IPageBulkExportJob } from '~/features/page-bulk-export/interfaces/page-bulk-export';
 import type { IPageBulkExportJob } from '~/features/page-bulk-export/interfaces/page-bulk-export';
 import { AllEssentialActions } from '~/interfaces/activity';
 import { AllEssentialActions } from '~/interfaces/activity';
 import type { PaginateResult } from '~/interfaces/in-app-notification';
 import type { PaginateResult } from '~/interfaces/in-app-notification';
@@ -48,7 +49,7 @@ export default class InAppNotificationService {
       'updated',
       'updated',
       async (
       async (
         activity: ActivityDocument,
         activity: ActivityDocument,
-        target: IUser | IPage | IPageBulkExportJob,
+        target: IUser | IPage | IPageBulkExportJob | IAuditLogBulkExportJob,
         preNotify: PreNotify,
         preNotify: PreNotify,
       ) => {
       ) => {
         try {
         try {
@@ -224,7 +225,7 @@ export default class InAppNotificationService {
 
 
   createInAppNotification = async function (
   createInAppNotification = async function (
     activity: ActivityDocument,
     activity: ActivityDocument,
-    target: IUser | IPage | IPageBulkExportJob,
+    target: IUser | IPage | IPageBulkExportJob | IAuditLogBulkExportJob,
     preNotify: PreNotify,
     preNotify: PreNotify,
   ): Promise<void> {
   ): Promise<void> {
     const shouldNotification =
     const shouldNotification =

+ 4 - 3
apps/app/src/server/service/in-app-notification/in-app-notification-utils.ts

@@ -1,5 +1,6 @@
 import type { IPage, IUser } from '@growi/core';
 import type { IPage, IUser } from '@growi/core';
 
 
+import type { IAuditLogBulkExportJob } from '~/features/audit-log-bulk-export/interfaces/audit-log-bulk-export';
 import type { IPageBulkExportJob } from '~/features/page-bulk-export/interfaces/page-bulk-export';
 import type { IPageBulkExportJob } from '~/features/page-bulk-export/interfaces/page-bulk-export';
 import { SupportedTargetModel } from '~/interfaces/activity';
 import { SupportedTargetModel } from '~/interfaces/activity';
 import * as pageSerializers from '~/models/serializers/in-app-notification-snapshot/page';
 import * as pageSerializers from '~/models/serializers/in-app-notification-snapshot/page';
@@ -7,14 +8,14 @@ import * as pageBulkExportJobSerializers from '~/models/serializers/in-app-notif
 
 
 const isIPage = (
 const isIPage = (
   targetModel: string,
   targetModel: string,
-  target: IUser | IPage | IPageBulkExportJob,
+  target: IUser | IPage | IPageBulkExportJob | IAuditLogBulkExportJob,
 ): target is IPage => {
 ): target is IPage => {
   return targetModel === SupportedTargetModel.MODEL_PAGE;
   return targetModel === SupportedTargetModel.MODEL_PAGE;
 };
 };
 
 
 const isIPageBulkExportJob = (
 const isIPageBulkExportJob = (
   targetModel: string,
   targetModel: string,
-  target: IUser | IPage | IPageBulkExportJob,
+  target: IUser | IPage | IPageBulkExportJob | IAuditLogBulkExportJob,
 ): target is IPageBulkExportJob => {
 ): target is IPageBulkExportJob => {
   return targetModel === SupportedTargetModel.MODEL_PAGE_BULK_EXPORT_JOB;
   return targetModel === SupportedTargetModel.MODEL_PAGE_BULK_EXPORT_JOB;
 };
 };
@@ -22,7 +23,7 @@ const isIPageBulkExportJob = (
 // snapshots are infos about the target that are displayed in the notification, which should not change on target update/deletion
 // snapshots are infos about the target that are displayed in the notification, which should not change on target update/deletion
 export const generateSnapshot = async (
 export const generateSnapshot = async (
   targetModel: string,
   targetModel: string,
-  target: IUser | IPage | IPageBulkExportJob,
+  target: IUser | IPage | IPageBulkExportJob | IAuditLogBulkExportJob,
 ): Promise<string | undefined> => {
 ): Promise<string | undefined> => {
   let snapshot: string | undefined;
   let snapshot: string | undefined;
 
 

+ 1 - 5
apps/app/src/server/service/page-grant.ts

@@ -6,7 +6,6 @@ import {
   PageGrant,
   PageGrant,
 } from '@growi/core';
 } from '@growi/core';
 import { pagePathUtils, pageUtils, pathUtils } from '@growi/core/dist/utils';
 import { pagePathUtils, pageUtils, pathUtils } from '@growi/core/dist/utils';
-import escapeStringRegexp from 'escape-string-regexp';
 import mongoose, { type HydratedDocument } from 'mongoose';
 import mongoose, { type HydratedDocument } from 'mongoose';
 
 
 import type { ExternalGroupProviderType } from '~/features/external-user-group/interfaces/external-user-group';
 import type { ExternalGroupProviderType } from '~/features/external-user-group/interfaces/external-user-group';
@@ -590,10 +589,7 @@ class PageGrantService implements IPageGrantService {
     };
     };
 
 
     const commonCondition = {
     const commonCondition = {
-      path: new RegExp(
-        `^${escapeStringRegexp(addTrailingSlash(targetPath))}`,
-        'i',
-      ),
+      path: new RegExp(`^${RegExp.escape(addTrailingSlash(targetPath))}`, 'i'),
       isEmpty: false,
       isEmpty: false,
     };
     };
 
 

+ 9 - 18
apps/app/src/server/service/page/index.ts

@@ -18,7 +18,6 @@ import type {
 } from '@growi/core/dist/interfaces';
 } from '@growi/core/dist/interfaces';
 import { PageGrant } from '@growi/core/dist/interfaces';
 import { PageGrant } from '@growi/core/dist/interfaces';
 import { pagePathUtils, pathUtils } from '@growi/core/dist/utils';
 import { pagePathUtils, pathUtils } from '@growi/core/dist/utils';
-import escapeStringRegexp from 'escape-string-regexp';
 import type EventEmitter from 'events';
 import type EventEmitter from 'events';
 import type { Cursor, HydratedDocument } from 'mongoose';
 import type { Cursor, HydratedDocument } from 'mongoose';
 import mongoose from 'mongoose';
 import mongoose from 'mongoose';
@@ -941,7 +940,7 @@ class PageService implements IPageService {
   }
   }
 
 
   private isRenamingToUnderTarget(fromPath: string, toPath: string): boolean {
   private isRenamingToUnderTarget(fromPath: string, toPath: string): boolean {
-    const pathToTest = escapeStringRegexp(addTrailingSlash(fromPath));
+    const pathToTest = RegExp.escape(addTrailingSlash(fromPath));
     const pathToBeTested = toPath;
     const pathToBeTested = toPath;
 
 
     return new RegExp(`^${pathToTest}`, 'i').test(pathToBeTested);
     return new RegExp(`^${pathToTest}`, 'i').test(pathToBeTested);
@@ -1245,10 +1244,7 @@ class PageService implements IPageService {
     const batchStream = createBatchStream(BULK_REINDEX_SIZE);
     const batchStream = createBatchStream(BULK_REINDEX_SIZE);
 
 
     const newPagePathPrefix = newPagePathSanitized;
     const newPagePathPrefix = newPagePathSanitized;
-    const pathRegExp = new RegExp(
-      `^${escapeStringRegexp(targetPage.path)}`,
-      'i',
-    );
+    const pathRegExp = new RegExp(`^${RegExp.escape(targetPage.path)}`, 'i');
 
 
     const renameDescendants = this.renameDescendants.bind(this);
     const renameDescendants = this.renameDescendants.bind(this);
     const pageEvent = this.pageEvent;
     const pageEvent = this.pageEvent;
@@ -1304,10 +1300,7 @@ class PageService implements IPageService {
     const batchStream = createBatchStream(BULK_REINDEX_SIZE);
     const batchStream = createBatchStream(BULK_REINDEX_SIZE);
 
 
     const newPagePathPrefix = newPagePathSanitized;
     const newPagePathPrefix = newPagePathSanitized;
-    const pathRegExp = new RegExp(
-      `^${escapeStringRegexp(targetPage.path)}`,
-      'i',
-    );
+    const pathRegExp = new RegExp(`^${RegExp.escape(targetPage.path)}`, 'i');
 
 
     const renameDescendants = this.renameDescendants.bind(this);
     const renameDescendants = this.renameDescendants.bind(this);
     const pageEvent = this.pageEvent;
     const pageEvent = this.pageEvent;
@@ -1892,7 +1885,7 @@ class PageService implements IPageService {
     const batchStream = createBatchStream(BULK_REINDEX_SIZE);
     const batchStream = createBatchStream(BULK_REINDEX_SIZE);
 
 
     const newPagePathPrefix = newPagePathSanitized;
     const newPagePathPrefix = newPagePathSanitized;
-    const pathRegExp = new RegExp(`^${escapeStringRegexp(page.path)}`, 'i');
+    const pathRegExp = new RegExp(`^${RegExp.escape(page.path)}`, 'i');
 
 
     const duplicateDescendants = this.duplicateDescendants.bind(this);
     const duplicateDescendants = this.duplicateDescendants.bind(this);
     const pageEvent = this.pageEvent;
     const pageEvent = this.pageEvent;
@@ -1948,7 +1941,7 @@ class PageService implements IPageService {
     const batchStream = createBatchStream(BULK_REINDEX_SIZE);
     const batchStream = createBatchStream(BULK_REINDEX_SIZE);
 
 
     const newPagePathPrefix = newPagePathSanitized;
     const newPagePathPrefix = newPagePathSanitized;
-    const pathRegExp = new RegExp(`^${escapeStringRegexp(page.path)}`, 'i');
+    const pathRegExp = new RegExp(`^${RegExp.escape(page.path)}`, 'i');
 
 
     const duplicateDescendants = this.duplicateDescendants.bind(this);
     const duplicateDescendants = this.duplicateDescendants.bind(this);
     const pageEvent = this.pageEvent;
     const pageEvent = this.pageEvent;
@@ -3968,7 +3961,7 @@ class PageService implements IPageService {
     const ancestorPaths = paths.flatMap((p) => collectAncestorPaths(p, []));
     const ancestorPaths = paths.flatMap((p) => collectAncestorPaths(p, []));
     // targets' descendants
     // targets' descendants
     const pathAndRegExpsToNormalize: (RegExp | string)[] = paths.map(
     const pathAndRegExpsToNormalize: (RegExp | string)[] = paths.map(
-      (p) => new RegExp(`^${escapeStringRegexp(addTrailingSlash(p))}`, 'i'),
+      (p) => new RegExp(`^${RegExp.escape(addTrailingSlash(p))}`, 'i'),
     );
     );
     // include targets' path
     // include targets' path
     pathAndRegExpsToNormalize.push(...paths);
     pathAndRegExpsToNormalize.push(...paths);
@@ -4179,7 +4172,7 @@ class PageService implements IPageService {
           const parentId = parent._id;
           const parentId = parent._id;
 
 
           // Build filter
           // Build filter
-          const parentPathEscaped = escapeStringRegexp(
+          const parentPathEscaped = RegExp.escape(
             parent.path === '/' ? '' : parent.path,
             parent.path === '/' ? '' : parent.path,
           ); // adjust the path for RegExp
           ); // adjust the path for RegExp
           const filter: any = {
           const filter: any = {
@@ -5148,9 +5141,7 @@ class PageService implements IPageService {
     const wasOnTree = exPage.parent != null || isTopPage(exPage.path);
     const wasOnTree = exPage.parent != null || isTopPage(exPage.path);
     const shouldBeOnTree = currentPage.grant !== PageGrant.GRANT_RESTRICTED;
     const shouldBeOnTree = currentPage.grant !== PageGrant.GRANT_RESTRICTED;
     const isChildrenExist = await Page.count({
     const isChildrenExist = await Page.count({
-      path: new RegExp(
-        `^${escapeStringRegexp(addTrailingSlash(currentPage.path))}`,
-      ),
+      path: new RegExp(`^${RegExp.escape(addTrailingSlash(currentPage.path))}`),
       parent: { $ne: null },
       parent: { $ne: null },
     });
     });
 
 
@@ -5282,7 +5273,7 @@ class PageService implements IPageService {
     const shouldBeOnTree = grant !== PageGrant.GRANT_RESTRICTED;
     const shouldBeOnTree = grant !== PageGrant.GRANT_RESTRICTED;
     const isChildrenExist = await Page.count({
     const isChildrenExist = await Page.count({
       path: new RegExp(
       path: new RegExp(
-        `^${escapeStringRegexp(addTrailingSlash(clonedPageData.path))}`,
+        `^${RegExp.escape(addTrailingSlash(clonedPageData.path))}`,
       ),
       ),
       parent: { $ne: null },
       parent: { $ne: null },
     });
     });

+ 3 - 0
apps/app/src/server/service/socket-io/socket-io.ts

@@ -43,6 +43,9 @@ export class SocketIoService {
   async attachServer(server) {
   async attachServer(server) {
     this.io = new Server(server, {
     this.io = new Server(server, {
       serveClient: false,
       serveClient: false,
+      // Allow non-Socket.IO WebSocket upgrade requests (e.g. /yjs/) to pass through
+      // without being destroyed by engine.io's default timeout handler
+      destroyUpgrade: false,
     });
     });
 
 
     // create namespace for admin
     // create namespace for admin

+ 61 - 20
apps/app/src/server/service/yjs/create-mongodb-persistence.ts

@@ -1,23 +1,38 @@
-import type { Persistence } from 'y-socket.io/dist/server';
+import { YDocStatus } from '@growi/core/dist/consts';
+import type { Server } from 'socket.io';
+import type { WSSharedDoc, YWebsocketPersistence } from 'y-websocket/bin/utils';
 import * as Y from 'yjs';
 import * as Y from 'yjs';
 
 
+import { SocketEventName } from '~/interfaces/websocket';
+import {
+  getRoomNameWithId,
+  RoomPrefix,
+} from '~/server/service/socket-io/helper';
 import loggerFactory from '~/utils/logger';
 import loggerFactory from '~/utils/logger';
 
 
 import type { MongodbPersistence } from './extended/mongodb-persistence';
 import type { MongodbPersistence } from './extended/mongodb-persistence';
+import type { syncYDoc as syncYDocType } from './sync-ydoc';
 
 
 const logger = loggerFactory('growi:service:yjs:create-mongodb-persistence');
 const logger = loggerFactory('growi:service:yjs:create-mongodb-persistence');
 
 
+type GetYDocStatus = (pageId: string) => Promise<YDocStatus>;
+
 /**
 /**
- * Based on the example by https://github.com/MaxNoetzold/y-mongodb-provider?tab=readme-ov-file#an-other-example
- * @param mdb
- * @returns
+ * Creates a y-websocket compatible persistence layer backed by MongoDB.
+ *
+ * bindState also handles:
+ * - sync-on-load (syncYDoc) after persisted state is applied
+ * - awareness event bridge to Socket.IO rooms
  */
  */
 export const createMongoDBPersistence = (
 export const createMongoDBPersistence = (
   mdb: MongodbPersistence,
   mdb: MongodbPersistence,
-): Persistence => {
-  const persistece: Persistence = {
+  io: Server,
+  syncYDoc: typeof syncYDocType,
+  getYDocStatus: GetYDocStatus,
+): YWebsocketPersistence => {
+  const persistence: YWebsocketPersistence = {
     provider: mdb,
     provider: mdb,
-    bindState: async (docName, ydoc) => {
+    bindState: async (docName: string, ydoc: WSSharedDoc) => {
       logger.debug('bindState', { docName });
       logger.debug('bindState', { docName });
 
 
       const persistedYdoc = await mdb.getYDoc(docName);
       const persistedYdoc = await mdb.getYDoc(docName);
@@ -27,12 +42,7 @@ export const createMongoDBPersistence = (
       const diff = Y.encodeStateAsUpdate(ydoc, persistedStateVector);
       const diff = Y.encodeStateAsUpdate(ydoc, persistedStateVector);
 
 
       // store the new data in db (if there is any: empty update is an array of 0s)
       // store the new data in db (if there is any: empty update is an array of 0s)
-      if (
-        diff.reduce(
-          (previousValue, currentValue) => previousValue + currentValue,
-          0,
-        ) > 0
-      ) {
+      if (diff.some((b) => b !== 0)) {
         mdb.storeUpdate(docName, diff);
         mdb.storeUpdate(docName, diff);
         mdb.setTypedMeta(docName, 'updatedAt', Date.now());
         mdb.setTypedMeta(docName, 'updatedAt', Date.now());
       }
       }
@@ -40,23 +50,54 @@ export const createMongoDBPersistence = (
       // send the persisted data to clients
       // send the persisted data to clients
       Y.applyUpdate(ydoc, Y.encodeStateAsUpdate(persistedYdoc));
       Y.applyUpdate(ydoc, Y.encodeStateAsUpdate(persistedYdoc));
 
 
+      // cleanup some memory
+      persistedYdoc.destroy();
+
+      // sync with the latest revision after persisted state is applied
+      const ydocStatus = await getYDocStatus(docName);
+      syncYDoc(mdb, ydoc, { ydocStatus });
+
       // store updates of the document in db
       // store updates of the document in db
-      ydoc.on('update', async (update) => {
+      ydoc.on('update', (update: Uint8Array) => {
         mdb.storeUpdate(docName, update);
         mdb.storeUpdate(docName, update);
         mdb.setTypedMeta(docName, 'updatedAt', Date.now());
         mdb.setTypedMeta(docName, 'updatedAt', Date.now());
       });
       });
 
 
-      // cleanup some memory
-      persistedYdoc.destroy();
+      // register awareness event bridge to Socket.IO rooms
+      // Only emit when the awareness state size actually changes (cursor moves
+      // and other updates fire frequently but don't change the user count)
+      let lastEmittedSize = -1;
+      ydoc.awareness.on('update', async () => {
+        const pageId = docName;
+        const awarenessStateSize = ydoc.awareness.getStates().size;
+
+        if (awarenessStateSize !== lastEmittedSize) {
+          lastEmittedSize = awarenessStateSize;
+          io.in(getRoomNameWithId(RoomPrefix.PAGE, pageId)).emit(
+            SocketEventName.YjsAwarenessStateSizeUpdated,
+            awarenessStateSize,
+          );
+        }
+
+        // emit draft status when last user leaves
+        if (awarenessStateSize === 0) {
+          const status = await getYDocStatus(pageId);
+          const hasYdocsNewerThanLatestRevision =
+            status === YDocStatus.DRAFT || status === YDocStatus.ISOLATED;
+
+          io.in(getRoomNameWithId(RoomPrefix.PAGE, pageId)).emit(
+            SocketEventName.YjsHasYdocsNewerThanLatestRevisionUpdated,
+            hasYdocsNewerThanLatestRevision,
+          );
+        }
+      });
     },
     },
-    writeState: async (docName) => {
+    writeState: async (docName: string) => {
       logger.debug('writeState', { docName });
       logger.debug('writeState', { docName });
-      // This is called when all connections to the document are closed.
-
       // flush document on close to have the smallest possible database
       // flush document on close to have the smallest possible database
       await mdb.flushDocument(docName);
       await mdb.flushDocument(docName);
     },
     },
   };
   };
 
 
-  return persistece;
+  return persistence;
 };
 };

+ 159 - 0
apps/app/src/server/service/yjs/guard-socket.spec.ts

@@ -0,0 +1,159 @@
+import http from 'node:http';
+import WebSocket, { WebSocketServer } from 'ws';
+import { docs, setPersistence, setupWSConnection } from 'y-websocket/bin/utils';
+
+import { guardSocket } from './guard-socket';
+
+/**
+ * Creates a test server where:
+ * 1. The Yjs upgrade handler guards the socket, then awaits a simulated async auth delay before completing the handshake
+ * 2. A hostile handler (simulating Next.js) calls socket.end() for /yjs/ paths
+ */
+const createServerWithHostileHandler = (): {
+  server: http.Server;
+  wss: WebSocketServer;
+} => {
+  const server = http.createServer();
+  const wss = new WebSocketServer({ noServer: true });
+
+  // Yjs handler (registered first — same order as production)
+  server.on('upgrade', async (request, socket, head) => {
+    const url = request.url ?? '';
+    if (!url.startsWith('/yjs/')) return;
+
+    const pageId = url.slice('/yjs/'.length).split('?')[0];
+
+    const guard = guardSocket(socket);
+
+    try {
+      // Simulate async auth delay
+      await new Promise((resolve) => setTimeout(resolve, 10));
+
+      guard.restore();
+
+      wss.handleUpgrade(request, socket, head, (ws) => {
+        wss.emit('connection', ws, request);
+        setupWSConnection(ws, request, { docName: pageId });
+      });
+    } catch {
+      guard.restore();
+      socket.destroy();
+    }
+  });
+
+  // Hostile handler (registered second — simulates Next.js upgradeHandler)
+  server.on('upgrade', (_request, socket) => {
+    socket.end();
+  });
+
+  return { server, wss };
+};
+
+const connectClient = (port: number, pageId: string): Promise<WebSocket> => {
+  return new Promise((resolve, reject) => {
+    const ws = new WebSocket(`ws://127.0.0.1:${port}/yjs/${pageId}`);
+    ws.binaryType = 'arraybuffer';
+    ws.on('open', () => resolve(ws));
+    ws.on('error', reject);
+  });
+};
+
+describe('guardSocket — protection against hostile upgrade handlers', () => {
+  let server: http.Server;
+  let wss: WebSocketServer;
+  let port: number;
+
+  beforeAll(async () => {
+    setPersistence(null);
+
+    const testServer = createServerWithHostileHandler();
+    server = testServer.server;
+    wss = testServer.wss;
+
+    await new Promise<void>((resolve) => {
+      server.listen(0, '127.0.0.1', () => {
+        const addr = server.address();
+        if (addr && typeof addr === 'object') {
+          port = addr.port;
+        }
+        resolve();
+      });
+    });
+  });
+
+  afterAll(async () => {
+    for (const [name, doc] of docs) {
+      doc.destroy();
+      docs.delete(name);
+    }
+
+    await new Promise<void>((resolve) => {
+      wss.close(() => {
+        server.close(() => resolve());
+      });
+    });
+  });
+
+  afterEach(() => {
+    for (const [name, doc] of docs) {
+      doc.destroy();
+      docs.delete(name);
+    }
+  });
+
+  it('should establish WebSocket connection even when a hostile handler calls socket.end()', async () => {
+    const pageId = 'guard-test-001';
+
+    const ws = await connectClient(port, pageId);
+
+    await new Promise((resolve) => setTimeout(resolve, 50));
+
+    const serverDoc = docs.get(pageId);
+    expect(serverDoc).toBeDefined();
+    assert(serverDoc !== undefined);
+    expect(serverDoc.conns.size).toBe(1);
+
+    ws.close();
+  });
+
+  it('should handle multiple concurrent connections with hostile handler', async () => {
+    const pageId = 'guard-test-002';
+
+    const connections = await Promise.all([
+      connectClient(port, pageId),
+      connectClient(port, pageId),
+    ]);
+
+    await new Promise((resolve) => setTimeout(resolve, 50));
+
+    const serverDoc = docs.get(pageId);
+    expect(serverDoc).toBeDefined();
+    assert(serverDoc !== undefined);
+    expect(serverDoc.conns.size).toBe(2);
+
+    for (const ws of connections) {
+      ws.close();
+    }
+  });
+
+  it('should allow normal close after guard is restored', async () => {
+    const pageId = 'guard-test-003';
+
+    const ws = await connectClient(port, pageId);
+
+    await new Promise((resolve) => setTimeout(resolve, 50));
+
+    // Connection succeeds, meaning socket.end/destroy were properly
+    // guarded during async auth and restored before wss.handleUpgrade
+    expect(ws.readyState).toBe(WebSocket.OPEN);
+
+    ws.close();
+    await new Promise((resolve) => setTimeout(resolve, 50));
+
+    // After close, the server doc should have removed the connection
+    const serverDoc = docs.get(pageId);
+    if (serverDoc) {
+      expect(serverDoc.conns.size).toBe(0);
+    }
+  });
+});

+ 30 - 0
apps/app/src/server/service/yjs/guard-socket.ts

@@ -0,0 +1,30 @@
+import type { Duplex } from 'node:stream';
+
+type SocketGuard = {
+  restore: () => void;
+};
+
+/**
+ * Temporarily replaces socket.end() and socket.destroy() with no-ops.
+ *
+ * This prevents other synchronous `upgrade` event listeners (e.g. Next.js's
+ * NextCustomServer.upgradeHandler) from closing the socket while an async
+ * handler is awaiting authentication.
+ *
+ * Call `restore()` on the returned object to reinstate the original methods
+ * before performing the actual WebSocket handshake or cleanup.
+ */
+export const guardSocket = (socket: Duplex): SocketGuard => {
+  const origEnd = socket.end.bind(socket);
+  const origDestroy = socket.destroy.bind(socket);
+
+  socket.end = () => socket;
+  socket.destroy = () => socket;
+
+  return {
+    restore: () => {
+      socket.end = origEnd;
+      socket.destroy = origDestroy;
+    },
+  };
+};

+ 2 - 2
apps/app/src/server/service/yjs/sync-ydoc.ts

@@ -1,6 +1,6 @@
 import { Origin, YDocStatus } from '@growi/core';
 import { Origin, YDocStatus } from '@growi/core';
 import type { Delta } from '@growi/editor';
 import type { Delta } from '@growi/editor';
-import type { Document } from 'y-socket.io/dist/server';
+import type { WSSharedDoc } from 'y-websocket/bin/utils';
 
 
 import loggerFactory from '~/utils/logger';
 import loggerFactory from '~/utils/logger';
 
 
@@ -22,7 +22,7 @@ type Context = {
  */
  */
 export const syncYDoc = async (
 export const syncYDoc = async (
   mdb: MongodbPersistence,
   mdb: MongodbPersistence,
-  doc: Document,
+  doc: WSSharedDoc,
   context: true | Context,
   context: true | Context,
 ): Promise<void> => {
 ): Promise<void> => {
   const pageId = doc.name;
   const pageId = doc.name;

+ 177 - 0
apps/app/src/server/service/yjs/upgrade-handler.spec.ts

@@ -0,0 +1,177 @@
+import type { IncomingMessage } from 'node:http';
+import type { Duplex } from 'node:stream';
+import type { IUserHasId } from '@growi/core';
+import { mock } from 'vitest-mock-extended';
+
+import { createUpgradeHandler } from './upgrade-handler';
+
+type AuthenticatedIncomingMessage = IncomingMessage & { user?: IUserHasId };
+
+interface MockSocket {
+  write: ReturnType<typeof vi.fn>;
+  destroy: ReturnType<typeof vi.fn>;
+}
+
+const { isAccessibleMock } = vi.hoisted(() => ({
+  isAccessibleMock: vi.fn(),
+}));
+
+vi.mock('mongoose', () => ({
+  default: {
+    model: () => ({ isAccessiblePageByViewer: isAccessibleMock }),
+  },
+}));
+
+const { sessionMiddlewareMock } = vi.hoisted(() => ({
+  sessionMiddlewareMock: vi.fn(
+    (_req: unknown, _res: unknown, next: () => void) => next(),
+  ),
+}));
+
+vi.mock('express-session', () => ({
+  default: () => sessionMiddlewareMock,
+}));
+
+vi.mock('passport', () => ({
+  default: {
+    initialize: () => (_req: unknown, _res: unknown, next: () => void) =>
+      next(),
+    session: () => (_req: unknown, _res: unknown, next: () => void) => next(),
+  },
+}));
+
+const sessionConfig = {
+  rolling: true,
+  secret: 'test-secret',
+  resave: false,
+  saveUninitialized: true,
+  cookie: { maxAge: 86400000 },
+  genid: () => 'test-session-id',
+};
+
+const createMockRequest = (
+  url: string,
+  user?: IUserHasId,
+): AuthenticatedIncomingMessage => {
+  const req = mock<AuthenticatedIncomingMessage>();
+  req.url = url;
+  req.headers = { cookie: 'connect.sid=test-session' };
+  req.user = user;
+  return req;
+};
+
+const createMockSocket = (): Duplex & MockSocket => {
+  return {
+    write: vi.fn().mockReturnValue(true),
+    destroy: vi.fn(),
+  } as unknown as Duplex & MockSocket;
+};
+
+describe('UpgradeHandler', () => {
+  const handleUpgrade = createUpgradeHandler(sessionConfig);
+
+  it('should authorize a valid user with page access', async () => {
+    isAccessibleMock.mockResolvedValue(true);
+
+    const request = createMockRequest('/yjs/507f1f77bcf86cd799439011', {
+      _id: 'user1',
+      name: 'Test User',
+    } as unknown as IUserHasId);
+    const socket = createMockSocket();
+    const head = Buffer.alloc(0);
+
+    const result = await handleUpgrade(request, socket, head);
+
+    expect(result.authorized).toBe(true);
+    if (result.authorized) {
+      expect(result.pageId).toBe('507f1f77bcf86cd799439011');
+    }
+  });
+
+  it('should reject with 400 for missing/malformed URL path', async () => {
+    const request = createMockRequest('/invalid/path');
+    const socket = createMockSocket();
+    const head = Buffer.alloc(0);
+
+    const result = await handleUpgrade(request, socket, head);
+
+    expect(result.authorized).toBe(false);
+    if (!result.authorized) {
+      expect(result.statusCode).toBe(400);
+    }
+    expect(socket.write).toHaveBeenCalledWith(expect.stringContaining('400'));
+    expect(socket.destroy).not.toHaveBeenCalled();
+  });
+
+  it('should reject with 403 when user has no page access', async () => {
+    isAccessibleMock.mockResolvedValue(false);
+
+    const request = createMockRequest('/yjs/507f1f77bcf86cd799439011', {
+      _id: 'user1',
+      name: 'Test User',
+    } as unknown as IUserHasId);
+    const socket = createMockSocket();
+    const head = Buffer.alloc(0);
+
+    const result = await handleUpgrade(request, socket, head);
+
+    expect(result.authorized).toBe(false);
+    if (!result.authorized) {
+      expect(result.statusCode).toBe(403);
+    }
+    expect(socket.write).toHaveBeenCalledWith(expect.stringContaining('403'));
+    expect(socket.destroy).not.toHaveBeenCalled();
+  });
+
+  it('should reject with 401 when unauthenticated user has no page access', async () => {
+    isAccessibleMock.mockResolvedValue(false);
+
+    const request = createMockRequest('/yjs/507f1f77bcf86cd799439011');
+    const socket = createMockSocket();
+    const head = Buffer.alloc(0);
+
+    const result = await handleUpgrade(request, socket, head);
+
+    expect(result.authorized).toBe(false);
+    if (!result.authorized) {
+      expect(result.statusCode).toBe(401);
+    }
+    expect(socket.write).toHaveBeenCalledWith(expect.stringContaining('401'));
+    expect(socket.destroy).not.toHaveBeenCalled();
+  });
+
+  it('should allow guest user when page allows guest access', async () => {
+    isAccessibleMock.mockResolvedValue(true);
+
+    const request = createMockRequest('/yjs/507f1f77bcf86cd799439011');
+    const socket = createMockSocket();
+    const head = Buffer.alloc(0);
+
+    const result = await handleUpgrade(request, socket, head);
+
+    expect(result.authorized).toBe(true);
+    if (result.authorized) {
+      expect(result.pageId).toBe('507f1f77bcf86cd799439011');
+    }
+  });
+
+  it('should reject with 401 when session middleware fails', async () => {
+    sessionMiddlewareMock.mockImplementationOnce(
+      (_req: unknown, _res: unknown, next: (err?: unknown) => void) =>
+        next(new Error('session store unavailable')),
+    );
+
+    const request = createMockRequest('/yjs/507f1f77bcf86cd799439011');
+    const socket = createMockSocket();
+    const head = Buffer.alloc(0);
+
+    const result = await handleUpgrade(request, socket, head);
+
+    expect(result.authorized).toBe(false);
+    if (!result.authorized) {
+      expect(result.statusCode).toBe(401);
+    }
+    expect(socket.write).toHaveBeenCalledWith(expect.stringContaining('401'));
+    expect(socket.destroy).not.toHaveBeenCalled();
+  });
+});

+ 131 - 0
apps/app/src/server/service/yjs/upgrade-handler.ts

@@ -0,0 +1,131 @@
+import type { IPage, IUserHasId } from '@growi/core';
+import { YJS_WEBSOCKET_BASE_PATH } from '@growi/core/dist/consts';
+import expressSession from 'express-session';
+import type { IncomingMessage, ServerResponse } from 'http';
+import mongoose from 'mongoose';
+import passport from 'passport';
+import type { Duplex } from 'stream';
+
+import type { SessionConfig } from '~/interfaces/session-config';
+import loggerFactory from '~/utils/logger';
+
+import type { PageModel } from '../../models/page';
+
+const logger = loggerFactory('growi:service:yjs:upgrade-handler');
+
+type AuthenticatedRequest = IncomingMessage & {
+  user?: IUserHasId;
+};
+
+/**
+ * Connect-style middleware that operates on raw Node.js HTTP types.
+ * Express middleware (express-session, passport) is compatible because
+ * express.Request extends IncomingMessage and express.Response extends ServerResponse.
+ */
+type ConnectMiddleware = (
+  req: IncomingMessage,
+  res: ServerResponse,
+  next: (err?: unknown) => void,
+) => void;
+
+/**
+ * Run a Connect-style middleware against a raw IncomingMessage.
+ * Safe for express-session, passport.initialize(), and passport.session(): they
+ * read/write `req` properties and call `next()`; any access they make to `res`
+ * (e.g. express-session lazily wrapping `writeHead` to set cookies later) is
+ * never actually executed during a WebSocket upgrade, so a stub response
+ * object suffices.
+ */
+const runMiddleware = (
+  middleware: ConnectMiddleware,
+  req: IncomingMessage,
+): Promise<void> =>
+  new Promise((resolve, reject) => {
+    const stubRes = {} as ServerResponse;
+    middleware(req, stubRes, (err?: unknown) => {
+      if (err) return reject(err);
+      resolve();
+    });
+  });
+
+/**
+ * Extracts pageId from upgrade request URL.
+ * Expected format: /yjs/{pageId}
+ */
+const pageIdPattern = new RegExp(`^${YJS_WEBSOCKET_BASE_PATH}/([a-f0-9]{24})`);
+const extractPageId = (url: string | undefined): string | null => {
+  if (url == null) return null;
+  const match = url.match(pageIdPattern);
+  return match?.[1] ?? null;
+};
+
+/**
+ * Writes an HTTP error response to the socket.
+ * Does NOT close the socket — the caller (yjs.ts) manages socket lifecycle
+ * so that guardSocket can safely intercept end/destroy during async auth.
+ */
+const writeErrorResponse = (
+  socket: Duplex,
+  statusCode: number,
+  message: string,
+): void => {
+  socket.write(`HTTP/1.1 ${statusCode} ${message}\r\n\r\n`);
+};
+
+export type UpgradeResult =
+  | { authorized: true; request: AuthenticatedRequest; pageId: string }
+  | { authorized: false; statusCode: number };
+
+/**
+ * Creates an upgrade handler that authenticates WebSocket connections
+ * using the existing express-session + passport mechanism.
+ */
+export const createUpgradeHandler = (sessionConfig: SessionConfig) => {
+  const sessionMiddleware = expressSession(sessionConfig as any);
+  const passportInit = passport.initialize();
+  const passportSession = passport.session();
+
+  return async (
+    request: IncomingMessage,
+    socket: Duplex,
+    _head: Buffer,
+  ): Promise<UpgradeResult> => {
+    const pageId = extractPageId(request.url);
+    if (pageId == null) {
+      logger.warn('Invalid URL path for Yjs upgrade', { url: request.url });
+      writeErrorResponse(socket, 400, 'Bad Request');
+      return { authorized: false, statusCode: 400 };
+    }
+
+    try {
+      // Run session + passport middleware chain
+      await runMiddleware(sessionMiddleware as ConnectMiddleware, request);
+      await runMiddleware(passportInit as ConnectMiddleware, request);
+      await runMiddleware(passportSession as ConnectMiddleware, request);
+    } catch (err) {
+      logger.warn('Session/passport middleware failed on upgrade', { err });
+      writeErrorResponse(socket, 401, 'Unauthorized');
+      return { authorized: false, statusCode: 401 };
+    }
+
+    const user = (request as AuthenticatedRequest).user ?? null;
+
+    // Check page access
+    const Page = mongoose.model<IPage, PageModel>('Page');
+    const isAccessible = await Page.isAccessiblePageByViewer(pageId, user);
+
+    if (!isAccessible) {
+      const statusCode = user == null ? 401 : 403;
+      const message = user == null ? 'Unauthorized' : 'Forbidden';
+      logger.warn(`Yjs upgrade rejected: ${message}`, {
+        pageId,
+        userId: user?._id,
+      });
+      writeErrorResponse(socket, statusCode, message);
+      return { authorized: false, statusCode };
+    }
+
+    return {
+      authorized: true,
+      request: request as AuthenticatedRequest,
+      pageId,
+    };
+  };
+};

+ 205 - 0
apps/app/src/server/service/yjs/websocket-connection.integ.ts

@@ -0,0 +1,205 @@
+import http from 'node:http';
+import WebSocket, { WebSocketServer } from 'ws';
+import { docs, setPersistence, setupWSConnection } from 'y-websocket/bin/utils';
+
+/**
+ * Creates a minimal HTTP + y-websocket server for testing.
+ * No authentication — pure document sync testing.
+ */
+const createTestServer = (): { server: http.Server; wss: WebSocketServer } => {
+  const server = http.createServer();
+  const wss = new WebSocketServer({ noServer: true });
+
+  server.on('upgrade', (request, socket, head) => {
+    const url = request.url ?? '';
+    if (!url.startsWith('/yjs/')) return;
+    const pageId = url.slice('/yjs/'.length).split('?')[0];
+
+    wss.handleUpgrade(request, socket, head, (ws) => {
+      wss.emit('connection', ws, request);
+      setupWSConnection(ws, request, { docName: pageId });
+    });
+  });
+
+  return { server, wss };
+};
+
+/**
+ * Connects a WebSocket client and waits for the connection to open.
+ */
+const connectClient = (port: number, pageId: string): Promise<WebSocket> => {
+  return new Promise((resolve, reject) => {
+    const ws = new WebSocket(`ws://127.0.0.1:${port}/yjs/${pageId}`);
+    ws.binaryType = 'arraybuffer';
+    ws.on('open', () => resolve(ws));
+    ws.on('error', reject);
+  });
+};
+
+/**
+ * Waits for a WebSocket to fully close.
+ */
+const waitForClose = (ws: WebSocket): Promise<void> => {
+  return new Promise((resolve) => {
+    if (ws.readyState === WebSocket.CLOSED) return resolve();
+    ws.on('close', () => resolve());
+  });
+};
+
+describe('WebSocket Connection and Sync Flow', () => {
+  let server: http.Server;
+  let wss: WebSocketServer;
+  let port: number;
+
+  beforeAll(async () => {
+    setPersistence(null);
+
+    const testServer = createTestServer();
+    server = testServer.server;
+    wss = testServer.wss;
+
+    await new Promise<void>((resolve) => {
+      server.listen(0, '127.0.0.1', () => {
+        const addr = server.address();
+        if (addr && typeof addr === 'object') {
+          port = addr.port;
+        }
+        resolve();
+      });
+    });
+  });
+
+  afterAll(async () => {
+    for (const [name, doc] of docs) {
+      doc.destroy();
+      docs.delete(name);
+    }
+
+    await new Promise<void>((resolve) => {
+      wss.close(() => {
+        server.close(() => resolve());
+      });
+    });
+  });
+
+  afterEach(() => {
+    for (const [name, doc] of docs) {
+      doc.destroy();
+      docs.delete(name);
+    }
+  });
+
+  describe('Connection and sync flow', () => {
+    it('should create a server-side Y.Doc on first client connection', async () => {
+      const pageId = 'test-page-sync-001';
+
+      const ws = await connectClient(port, pageId);
+
+      // Wait for setupWSConnection to register the doc
+      await new Promise((resolve) => setTimeout(resolve, 50));
+
+      const serverDoc = docs.get(pageId);
+      assert(serverDoc !== undefined);
+      expect(serverDoc.name).toBe(pageId);
+      expect(serverDoc.conns.size).toBe(1);
+
+      ws.close();
+    });
+
+    it('should register multiple clients on the same server-side Y.Doc', async () => {
+      const pageId = 'test-page-multi-001';
+
+      const ws1 = await connectClient(port, pageId);
+      const ws2 = await connectClient(port, pageId);
+
+      await new Promise((resolve) => setTimeout(resolve, 50));
+
+      const serverDoc = docs.get(pageId);
+      assert(serverDoc !== undefined);
+      expect(serverDoc.conns.size).toBe(2);
+
+      ws1.close();
+      ws2.close();
+    });
+
+    it('should keep the server doc alive when one client disconnects', async () => {
+      const pageId = 'test-page-reconnect-001';
+
+      const ws1 = await connectClient(port, pageId);
+      const ws2 = await connectClient(port, pageId);
+
+      await new Promise((resolve) => setTimeout(resolve, 50));
+
+      // Disconnect client 1
+      ws1.close();
+      await waitForClose(ws1);
+      await new Promise((resolve) => setTimeout(resolve, 50));
+
+      // Server doc should still exist with client 2
+      const serverDoc = docs.get(pageId);
+      assert(serverDoc !== undefined);
+      expect(serverDoc.conns.size).toBe(1);
+
+      ws2.close();
+    });
+  });
+
+  describe('Concurrency — single Y.Doc per page', () => {
+    it('should create exactly one Y.Doc for simultaneous connections', async () => {
+      const pageId = 'test-page-concurrent-001';
+
+      // Connect multiple clients simultaneously
+      const connections = await Promise.all([
+        connectClient(port, pageId),
+        connectClient(port, pageId),
+        connectClient(port, pageId),
+      ]);
+
+      await new Promise((resolve) => setTimeout(resolve, 50));
+
+      // Verify single Y.Doc instance
+      const serverDoc = docs.get(pageId);
+      assert(serverDoc !== undefined);
+      expect(serverDoc.conns.size).toBe(3);
+
+      // Only one doc for this page
+      const matchingDocs = Array.from(docs.values()).filter(
+        (d) => d.name === pageId,
+      );
+      expect(matchingDocs).toHaveLength(1);
+
+      for (const ws of connections) {
+        ws.close();
+      }
+    });
+
+    it('should handle disconnect during connect without document corruption', async () => {
+      const pageId = 'test-page-disconnect-001';
+
+      // Client 1 connects
+      const ws1 = await connectClient(port, pageId);
+      await new Promise((resolve) => setTimeout(resolve, 50));
+
+      // Write to server doc directly
+      const serverDoc = docs.get(pageId);
+      assert(serverDoc !== undefined);
+      serverDoc.getText('codemirror').insert(0, 'Hello World');
+
+      // Client 2 connects and immediately disconnects
+      const ws2 = await connectClient(port, pageId);
+      ws2.close();
+      await waitForClose(ws2);
+      await new Promise((resolve) => setTimeout(resolve, 50));
+
+      // Server doc should still exist with client 1
+      const docAfter = docs.get(pageId);
+      assert(docAfter !== undefined);
+      expect(docAfter.conns.size).toBe(1);
+
+      // Text should be intact
+      expect(docAfter.getText('codemirror').toString()).toBe('Hello World');
+
+      ws1.close();
+    });
+  });
+});

+ 39 - 0
apps/app/src/server/service/yjs/y-websocket-server.d.ts

@@ -0,0 +1,39 @@
+declare module 'y-websocket/bin/utils' {
+  import type { IncomingMessage } from 'http';
+  import type { WebSocket } from 'ws';
+  import type { Awareness } from 'y-protocols/awareness';
+  import * as Y from 'yjs';
+
+  export class WSSharedDoc extends Y.Doc {
+    name: string;
+    conns: Map<WebSocket, Set<number>>;
+    awareness: Awareness;
+    whenInitialized: Promise<void>;
+    constructor(name: string);
+  }
+
+  export interface YWebsocketPersistence {
+    bindState: (docName: string, ydoc: WSSharedDoc) => void;
+    writeState: (docName: string, ydoc: WSSharedDoc) => Promise<void>;
+    provider: unknown;
+  }
+
+  export function setPersistence(
+    persistence: YWebsocketPersistence | null,
+  ): void;
+  export function getPersistence(): YWebsocketPersistence | null;
+
+  export const docs: Map<string, WSSharedDoc>;
+
+  export function getYDoc(docname: string, gc?: boolean): WSSharedDoc;
+
+  export function setupWSConnection(
+    conn: WebSocket,
+    req: IncomingMessage,
+    opts?: { docName?: string; gc?: boolean },
+  ): void;
+
+  export function setContentInitializor(
+    f: (ydoc: Y.Doc) => Promise<void>,
+  ): void;
+}

+ 25 - 10
apps/app/src/server/service/yjs/yjs.integ.ts

@@ -1,3 +1,4 @@
+import http from 'node:http';
 import { YDocStatus } from '@growi/core/dist/consts';
 import { YDocStatus } from '@growi/core/dist/consts';
 import { Types } from 'mongoose';
 import { Types } from 'mongoose';
 import type { Server } from 'socket.io';
 import type { Server } from 'socket.io';
@@ -8,11 +9,15 @@ import type { MongodbPersistence } from './extended/mongodb-persistence';
 import type { IYjsService } from './yjs';
 import type { IYjsService } from './yjs';
 import { getYjsService, initializeYjsService } from './yjs';
 import { getYjsService, initializeYjsService } from './yjs';
 
 
-vi.mock('y-socket.io/dist/server', () => {
-  const YSocketIO = vi.fn();
-  YSocketIO.prototype.on = vi.fn();
-  YSocketIO.prototype.initialize = vi.fn();
-  return { YSocketIO };
+vi.mock('y-websocket/bin/utils', () => {
+  const docs = new Map();
+  return {
+    docs,
+    setPersistence: vi.fn(),
+    setupWSConnection: vi.fn(),
+    getYDoc: vi.fn(),
+    setContentInitializor: vi.fn(),
+  };
 });
 });
 
 
 vi.mock('../revision/normalize-latest-revision-if-broken', () => ({
 vi.mock('../revision/normalize-latest-revision-if-broken', () => ({
@@ -30,16 +35,25 @@ describe('YjsService', () => {
   describe('getYDocStatus()', () => {
   describe('getYDocStatus()', () => {
     beforeAll(() => {
     beforeAll(() => {
       const ioMock = mock<Server>();
       const ioMock = mock<Server>();
+      const httpServer = http.createServer();
+      const sessionConfig = {
+        rolling: true,
+        secret: 'test-secret',
+        resave: false,
+        saveUninitialized: true,
+        cookie: { maxAge: 86400000 },
+        genid: () => 'test-session-id',
+      };
 
 
       // initialize
       // initialize
-      initializeYjsService(ioMock);
+      initializeYjsService(httpServer, ioMock, sessionConfig);
     });
     });
 
 
-    afterAll(async () => {
-      // flush revisions
+    afterEach(async () => {
       await Revision.deleteMany({});
       await Revision.deleteMany({});
+    });
 
 
-      // flush yjs-writings
+    afterAll(async () => {
       const yjsService = getYjsService();
       const yjsService = getYjsService();
       const privateMdb = getPrivateMdbInstance(yjsService);
       const privateMdb = getPrivateMdbInstance(yjsService);
       try {
       try {
@@ -48,7 +62,8 @@ describe('YjsService', () => {
         // Ignore errors that can occur due to async index creation:
         // Ignore errors that can occur due to async index creation:
         // - 26: NamespaceNotFound (collection not yet created)
         // - 26: NamespaceNotFound (collection not yet created)
         // - 276: IndexBuildAborted (cleanup during index creation)
         // - 276: IndexBuildAborted (cleanup during index creation)
-        if (error.code !== 26 && error.code !== 276) {
+        const code = (error as { code?: number }).code;
+        if (code !== 26 && code !== 276) {
           throw error;
           throw error;
         }
         }
       }
       }

+ 72 - 93
apps/app/src/server/service/yjs/yjs.ts

@@ -1,49 +1,47 @@
-import type { IPage, IUserHasId } from '@growi/core';
-import { YDocStatus } from '@growi/core/dist/consts';
-import type { IncomingMessage } from 'http';
+import type http from 'node:http';
+import { YDocStatus, YJS_WEBSOCKET_BASE_PATH } from '@growi/core/dist/consts';
 import mongoose from 'mongoose';
 import mongoose from 'mongoose';
 import type { Server } from 'socket.io';
 import type { Server } from 'socket.io';
-import type { Document } from 'y-socket.io/dist/server';
-import { type Document as Ydoc, YSocketIO } from 'y-socket.io/dist/server';
+import { WebSocketServer } from 'ws';
+import type { WSSharedDoc } from 'y-websocket/bin/utils';
+import { docs, setPersistence, setupWSConnection } from 'y-websocket/bin/utils';
 
 
-import { SocketEventName } from '~/interfaces/websocket';
+import type { SessionConfig } from '~/interfaces/session-config';
 import type { SyncLatestRevisionBody } from '~/interfaces/yjs';
 import type { SyncLatestRevisionBody } from '~/interfaces/yjs';
-import {
-  getRoomNameWithId,
-  RoomPrefix,
-} from '~/server/service/socket-io/helper';
 import loggerFactory from '~/utils/logger';
 import loggerFactory from '~/utils/logger';
 
 
-import type { PageModel } from '../../models/page';
 import { Revision } from '../../models/revision';
 import { Revision } from '../../models/revision';
 import { normalizeLatestRevisionIfBroken } from '../revision/normalize-latest-revision-if-broken';
 import { normalizeLatestRevisionIfBroken } from '../revision/normalize-latest-revision-if-broken';
 import { createIndexes } from './create-indexes';
 import { createIndexes } from './create-indexes';
 import { createMongoDBPersistence } from './create-mongodb-persistence';
 import { createMongoDBPersistence } from './create-mongodb-persistence';
 import { MongodbPersistence } from './extended/mongodb-persistence';
 import { MongodbPersistence } from './extended/mongodb-persistence';
+import { guardSocket } from './guard-socket';
 import { syncYDoc } from './sync-ydoc';
 import { syncYDoc } from './sync-ydoc';
+import { createUpgradeHandler } from './upgrade-handler';
 
 
 const MONGODB_PERSISTENCE_COLLECTION_NAME = 'yjs-writings';
 const MONGODB_PERSISTENCE_COLLECTION_NAME = 'yjs-writings';
 const MONGODB_PERSISTENCE_FLUSH_SIZE = 100;
 const MONGODB_PERSISTENCE_FLUSH_SIZE = 100;
+const YJS_PATH_PREFIX = `${YJS_WEBSOCKET_BASE_PATH}/`;
 
 
 const logger = loggerFactory('growi:service:yjs');
 const logger = loggerFactory('growi:service:yjs');
 
 
-type RequestWithUser = IncomingMessage & { user: IUserHasId };
-
 export interface IYjsService {
 export interface IYjsService {
   getYDocStatus(pageId: string): Promise<YDocStatus>;
   getYDocStatus(pageId: string): Promise<YDocStatus>;
   syncWithTheLatestRevisionForce(
   syncWithTheLatestRevisionForce(
     pageId: string,
     pageId: string,
     editingMarkdownLength?: number,
     editingMarkdownLength?: number,
   ): Promise<SyncLatestRevisionBody>;
   ): Promise<SyncLatestRevisionBody>;
-  getCurrentYdoc(pageId: string): Ydoc | undefined;
+  getCurrentYdoc(pageId: string): WSSharedDoc | undefined;
 }
 }
 
 
 class YjsService implements IYjsService {
 class YjsService implements IYjsService {
-  private ysocketio: YSocketIO;
-
   private mdb: MongodbPersistence;
   private mdb: MongodbPersistence;
 
 
-  constructor(io: Server) {
+  constructor(
+    httpServer: http.Server,
+    io: Server,
+    sessionConfig: SessionConfig,
+  ) {
     const mdb = new MongodbPersistence(
     const mdb = new MongodbPersistence(
       {
       {
         // TODO: Required upgrading mongoose and unifying the versions of mongodb to omit 'as any'
         // TODO: Required upgrading mongoose and unifying the versions of mongodb to omit 'as any'
@@ -57,80 +55,62 @@ class YjsService implements IYjsService {
     );
     );
     this.mdb = mdb;
     this.mdb = mdb;
 
 
-    // initialize YSocketIO
-    const ysocketio = new YSocketIO(io);
-    this.injectPersistence(ysocketio, mdb);
-    ysocketio.initialize();
-    this.ysocketio = ysocketio;
-
     // create indexes
     // create indexes
     createIndexes(MONGODB_PERSISTENCE_COLLECTION_NAME);
     createIndexes(MONGODB_PERSISTENCE_COLLECTION_NAME);
 
 
-    // register middlewares
-    this.registerAccessiblePageChecker(ysocketio);
-
-    ysocketio.on('document-loaded', async (doc: Document) => {
-      const pageId = doc.name;
-
-      const ydocStatus = await this.getYDocStatus(pageId);
-
-      syncYDoc(mdb, doc, { ydocStatus });
-    });
-
-    ysocketio.on('awareness-update', async (doc: Document) => {
-      const pageId = doc.name;
-
-      if (pageId == null) return;
-
-      const awarenessStateSize = doc.awareness.states.size;
+    // setup y-websocket persistence (includes awareness bridge and sync-on-load)
+    const persistence = createMongoDBPersistence(mdb, io, syncYDoc, (pageId) =>
+      this.getYDocStatus(pageId),
+    );
+    setPersistence(persistence);
 
 
-      // Triggered when awareness changes
-      io.in(getRoomNameWithId(RoomPrefix.PAGE, pageId)).emit(
-        SocketEventName.YjsAwarenessStateSizeUpdated,
-        awarenessStateSize,
-      );
+    // setup WebSocket server
+    const wss = new WebSocketServer({ noServer: true });
+    const handleUpgrade = createUpgradeHandler(sessionConfig);
 
 
-      // Triggered when the last user leaves the editor
-      if (awarenessStateSize === 0) {
-        const ydocStatus = await this.getYDocStatus(pageId);
-        const hasYdocsNewerThanLatestRevision =
-          ydocStatus === YDocStatus.DRAFT || ydocStatus === YDocStatus.ISOLATED;
+    httpServer.on('upgrade', async (request, socket, head) => {
+      const url = request.url ?? '';
 
 
-        io.in(getRoomNameWithId(RoomPrefix.PAGE, pageId)).emit(
-          SocketEventName.YjsHasYdocsNewerThanLatestRevisionUpdated,
-          hasYdocsNewerThanLatestRevision,
-        );
+      // Only handle /yjs/ paths; let Socket.IO and others pass through
+      if (!url.startsWith(YJS_PATH_PREFIX)) {
+        return;
       }
       }
-    });
-  }
-
-  private injectPersistence(
-    ysocketio: YSocketIO,
-    mdb: MongodbPersistence,
-  ): void {
-    const persistece = createMongoDBPersistence(mdb);
-
-    // foce set to private property
-    // biome-ignore lint/complexity/useLiteralKeys: ignore
-    ysocketio['persistence'] = persistece;
-  }
-
-  private registerAccessiblePageChecker(ysocketio: YSocketIO): void {
-    // check accessible page
-    ysocketio.nsp?.use(async (socket, next) => {
-      // extract page id from namespace
-      const pageId = socket.nsp.name.replace(/\/yjs\|/, '');
-      const user = (socket.request as RequestWithUser).user; // should be injected by SocketIOService
 
 
-      const Page = mongoose.model<IPage, PageModel>('Page');
-      const isAccessible = await Page.isAccessiblePageByViewer(pageId, user);
-
-      if (!isAccessible) {
-        return next(new Error('Forbidden'));
+      // Guard the socket against being closed by other upgrade handlers
+      // (e.g. Next.js's NextCustomServer.upgradeHandler) that run synchronously
+      // after this async handler yields at the first await.
+      const guard = guardSocket(socket);
+
+      try {
+        const result = await handleUpgrade(request, socket, head);
+
+        // Restore original socket methods now that all synchronous
+        // upgrade handlers have finished
+        guard.restore();
+
+        if (!result.authorized) {
+          // rejectUpgrade already wrote the HTTP error response but
+          // socket.destroy() was a no-op during the guard; clean up now
+          socket.destroy();
+          return;
+        }
+
+        wss.handleUpgrade(result.request, socket, head, (ws) => {
+          wss.emit('connection', ws, result.request);
+          setupWSConnection(ws, result.request, { docName: result.pageId });
+        });
+      } catch (err) {
+        guard.restore();
+
+        logger.error('Yjs upgrade handler failed unexpectedly', { url, err });
+        if (socket.writable) {
+          socket.write('HTTP/1.1 500 Internal Server Error\r\n\r\n');
+        }
+        socket.destroy();
       }
       }
-
-      return next();
     });
     });
+
+    logger.info('YjsService initialized with y-websocket');
   }
   }
 
 
   public async getYDocStatus(pageId: string): Promise<YDocStatus> {
   public async getYDocStatus(pageId: string): Promise<YDocStatus> {
@@ -187,14 +167,14 @@ class YjsService implements IYjsService {
     pageId: string,
     pageId: string,
     editingMarkdownLength?: number,
     editingMarkdownLength?: number,
   ): Promise<SyncLatestRevisionBody> {
   ): Promise<SyncLatestRevisionBody> {
-    const doc = this.ysocketio.documents.get(pageId);
+    const doc = docs.get(pageId);
 
 
     if (doc == null) {
     if (doc == null) {
       return { synced: false };
       return { synced: false };
     }
     }
 
 
-    const ytextLength = doc?.getText('codemirror').length;
-    syncYDoc(this.mdb, doc, true);
+    const ytextLength = doc.getText('codemirror').length;
+    await syncYDoc(this.mdb, doc, true);
 
 
     return {
     return {
       synced: true,
       synced: true,
@@ -205,24 +185,23 @@ class YjsService implements IYjsService {
     };
     };
   }
   }
 
 
-  public getCurrentYdoc(pageId: string): Ydoc | undefined {
-    const currentYdoc = this.ysocketio.documents.get(pageId);
-    return currentYdoc;
+  public getCurrentYdoc(pageId: string): WSSharedDoc | undefined {
+    return docs.get(pageId);
   }
   }
 }
 }
 
 
 let _instance: YjsService;
 let _instance: YjsService;
 
 
-export const initializeYjsService = (io: Server): void => {
+export const initializeYjsService = (
+  httpServer: http.Server,
+  io: Server,
+  sessionConfig: SessionConfig,
+): void => {
   if (_instance != null) {
   if (_instance != null) {
     throw new Error('YjsService is already initialized');
     throw new Error('YjsService is already initialized');
   }
   }
 
 
-  if (io == null) {
-    throw new Error("'io' is required if initialize YjsService");
-  }
-
-  _instance = new YjsService(io);
+  _instance = new YjsService(httpServer, io, sessionConfig);
 };
 };
 
 
 export const getYjsService = (): YjsService => {
 export const getYjsService = (): YjsService => {

+ 1 - 1
apps/app/src/stores-universal/use-next-themes.tsx

@@ -1,7 +1,7 @@
 import { ColorScheme } from '@growi/core';
 import { ColorScheme } from '@growi/core';
 import { isClient } from '@growi/core/dist/utils';
 import { isClient } from '@growi/core/dist/utils';
+import type { ThemeProviderProps, UseThemeProps } from 'next-themes';
 import { ThemeProvider, useTheme } from 'next-themes';
 import { ThemeProvider, useTheme } from 'next-themes';
-import type { ThemeProviderProps, UseThemeProps } from 'next-themes/dist/types';
 
 
 import { useForcedColorScheme } from '~/states/global';
 import { useForcedColorScheme } from '~/states/global';
 
 

+ 17 - 7
apps/app/src/stores/renderer.tsx

@@ -1,4 +1,4 @@
-import { useCallback, useEffect } from 'react';
+import { useCallback, useEffect, useRef } from 'react';
 import type { HtmlElementNode } from 'rehype-toc';
 import type { HtmlElementNode } from 'rehype-toc';
 import useSWR, { type SWRConfiguration, type SWRResponse } from 'swr';
 import useSWR, { type SWRConfiguration, type SWRResponse } from 'swr';
 
 
@@ -30,12 +30,22 @@ export const useViewOptions = (): SWRResponse<RendererOptions, Error> => {
   const rendererConfig = useRendererConfigExt();
   const rendererConfig = useRendererConfigExt();
   const setTocNode = useSetTocNode();
   const setTocNode = useSetTocNode();
 
 
-  const storeTocNodeHandler = useCallback(
-    (toc: HtmlElementNode) => {
-      setTocNode(toc);
-    },
-    [setTocNode],
-  );
+  // Store TOC node in a ref during render phase (called by rehype plugin inside ReactMarkdown),
+  // then sync to atom after commit to avoid "Cannot update a component while rendering a different component"
+  const pendingTocNodeRef = useRef<HtmlElementNode | null>(null);
+
+  const storeTocNodeHandler = useCallback((toc: HtmlElementNode) => {
+    pendingTocNodeRef.current = toc;
+  }, []);
+
+  // No dependency array: runs after every render because the ref mutation
+  // is invisible to React's dependency tracking
+  useEffect(() => {
+    if (pendingTocNodeRef.current != null) {
+      setTocNode(pendingTocNodeRef.current);
+      pendingTocNodeRef.current = null;
+    }
+  });
 
 
   const isAllDataValid = currentPagePath != null && rendererConfig != null;
   const isAllDataValid = currentPagePath != null && rendererConfig != null;
   const customGenerater =
   const customGenerater =

+ 1 - 1
apps/app/tsconfig.build.client.json

@@ -1,7 +1,7 @@
 {
 {
   "$schema": "http://json.schemastore.org/tsconfig",
   "$schema": "http://json.schemastore.org/tsconfig",
   "extends": "./tsconfig.json",
   "extends": "./tsconfig.json",
-  "include": [".next/types/**/*.ts"],
+  "include": [".next/types/**/*.ts", "src/@types/**/*.d.ts"],
   "compilerOptions": {
   "compilerOptions": {
     "strict": false,
     "strict": false,
     "strictNullChecks": true,
     "strictNullChecks": true,

+ 2 - 3
apps/slackbot-proxy/package.json

@@ -68,8 +68,7 @@
   },
   },
   "// comments for devDependencies": {
   "// comments for devDependencies": {
     "@tsed/*": "v6.133.1 causes 'TypeError: Cannot read properties of undefined (reading 'prototype')' with `@Middleware()`",
     "@tsed/*": "v6.133.1 causes 'TypeError: Cannot read properties of undefined (reading 'prototype')' with `@Middleware()`",
-    "@tsed/core,exceptions": "force package to local node_modules in tsconfig.json since pnpm reads wrong hoisted tsed version (https://github.com/pnpm/pnpm/issues/7158)",
-    "bootstrap": "v5.3.3 has a bug. refs: https://github.com/twbs/bootstrap/issues/39798"
+    "@tsed/core,exceptions": "force package to local node_modules in tsconfig.json since pnpm reads wrong hoisted tsed version (https://github.com/pnpm/pnpm/issues/7158)"
   },
   },
   "devDependencies": {
   "devDependencies": {
     "@popperjs/core": "^2.11.8",
     "@popperjs/core": "^2.11.8",
@@ -77,7 +76,7 @@
     "@tsed/exceptions": "=6.43.0",
     "@tsed/exceptions": "=6.43.0",
     "@tsed/json-mapper": "=6.43.0",
     "@tsed/json-mapper": "=6.43.0",
     "@types/bunyan": "^1.8.11",
     "@types/bunyan": "^1.8.11",
-    "bootstrap": "=5.3.2",
+    "bootstrap": "^5.3.8",
     "browser-bunyan": "^1.6.3",
     "browser-bunyan": "^1.6.3",
     "morgan": "^1.10.0"
     "morgan": "^1.10.0"
   }
   }

+ 3 - 3
package.json

@@ -79,12 +79,12 @@
     "stylelint-config-recommended-scss": "^14.0.0",
     "stylelint-config-recommended-scss": "^14.0.0",
     "ts-deepmerge": "^6.2.0",
     "ts-deepmerge": "^6.2.0",
     "ts-node": "^10.9.2",
     "ts-node": "^10.9.2",
-    "ts-patch": "^3.2.0",
+    "ts-patch": "^3.3.0",
     "tsconfig-paths": "^4.2.0",
     "tsconfig-paths": "^4.2.0",
     "tspc": "^1.1.2",
     "tspc": "^1.1.2",
     "turbo": "^2.1.3",
     "turbo": "^2.1.3",
-    "typescript": "~5.0.0",
-    "typescript-transform-paths": "^3.4.7",
+    "typescript": "^5.9.3",
+    "typescript-transform-paths": "^3.5.6",
     "vite": "^5.4.21",
     "vite": "^5.4.21",
     "vite-plugin-dts": "^3.9.1",
     "vite-plugin-dts": "^3.9.1",
     "vite-tsconfig-paths": "^5.0.1",
     "vite-tsconfig-paths": "^5.0.1",

+ 2 - 4
packages/core-styles/package.json

@@ -17,11 +17,9 @@
     "lint": "npm-run-all -p lint:*"
     "lint": "npm-run-all -p lint:*"
   },
   },
   "dependencies": {},
   "dependencies": {},
-  "// comments for defDependencies": {
-    "bootstrap": "v5.3.3 has a bug. refs: https://github.com/twbs/bootstrap/issues/39798"
-  },
+  "// comments for defDependencies": {},
   "devDependencies": {
   "devDependencies": {
-    "bootstrap": "=5.3.2"
+    "bootstrap": "^5.3.8"
   },
   },
   "peerDependencies": {
   "peerDependencies": {
     "@popperjs/core": "^2.11.8"
     "@popperjs/core": "^2.11.8"

+ 6 - 0
packages/core/CHANGELOG.md

@@ -1,5 +1,11 @@
 # @growi/core
 # @growi/core
 
 
+## 2.2.0
+
+### Minor Changes
+
+- [#10889](https://github.com/growilabs/growi/pull/10889) [`d4be7e6`](https://github.com/growilabs/growi/commit/d4be7e68c497f168a5e39688ce0ef3760a62c98f) Thanks [@yuki-takei](https://github.com/yuki-takei)! - add YJS_WEBSOCKET_BASE_PATH
+
 ## 2.1.0
 ## 2.1.0
 
 
 ### Minor Changes
 ### Minor Changes

+ 3 - 6
packages/core/package.json

@@ -1,6 +1,6 @@
 {
 {
   "name": "@growi/core",
   "name": "@growi/core",
-  "version": "2.1.0",
+  "version": "2.2.0",
   "description": "GROWI Core Libraries",
   "description": "GROWI Core Libraries",
   "license": "MIT",
   "license": "MIT",
   "keywords": [
   "keywords": [
@@ -69,12 +69,9 @@
     "lint": "npm-run-all -p lint:*",
     "lint": "npm-run-all -p lint:*",
     "test": "vitest run --coverage"
     "test": "vitest run --coverage"
   },
   },
-  "// comments for dependencies": {
-    "escape-string-regexp": "5.0.0 or above exports only ESM"
-  },
+  "// comments for dependencies": {},
   "dependencies": {
   "dependencies": {
-    "bson-objectid": "^2.0.4",
-    "escape-string-regexp": "^4.0.0"
+    "bson-objectid": "^2.0.4"
   },
   },
   "devDependencies": {
   "devDependencies": {
     "@types/express": "^4",
     "@types/express": "^4",

+ 5 - 0
packages/core/src/consts/ydoc-status.ts

@@ -13,3 +13,8 @@ export const YDocStatus = {
   ISOLATED: 'isolated',
   ISOLATED: 'isolated',
 } as const;
 } as const;
 export type YDocStatus = (typeof YDocStatus)[keyof typeof YDocStatus];
 export type YDocStatus = (typeof YDocStatus)[keyof typeof YDocStatus];
+
+/**
+ * The base path for Yjs WebSocket connections.
+ */
+export const YJS_WEBSOCKET_BASE_PATH = '/yjs';

+ 9 - 0
packages/core/src/index.ts

@@ -1,2 +1,11 @@
 export * from './consts';
 export * from './consts';
 export * from './interfaces';
 export * from './interfaces';
+
+// Type declaration for RegExp.escape() (ES2026, Stage 4)
+// Available natively in Node.js 24+ (V8 13.x+)
+// Can be removed once TypeScript adds built-in support
+declare global {
+  interface RegExpConstructor {
+    escape(str: string): string;
+  }
+}

+ 2 - 2
packages/core/src/utils/page-path-utils/generate-children-regexp.spec.ts

@@ -18,7 +18,7 @@ describe('generateChildrenRegExp', () => {
     },
     },
     {
     {
       path: '/parent (with brackets)',
       path: '/parent (with brackets)',
-      expected: '^\\/parent \\(with brackets\\)(\\/[^/]+)\\/?$',
+      expected: '^\\/parent\\x20\\(with\\x20brackets\\)(\\/[^/]+)\\/?$',
       validPaths: [
       validPaths: [
         '/parent (with brackets)/child',
         '/parent (with brackets)/child',
         '/parent (with brackets)/test',
         '/parent (with brackets)/test',
@@ -30,7 +30,7 @@ describe('generateChildrenRegExp', () => {
     },
     },
     {
     {
       path: '/parent[with square]',
       path: '/parent[with square]',
-      expected: '^\\/parent\\[with square\\](\\/[^/]+)\\/?$',
+      expected: '^\\/parent\\[with\\x20square\\](\\/[^/]+)\\/?$',
       validPaths: ['/parent[with square]/child', '/parent[with square]/test'],
       validPaths: ['/parent[with square]/child', '/parent[with square]/test'],
       invalidPaths: [
       invalidPaths: [
         '/parent[with square]',
         '/parent[with square]',

+ 1 - 3
packages/core/src/utils/page-path-utils/generate-children-regexp.ts

@@ -1,5 +1,3 @@
-import escapeStringRegexp from 'escape-string-regexp';
-
 import { isTopPage } from './is-top-page';
 import { isTopPage } from './is-top-page';
 
 
 /**
 /**
@@ -12,5 +10,5 @@ export const generateChildrenRegExp = (path: string): RegExp => {
 
 
   // https://regex101.com/r/mrDJrx/1
   // https://regex101.com/r/mrDJrx/1
   // ex. /parent/any_child OR /any_level1
   // ex. /parent/any_child OR /any_level1
-  return new RegExp(`^${escapeStringRegexp(path)}(\\/[^/]+)\\/?$`);
+  return new RegExp(`^${RegExp.escape(path)}(\\/[^/]+)\\/?$`);
 };
 };

+ 4 - 9
packages/core/src/utils/page-path-utils/index.ts

@@ -1,5 +1,3 @@
-import escapeStringRegexp from 'escape-string-regexp';
-
 import { isValidObjectId } from '../objectid-utils';
 import { isValidObjectId } from '../objectid-utils';
 import { addTrailingSlash } from '../path-utils';
 import { addTrailingSlash } from '../path-utils';
 import { isTopPage as _isTopPage } from './is-top-page';
 import { isTopPage as _isTopPage } from './is-top-page';
@@ -149,7 +147,7 @@ export const convertToNewAffiliationPath = (
   if (newPath == null) {
   if (newPath == null) {
     throw new Error('Please input the new page path');
     throw new Error('Please input the new page path');
   }
   }
-  const pathRegExp = new RegExp(`^${escapeStringRegexp(oldPath)}`, 'i');
+  const pathRegExp = new RegExp(`^${RegExp.escape(oldPath)}`, 'i');
   return childPath.replace(pathRegExp, newPath);
   return childPath.replace(pathRegExp, newPath);
 };
 };
 
 
@@ -239,8 +237,8 @@ export const isEitherOfPathAreaOverlap = (
   const path1WithSlash = addTrailingSlash(path1);
   const path1WithSlash = addTrailingSlash(path1);
   const path2WithSlash = addTrailingSlash(path2);
   const path2WithSlash = addTrailingSlash(path2);
 
 
-  const path1Area = new RegExp(`^${escapeStringRegexp(path1WithSlash)}`, 'i');
-  const path2Area = new RegExp(`^${escapeStringRegexp(path2WithSlash)}`, 'i');
+  const path1Area = new RegExp(`^${RegExp.escape(path1WithSlash)}`, 'i');
+  const path2Area = new RegExp(`^${RegExp.escape(path2WithSlash)}`, 'i');
 
 
   if (path1Area.test(path2) || path2Area.test(path1)) {
   if (path1Area.test(path2) || path2Area.test(path1)) {
     return true;
     return true;
@@ -266,10 +264,7 @@ export const isPathAreaOverlap = (
 
 
   const pathWithSlash = addTrailingSlash(pathToTest);
   const pathWithSlash = addTrailingSlash(pathToTest);
 
 
-  const pathAreaToTest = new RegExp(
-    `^${escapeStringRegexp(pathWithSlash)}`,
-    'i',
-  );
+  const pathAreaToTest = new RegExp(`^${RegExp.escape(pathWithSlash)}`, 'i');
   if (pathAreaToTest.test(pathToBeTested)) {
   if (pathAreaToTest.test(pathToBeTested)) {
     return true;
     return true;
   }
   }

Vissa filer visades inte eftersom för många filer har ändrats