فهرست منبع

Merge branch 'master' into fix/178290-bulk-export-aws-minimal-2

Yuki Takei 1 هفته پیش
والد
کامیت
6839df7588
100 فایل تغییر یافته به همراه 7217 افزوده شده و 766 حذف شده
  1. 0 144
      .claude/agents/security-reviewer.md
  2. 83 0
      .claude/commands/create-next-version-branch.md
  3. 307 0
      .claude/commands/kiro/spec-cleanup.md
  4. 40 4
      .claude/commands/learn.md
  5. 107 6
      .claude/commands/tdd.md
  6. 22 0
      .claude/hooks/session-start.sh
  7. 38 0
      .claude/rules/testing.md
  8. 49 2
      .claude/settings.json
  9. 0 27
      .claude/skills/learned/.gitkeep
  10. 122 0
      .claude/skills/learned/essential-test-design/SKILL.md
  11. 68 10
      .claude/skills/learned/essential-test-patterns/SKILL.md
  12. 22 4
      .claude/skills/monorepo-overview/SKILL.md
  13. 11 3
      .claude/skills/tech-stack/SKILL.md
  14. 5 2
      .devcontainer/app/devcontainer.json
  15. 4 0
      .devcontainer/pdf-converter/devcontainer.json
  16. 5 5
      .github/mergify.yml
  17. 15 32
      .github/workflows/ci-app-prod.yml
  18. 3 3
      .github/workflows/ci-app.yml
  19. 4 4
      .github/workflows/ci-pdf-converter.yml
  20. 4 4
      .github/workflows/ci-slackbot-proxy.yml
  21. 2 2
      .github/workflows/release-rc.yml
  22. 2 2
      .github/workflows/release-subpackages.yml
  23. 1 1
      .github/workflows/reusable-app-build-image.yml
  24. 22 27
      .github/workflows/reusable-app-prod.yml
  25. 1 0
      .gitignore
  26. 268 0
      .kiro/specs/collaborative-editor/design.md
  27. 79 0
      .kiro/specs/collaborative-editor/requirements.md
  28. 69 0
      .kiro/specs/collaborative-editor/research.md
  29. 22 0
      .kiro/specs/collaborative-editor/spec.json
  30. 3 0
      .kiro/specs/collaborative-editor/tasks.md
  31. 153 0
      .kiro/specs/hotkeys/design.md
  32. 101 0
      .kiro/specs/hotkeys/requirements.md
  33. 23 0
      .kiro/specs/hotkeys/spec.json
  34. 29 0
      .kiro/specs/hotkeys/tasks.md
  35. 764 0
      .kiro/specs/oauth2-email-support/design.md
  36. 57 0
      .kiro/specs/oauth2-email-support/requirements.md
  37. 449 0
      .kiro/specs/oauth2-email-support/research.md
  38. 23 0
      .kiro/specs/oauth2-email-support/spec.json
  39. 41 0
      .kiro/specs/oauth2-email-support/tasks.md
  40. 233 0
      .kiro/specs/official-docker-image/design.md
  41. 82 0
      .kiro/specs/official-docker-image/requirements.md
  42. 288 0
      .kiro/specs/official-docker-image/research.md
  43. 22 0
      .kiro/specs/official-docker-image/spec.json
  44. 193 0
      .kiro/specs/official-docker-image/tasks.md
  45. 284 0
      .kiro/specs/presentation/design.md
  46. 26 0
      .kiro/specs/presentation/requirements.md
  47. 84 0
      .kiro/specs/presentation/research.md
  48. 23 0
      .kiro/specs/presentation/spec.json
  49. 262 0
      .kiro/specs/upgrade-fixed-packages/design.md
  50. 75 0
      .kiro/specs/upgrade-fixed-packages/requirements.md
  51. 183 0
      .kiro/specs/upgrade-fixed-packages/research.md
  52. 22 0
      .kiro/specs/upgrade-fixed-packages/spec.json
  53. 89 0
      .kiro/specs/upgrade-fixed-packages/tasks.md
  54. 14 8
      .kiro/steering/structure.md
  55. 4 18
      .kiro/steering/tdd.md
  56. 53 9
      .kiro/steering/tech.md
  57. 1 21
      .mcp.json
  58. 0 2
      .npmrc
  59. 64 8
      .serena/project.yml
  60. 3 6
      AGENTS.md
  61. 48 1
      CHANGELOG.md
  62. 2 0
      CLAUDE.md
  63. 1 1
      README.md
  64. 1 1
      README_JP.md
  65. 74 0
      apps/app/.claude/rules/package-dependencies.md
  66. 47 2
      apps/app/.claude/skills/app-commands/SKILL.md
  67. 1 1
      apps/app/.claude/skills/app-specific-patterns/SKILL.md
  68. 127 0
      apps/app/.claude/skills/build-optimization/SKILL.md
  69. 90 0
      apps/app/.claude/skills/learned/fix-broken-next-symlinks/SKILL.md
  70. 302 0
      apps/app/.claude/skills/learned/page-save-origin-semantics/SKILL.md
  71. 116 0
      apps/app/.claude/skills/vendor-styles-components/SKILL.md
  72. 2 0
      apps/app/.env.development
  73. 4 0
      apps/app/.gitignore
  74. 13 1
      apps/app/AGENTS.md
  75. 18 0
      apps/app/CLAUDE.md
  76. 30 0
      apps/app/bin/assemble-prod.sh
  77. 44 0
      apps/app/bin/check-next-symlinks.sh
  78. 60 0
      apps/app/bin/measure-chunk-stats.sh
  79. 13 11
      apps/app/bin/openapi/definition-apiv3.js
  80. 2 8
      apps/app/config/next-i18next.config.js
  81. 79 51
      apps/app/docker/Dockerfile
  82. 74 4
      apps/app/docker/Dockerfile.dockerignore
  83. 11 1
      apps/app/docker/README.md
  84. 2 0
      apps/app/docker/codebuild/buildspec.yml
  85. 0 18
      apps/app/docker/docker-entrypoint.sh
  86. 358 0
      apps/app/docker/docker-entrypoint.spec.ts
  87. 265 0
      apps/app/docker/docker-entrypoint.ts
  88. 0 5
      apps/app/next-env.d.ts
  89. 0 173
      apps/app/next.config.js
  90. 26 0
      apps/app/next.config.prod.cjs
  91. 146 0
      apps/app/next.config.ts
  92. 61 53
      apps/app/package.json
  93. 1 1
      apps/app/playwright.config.ts
  94. 21 0
      apps/app/public/static/locales/en_US/admin.json
  95. 48 27
      apps/app/public/static/locales/fr_FR/admin.json
  96. 3 1
      apps/app/public/static/locales/fr_FR/commons.json
  97. 53 52
      apps/app/public/static/locales/fr_FR/translation.json
  98. 9 0
      apps/app/public/static/locales/ja_JP/admin.json
  99. 21 0
      apps/app/public/static/locales/ko_KR/admin.json
  100. 21 0
      apps/app/public/static/locales/zh_CN/admin.json

+ 0 - 144
.claude/agents/security-reviewer.md

@@ -1,144 +0,0 @@
----
-name: security-reviewer
-description: Security vulnerability detection specialist for GROWI. Use after writing code that handles user input, authentication, API endpoints, or sensitive data. Flags secrets, injection, XSS, and OWASP Top 10 vulnerabilities.
-tools: Read, Write, Edit, Bash, Grep, Glob
-model: opus
----
-
-# Security Reviewer
-
-You are a security specialist focused on identifying vulnerabilities in the GROWI codebase. Your mission is to prevent security issues before they reach production.
-
-## GROWI Security Stack
-
-GROWI uses these security measures:
-- **helmet**: Security headers
-- **express-mongo-sanitize**: NoSQL injection prevention
-- **xss**, **rehype-sanitize**: XSS prevention
-- **Passport.js**: Authentication (Local, LDAP, SAML, OAuth)
-
-## Security Review Workflow
-
-### 1. Automated Checks
-```bash
-# Check for vulnerable dependencies
-pnpm audit
-
-# Search for potential secrets
-grep -r "api[_-]?key\|password\|secret\|token" --include="*.ts" --include="*.tsx" .
-```
-
-### 2. OWASP Top 10 Checklist
-
-1. **Injection (NoSQL)** - Are Mongoose queries safe? No string concatenation in queries?
-2. **Broken Authentication** - Passwords hashed? Sessions secure? Passport configured correctly?
-3. **Sensitive Data Exposure** - Secrets in env vars? HTTPS enforced? Logs sanitized?
-4. **Broken Access Control** - Authorization on all routes? CORS configured?
-5. **Security Misconfiguration** - Helmet enabled? Debug mode off in production?
-6. **XSS** - Output escaped? Content-Security-Policy set?
-7. **Components with Vulnerabilities** - `pnpm audit` clean?
-8. **Insufficient Logging** - Security events logged?
-
-## Vulnerability Patterns
-
-### Hardcoded Secrets (CRITICAL)
-```typescript
-// ❌ CRITICAL
-const apiKey = "sk-xxxxx"
-
-// ✅ CORRECT
-const apiKey = process.env.API_KEY
-```
-
-### NoSQL Injection (CRITICAL)
-```typescript
-// ❌ CRITICAL: Unsafe query
-const user = await User.findOne({ email: req.body.email, password: req.body.password })
-
-// ✅ CORRECT: Use express-mongo-sanitize middleware + validate input
-```
-
-### XSS (HIGH)
-```typescript
-// ❌ HIGH: Direct HTML insertion
-element.innerHTML = userInput
-
-// ✅ CORRECT: Use textContent or sanitize
-element.textContent = userInput
-// OR use xss library
-import xss from 'xss'
-element.innerHTML = xss(userInput)
-```
-
-### SSRF (HIGH)
-```typescript
-// ❌ HIGH: User-controlled URL
-const response = await fetch(userProvidedUrl)
-
-// ✅ CORRECT: Validate URL against allowlist
-const allowedDomains = ['api.example.com']
-const url = new URL(userProvidedUrl)
-if (!allowedDomains.includes(url.hostname)) {
-  throw new Error('Invalid URL')
-}
-```
-
-### Authorization Check (CRITICAL)
-```typescript
-// ❌ CRITICAL: No authorization
-app.get('/api/page/:id', async (req, res) => {
-  const page = await Page.findById(req.params.id)
-  res.json(page)
-})
-
-// ✅ CORRECT: Check user access
-app.get('/api/page/:id', loginRequired, async (req, res) => {
-  const page = await Page.findById(req.params.id)
-  if (!page.isAccessibleBy(req.user)) {
-    return res.status(403).json({ error: 'Forbidden' })
-  }
-  res.json(page)
-})
-```
-
-## Security Report Format
-
-```markdown
-## Security Review Summary
-- **Critical Issues:** X
-- **High Issues:** Y
-- **Risk Level:** 🔴 HIGH / 🟡 MEDIUM / 🟢 LOW
-
-### Issues Found
-1. **[SEVERITY]** Description @ `file:line`
-   - Impact: ...
-   - Fix: ...
-```
-
-## When to Review
-
-**ALWAYS review when:**
-- New API endpoints added
-- Authentication/authorization changed
-- User input handling added
-- Database queries modified
-- File upload features added
-- Dependencies updated
-
-## Best Practices
-
-1. **Defense in Depth** - Multiple security layers
-2. **Least Privilege** - Minimum permissions
-3. **Fail Securely** - Errors don't expose data
-4. **Separation of Concerns** - Isolate security-critical code
-5. **Keep it Simple** - Complex code has more vulnerabilities
-6. **Don't Trust Input** - Validate everything
-7. **Update Regularly** - Keep dependencies current
-
-## Emergency Response
-
-If CRITICAL vulnerability found:
-1. Document the issue
-2. Provide secure code fix
-3. Check if vulnerability was exploited
-4. Rotate any exposed secrets

+ 83 - 0
.claude/commands/create-next-version-branch.md

@@ -0,0 +1,83 @@
+---
+name: create-next-version-branch
+description: Create development and release branches with GitHub Release for the next version. Usage: /create-next-version-branch dev/{major}.{minor}.x
+---
+
+# Create Next Version Branch
+
+Automate the creation of development branches and GitHub Release for a new GROWI version.
+
+## Input
+
+The argument `$ARGUMENTS` must be a branch name in the format `dev/{major}.{minor}.x` (e.g., `dev/7.5.x`).
+
+## Procedure
+
+### Step 1: Parse and Validate Input
+
+1. Parse `$ARGUMENTS` to extract `{major}` and `{minor}` from the `dev/{major}.{minor}.x` pattern
+2. If the format is invalid, display an error and stop:
+   - Must match `dev/{number}.{number}.x`
+3. Set the following variables:
+   - `DEV_BRANCH`: `dev/{major}.{minor}.x`
+   - `RELEASE_BRANCH`: `release/{major}.{minor}.x`
+   - `TAG_NAME`: `v{major}.{minor}.x-base`
+   - `RELEASE_TITLE`: `v{major}.{minor}.x Base Release`
+
+### Step 2: Create and Push the Development Branch
+
+1. Confirm with the user before proceeding
+2. Create and push `DEV_BRANCH` from the current HEAD:
+   ```bash
+   git checkout -b {DEV_BRANCH}
+   git push origin {DEV_BRANCH}
+   ```
+
+### Step 3: Create GitHub Release
+
+1. Create a GitHub Release using `gh release create`:
+   ```bash
+   gh release create {TAG_NAME} \
+     --target {DEV_BRANCH} \
+     --title "{RELEASE_TITLE}" \
+     --notes "The base release for release-drafter to avoid \`Error: GraphQL Rate Limit Exceeded\`
+   https://github.com/release-drafter/release-drafter/issues/1018" \
+     --latest=false \
+     --prerelease=false
+   ```
+   - `--latest=false`: Do NOT set as latest release
+   - `--prerelease=false`: Do NOT set as pre-release
+
+### Step 4: Verify targetCommitish
+
+1. Run the following command and confirm that `targetCommitish` equals `DEV_BRANCH`:
+   ```bash
+   gh release view {TAG_NAME} --json targetCommitish
+   ```
+2. If `targetCommitish` does not match, display an error and stop
+
+### Step 5: Create and Push the Release Branch
+
+1. From the same commit (still on `DEV_BRANCH`), create and push `RELEASE_BRANCH`:
+   ```bash
+   git checkout -b {RELEASE_BRANCH}
+   git push origin {RELEASE_BRANCH}
+   ```
+
+### Step 6: Summary
+
+Display a summary of all created resources:
+
+```
+Created:
+  - Branch: {DEV_BRANCH} (pushed to origin)
+  - Branch: {RELEASE_BRANCH} (pushed to origin)
+  - GitHub Release: {RELEASE_TITLE} (tag: {TAG_NAME}, target: {DEV_BRANCH})
+```
+
+## Error Handling
+
+- If `DEV_BRANCH` already exists on the remote, warn the user and ask how to proceed
+- If `RELEASE_BRANCH` already exists on the remote, warn the user and ask how to proceed
+- If the tag `TAG_NAME` already exists, warn the user and ask how to proceed
+- If `gh` CLI is not authenticated, instruct the user to run `gh auth login`

+ 307 - 0
.claude/commands/kiro/spec-cleanup.md

@@ -0,0 +1,307 @@
+---
+description: Organize and clean up specification documents after implementation completion
+allowed-tools: Bash, Glob, Grep, Read, Write, Edit, MultiEdit, Update
+argument-hint: <feature-name>
+---
+
+# Specification Cleanup
+
+<background_information>
+- **Mission**: Organize specification documents after implementation completion, removing implementation details while preserving essential context for future refactoring
+- **Success Criteria**:
+  - Implementation details (testing procedures, deployment checklists) removed
+  - Design decisions and constraints preserved in research.md and design.md
+  - Requirements simplified (Acceptance Criteria condensed to summaries)
+  - Unimplemented features removed or documented
+  - Documents remain valuable for future refactoring work
+</background_information>
+
+<instructions>
+## Core Task
+Clean up and organize specification documents for feature **$1** after implementation is complete.
+
+## Organizing Principle
+
+**"Can we read essential context from these spec documents when refactoring this feature months later?"**
+
+- **Keep**: "Why" (design decisions, architectural constraints, limitations, trade-offs)
+- **Remove**: "How" (testing procedures, deployment steps, detailed implementation examples)
+
+## Execution Steps
+
+### Step 1: Load Context
+
+**Discover all spec files**:
+- Use Glob to find all files in `.kiro/specs/$1/` directory
+- Categorize files:
+  - **Core files** (must preserve): `spec.json`, `requirements.md`, `design.md`, `tasks.md`, `research.md`
+  - **Other files** (evaluate case-by-case): validation reports, notes, prototypes, migration guides, etc.
+
+**Read all discovered files**:
+- Read all core files first
+- Read other files to understand their content and value
+
+**Determine target language**:
+- Read `spec.json` and extract the `language` field (e.g., `"ja"`, `"en"`)
+- This is the language ALL spec document content must be written in
+- Note: code comments within code blocks are exempt (must stay in English per project rules)
+
+**Verify implementation status**:
+- Check that tasks are marked complete `[x]` in tasks.md
+- If implementation incomplete, warn user and ask to confirm cleanup
+
+### Step 2: Analyze Current State
+
+**Identify cleanup opportunities**:
+
+1. **Other files** (non-core files like validation-report.md, notes.md, etc.):
+   - Read each file to understand its content and purpose
+   - Identify valuable information that should be preserved:
+     * Implementation discoveries and lessons learned
+     * Critical constraints or design decisions
+     * Historical context for future refactoring
+   - Determine salvage strategy:
+     * Migrate valuable content to research.md or design.md
+     * Keep file if it contains essential reference information
+     * Delete if content is redundant or no longer relevant
+   - **Case-by-case evaluation required** - never assume files should be deleted
+
+2. **research.md**:
+   - Should contain production discoveries and implementation lessons learned
+   - Check if implementation revealed new constraints or patterns to document
+   - Identify content from other files that should be migrated here
+
+3. **requirements.md**:
+   - Identify verbose Acceptance Criteria that can be condensed to summaries
+   - Find unimplemented requirements (compare with tasks.md)
+   - Detect duplicate or redundant content
+
+4. **design.md**:
+   - Identify implementation-specific sections that can be removed:
+     * Detailed Testing Strategy (test procedures)
+     * Security Considerations (if covered in implementation)
+     * Error Handling code examples (if implemented)
+     * Migration Strategy (after migration complete)
+     * Deployment Checklist (after deployment)
+   - Identify sections to preserve:
+     * Architecture diagrams (essential for understanding)
+     * Component interfaces (API contracts)
+     * Design decisions and rationale
+     * Critical implementation constraints
+     * Known limitations
+   - Check if content from other files should be migrated here
+
+5. **Language audit** (compare actual language vs. `spec.json.language`):
+   - For each markdown file, scan prose content (headings, paragraphs, list items) and detect the written language
+   - Flag any file or section whose language does **not** match the target language
+   - Exemptions — do NOT flag:
+     * Content inside fenced code blocks (` ``` `) — code comments must stay in English
+     * Inline code spans (`` `...` ``)
+     * Proper nouns, technical terms, and identifiers that are always written in English
+   - Collect flagged items into a **translation plan**: file name, approximate line range, detected language, and a brief excerpt
+
+### Step 3: Interactive Confirmation
+
+**Present cleanup plan to user**:
+
+For each file and section identified in Step 2, ask:
+- "Should I delete/simplify/keep/salvage this section?"
+- Provide recommendations based on organizing principle
+- Show brief preview of content to aid decision
+
+**Example questions for other files**:
+- "validation-report.md found. Contains {brief summary}. Options:"
+  - "A: Migrate valuable content to research.md, then delete"
+  - "B: Keep as historical reference"
+  - "C: Delete (content no longer needed)"
+- "notes.md found. Contains {brief summary}. Salvage to research.md before deleting? [Y/n]"
+
+**Example questions for core files**:
+- "research.md: Add 'Session N: Production Discoveries' section to document implementation lessons? [Y/n]"
+- "requirements.md: Simplify Acceptance Criteria from detailed bullet points to summary paragraphs? [Y/n]"
+- "requirements.md: Remove unimplemented requirements (e.g., Req 4.4 field masking not implemented)? [Y/n]"
+- "design.md: Delete 'Testing Strategy' section (lines X-Y)? [Y/n]"
+- "design.md: Delete 'Security Considerations' section (lines X-Y)? [Y/n]"
+- "design.md: Keep Architecture diagrams (essential for refactoring)? [Y/n]"
+
+**Translation confirmation** (if language mismatches were found in Step 2):
+- Show summary: "Found content in language(s) other than `{target_language}` in the following files:"
+  - List each flagged file with line range and a short excerpt
+- Ask: "Translate mismatched content to `{target_language}`? [Y/n]"
+  - If Y: translate all flagged sections in Step 4
+  - If n: skip translation (leave files as-is)
+- Note: code blocks are never translated
+
+**Batch similar decisions**:
+- Group related sections (e.g., all "delete implementation details" decisions)
+- Allow user to approve categories rather than individual items
+- Present file-by-file salvage decisions for other files
+
+### Step 4: Execute Cleanup
+
+**For each approved action**:
+
+1. **Salvage and cleanup other files** (if approved):
+   - For each non-core file (validation-report.md, notes.md, etc.):
+     * Extract valuable information (implementation lessons, constraints, decisions)
+     * Migrate content to appropriate core file:
+       - Technical discoveries → research.md
+       - Design constraints → design.md
+       - Requirement clarifications → requirements.md
+     * Delete file after salvage (if approved)
+   - Document salvaged content with source reference (e.g., "From validation-report.md:")
+
+2. **Update research.md** (if new discoveries or salvaged content):
+   - Add new section "Session N: Production Implementation Discoveries" (if needed)
+   - Document critical technical constraints discovered during implementation
+   - Include code examples for critical patterns (e.g., falsy checks, credential preservation)
+   - Integrate salvaged content from other files
+   - Cross-reference requirements.md and design.md where relevant
+
+3. **Simplify requirements.md** (if approved):
+   - Transform detailed Acceptance Criteria into summary paragraphs
+   - Remove unimplemented requirements entirely
+   - Preserve requirement objectives and summaries
+   - Example transformation:
+     ```
+     Before: "1. System shall X... 2. System shall Y... [7 criteria]"
+     After: "**Summary**: System provides X and Y. Configuration includes..."
+     ```
+
+4. **Clean up design.md** (if approved):
+   - Delete approved sections (Testing Strategy, Security Considerations, etc.)
+   - Add "Critical Implementation Constraints" section if implementation revealed new constraints
+   - Integrate salvaged content from other files (if relevant)
+   - Preserve architecture diagrams and component interfaces
+   - Keep design decisions and rationale sections
+
+5. **Translate language-mismatched content** (if approved):
+   - For each flagged file and section, translate prose content to the target language
+   - **Never translate**: content inside fenced code blocks or inline code spans
+   - Preserve all Markdown formatting (headings, bold, lists, links, etc.)
+   - After translation, verify the overall document reads naturally in the target language
+   - Document translated files in the cleanup summary
+
+6. **Update spec.json metadata**:
+   - Set `phase: "implementation-complete"` (if not already set)
+   - Add `cleanup_completed: true` flag
+   - Update `updated_at` timestamp
+
+### Step 5: Generate Cleanup Summary
+
+**Provide summary report**:
+- List of files modified/deleted
+- Sections removed and lines saved
+- Critical information preserved
+- Recommendations for future refactoring
+
+**Format**:
+```markdown
+## Cleanup Summary for {feature-name}
+
+### Files Modified
+- ✅ validation-report.md: Salvaged to research.md, then deleted (730 lines removed)
+- ✅ notes.md: Salvaged to design.md, then deleted (120 lines removed)
+- ✅ research.md: Added Session 2 discoveries + salvaged content (180 lines added)
+- ✅ requirements.md: Simplified 6 requirements (350 lines → 180 lines)
+- ✅ design.md: Removed 4 sections, added constraints + salvaged content (250 lines removed, 100 added)
+- ✅ requirements.md: Translated mismatched sections to {target_language}
+
+### Information Salvaged
+- Implementation discoveries from validation-report.md → research.md
+- Design notes from notes.md → design.md
+- Historical context preserved with source attribution
+
+### Information Preserved
+- Architecture diagrams and component interfaces
+- Design decisions and rationale
+- Critical implementation constraints
+- Known limitations and trade-offs
+
+### Next Steps
+- Spec documents ready for future refactoring reference
+- Consider creating knowledge base entry if pattern is reusable
+```
+
+## Critical Constraints
+
+- **User approval required**: Never delete content without explicit confirmation
+- **Language consistency**: All prose content must be written in the language specified in `spec.json.language`; translate any mismatched sections (code blocks exempt)
+- **Preserve history**: Don't delete discovery rationale or design decisions
+- **Balance brevity with completeness**: Remove redundancy but keep essential context
+- **Interactive workflow**: Pause for user input rather than making assumptions
+
+## Tool Guidance
+
+- **Glob**: Discover all files in `.kiro/specs/{feature}/` directory
+- **Read**: Load all discovered files for analysis
+- **Grep**: Search for patterns (e.g., unimplemented requirements, completed tasks)
+- **Edit/Write**: Update files based on approved changes, salvage content
+- **Bash**: Delete files after salvage (if approved)
+- **MultiEdit**: For batch edits across multiple sections
+
+## Output Description
+
+Provide cleanup plan and execution report in the language specified in spec.json.
+
+**Report Structure**:
+1. **Current State Analysis**: What needs cleanup and why
+2. **Cleanup Plan**: Proposed changes with recommendations
+3. **Confirmation Prompts**: Interactive questions for user approval
+4. **Execution Summary**: What was changed and why
+5. **Preserved Context**: What critical information remains for future refactoring
+
+**Format**: Clear, scannable format with sections and bullet points
+
+## Safety & Fallback
+
+### Error Scenarios
+
+**Implementation Incomplete**:
+- **Condition**: Less than 90% of tasks marked `[x]` in tasks.md
+- **Action**: Warn user: "Implementation appears incomplete (X/Y tasks done). Continue cleanup? [y/N]"
+- **Recommendation**: Wait until implementation complete before cleanup
+
+**Spec Not Found**:
+- **Message**: "No spec found for `$1`. Check available specs in `.kiro/specs/`"
+- **Action**: List available spec directories
+
+**Missing Critical Files**:
+- **Condition**: requirements.md or design.md missing
+- **Action**: Skip cleanup for missing files, proceed with available files
+- **Warning**: "requirements.md missing - cannot simplify requirements"
+
+### Dry Run Mode (Future Enhancement)
+
+**If `-n` or `--dry-run` flag provided**:
+- Show cleanup plan without executing changes
+- Allow user to review before committing to cleanup
+
+### Backup Recommendation
+
+**Before cleanup**:
+- Recommend user create git commit or backup
+- Warning: "This will modify spec files. Commit current state first? [Y/n]"
+
+### Undo Support
+
+**If cleanup goes wrong**:
+- Use git to restore previous state: `git checkout HEAD -- .kiro/specs/{feature}/`
+- Remind user to commit before cleanup for easy rollback
+
+## Example Usage
+
+```bash
+# Basic cleanup after implementation
+/kiro:spec-cleanup oauth2-email-support
+
+# With conversation context about implementation discoveries
+# Command will prompt for Session N discoveries to document
+/kiro:spec-cleanup user-authentication
+```
+
+## Related Commands
+
+- `/kiro:spec-impl {feature}` - Implement tasks (run before cleanup)
+- `/kiro:validate-impl {feature}` - Validate implementation (run before cleanup)
+- `/kiro:spec-status {feature}` - Check implementation status

+ 40 - 4
.claude/commands/learn.md

@@ -25,7 +25,18 @@ Focus on four key areas:
 
 ## Skill File Structure
 
-Extracted patterns are saved in `.claude/skills/learned/{topic-name}/SKILL.md` with:
+Extracted patterns are saved in **appropriate skill directories** based on the scope of the pattern:
+
+**Workspace-specific patterns**:
+- `apps/{workspace}/.claude/skills/learned/{topic-name}/SKILL.md`
+- `packages/{package}/.claude/skills/learned/{topic-name}/SKILL.md`
+- Examples: patterns specific to a single app or package
+
+**Global patterns** (monorepo-wide):
+- `.claude/skills/learned/{topic-name}/SKILL.md`
+- Examples: patterns applicable across all workspaces
+
+### File Template
 
 ```yaml
 ---
@@ -49,12 +60,19 @@ description: Brief description (auto-invoked when working on related code)
 ## GROWI-Specific Examples
 
 Topics commonly learned in GROWI development:
-- `virtualized-tree-patterns` — @headless-tree + @tanstack/react-virtual optimizations
+
+**Apps/app-specific** (`apps/app/.claude/skills/learned/`):
+- `page-save-origin-semantics` — Origin-based conflict detection for collaborative editing
 - `socket-jotai-integration` — Real-time state synchronization patterns
 - `api-v3-error-handling` — RESTful API error response patterns
-- `jotai-atom-composition` — Derived atoms and state composition
 - `mongodb-query-optimization` — Mongoose indexing and aggregation patterns
 
+**Global monorepo patterns** (`.claude/skills/learned/`):
+- `virtualized-tree-patterns` — @headless-tree + @tanstack/react-virtual optimizations (if used across apps)
+- `jotai-atom-composition` — Derived atoms and state composition (if shared pattern)
+- `turborepo-cache-invalidation` — Build cache debugging techniques
+- `pnpm-workspace-dependencies` — Workspace dependency resolution issues
+
 ## Quality Guidelines
 
 **Extract:**
@@ -74,7 +92,25 @@ Topics commonly learned in GROWI development:
 1. User triggers `/learn` after solving a complex problem
 2. Review the session to identify valuable patterns
 3. Draft skill file(s) with clear structure
-4. Save to `.claude/skills/learned/{topic-name}/SKILL.md`
+4. **Autonomously determine the appropriate directory**:
+   - Analyze the pattern's scope (which files/modules were involved)
+   - If pattern is specific to a workspace in `apps/*` or `packages/*`:
+     - Save to `{workspace}/.claude/skills/learned/{topic-name}/SKILL.md`
+   - If pattern is applicable across multiple workspaces:
+     - Save to `.claude/skills/learned/{topic-name}/SKILL.md`
 5. Skills automatically apply in future sessions when working on related code
 
+### Directory Selection Logic
+
+**Workspace-specific** (save to `{workspace}/.claude/skills/learned/`):
+- Pattern involves workspace-specific concepts, models, or APIs
+- References files primarily in one `apps/*` or `packages/*` directory
+- Example: Page save logic in `apps/app` → `apps/app/.claude/skills/learned/`
+
+**Global** (save to `.claude/skills/learned/`):
+- Pattern applies across multiple workspaces
+- Involves monorepo-wide tools (Turborepo, pnpm, Biome, Vitest)
+- Shared coding patterns or architectural principles
+- Example: Turborepo caching pitfall → `.claude/skills/learned/`
+
 Learned skills are automatically invoked based on their description when working on related code.

+ 107 - 6
.claude/commands/tdd.md

@@ -7,6 +7,71 @@ description: Enforce test-driven development workflow. Scaffold interfaces, gene
 
 This command enforces test-driven development methodology for GROWI.
 
+---
+
+## ⚠️ CRITICAL: MANDATORY TDD PROTOCOL
+
+**THIS IS NOT OPTIONAL. THIS IS A TEAM REQUIREMENT.**
+
+### Non-Negotiable Rules
+
+1. **ALL phases of TDD MUST be followed: RED → GREEN → REFACTOR**
+2. **Tests MUST be written BEFORE any implementation code (RED phase)**
+3. **Implementation MUST be minimal and only to pass tests (GREEN phase)**
+4. **Tests MUST remain passing during refactoring (REFACTOR phase)**
+5. **NEVER skip any phase of the TDD cycle**
+6. **If you believe you cannot follow TDD, STOP and ask the user IMMEDIATELY**
+
+### Violation Protocol
+
+If you find yourself about to deviate from the TDD cycle:
+
+1. **STOP IMMEDIATELY** - Do not proceed with any phase out of order
+2. **ASK THE USER** - Explain why you think following TDD is difficult
+3. **WAIT FOR APPROVAL** - Do not proceed without explicit user consent
+4. **DOCUMENT THE EXCEPTION** - If approved, document why TDD was skipped
+
+**Example of required user confirmation:**
+
+```
+"This task requires writing integration tests with complex setup
+(database mocks, authentication, etc.).
+
+I have two options:
+
+Option A: Follow strict TDD (recommended)
+- RED: Write failing integration test first
+- GREEN: Implement minimal code to pass
+- REFACTOR: Improve code while keeping tests green
+- May take 15-30 minutes for test setup
+- Ensures proper test coverage
+
+Option B: Skip TDD for this specific change
+- Implement directly
+- Verify with existing test suite only
+- Risk: No specific test for this change
+
+Which approach would you prefer?"
+```
+
+**Never assume. Always ask.**
+
+---
+
+## Pre-Implementation Checklist
+
+Before writing ANY code, confirm:
+
+- [ ] I have read and understood the task requirements
+- [ ] I am starting with Step 1: Write a FAILING test (RED phase)
+- [ ] I have NOT written any implementation code yet
+- [ ] I will follow all phases: RED → GREEN → REFACTOR
+- [ ] If I cannot follow the complete TDD cycle, I have ASKED THE USER for guidance
+
+**If any checkbox is unchecked, STOP and ask the user.**
+
+---
+
 ## What This Command Does
 
 1. **Scaffold Interfaces** - Define types/interfaces first
@@ -61,6 +126,8 @@ export function validatePagePath(path: string): ValidationResult {
 
 ## Step 2: Write Failing Test (RED)
 
+**⚠️ CHECKPOINT: Confirm you have NOT written any implementation code yet.**
+
 ```typescript
 // src/utils/page-path-validator.spec.ts
 describe('validatePagePath', () => {
@@ -84,6 +151,8 @@ describe('validatePagePath', () => {
 
 ## Step 3: Run Tests - Verify FAIL
 
+**⚠️ MANDATORY: Tests MUST fail before proceeding to implementation.**
+
 ```bash
 turbo run test --filter @growi/app -- src/utils/page-path-validator.spec.ts
 
@@ -92,10 +161,14 @@ FAIL src/utils/page-path-validator.spec.ts
     Error: Not implemented
 ```
 
-✅ Tests fail as expected. Ready to implement.
+**✅ CHECKPOINT PASSED: Tests fail as expected. Ready to implement.**
+
+**❌ If tests pass or don't run: STOP. Fix the test first.**
 
 ## Step 4: Implement Minimal Code (GREEN)
 
+**⚠️ CHECKPOINT: Only write the MINIMUM code needed to pass the tests.**
+
 ```typescript
 export function validatePagePath(path: string): ValidationResult {
   if (!path) {
@@ -110,20 +183,35 @@ export function validatePagePath(path: string): ValidationResult {
 
 ## Step 5: Run Tests - Verify PASS
 
+**⚠️ MANDATORY: ALL tests MUST pass before proceeding to refactoring.**
+
 ```bash
 turbo run test --filter @growi/app -- src/utils/page-path-validator.spec.ts
 
 PASS  ✓ All tests passing!
 ```
 
+**✅ CHECKPOINT PASSED: Ready to refactor if needed.**
+
+**❌ If tests fail: Fix implementation, do NOT move to refactoring.**
+
 ## Step 6: Check Coverage
 
+**⚠️ MANDATORY: Verify test coverage meets requirements (80% minimum).**
+
 ```bash
 cd {package_dir} && pnpm vitest run --coverage src/utils/page-path-validator.spec.ts
 
 Coverage: 100% ✅ (Target: 80%)
 ```
 
+**✅ TDD CYCLE COMPLETE: All phases completed successfully.**
+
+- ✅ RED: Failing tests written
+- ✅ GREEN: Implementation passes tests
+- ✅ REFACTOR: Code improved (if needed)
+- ✅ COVERAGE: 80%+ achieved
+
 ## TDD Best Practices
 
 **DO:**
@@ -172,13 +260,26 @@ Coverage: 100% ✅ (Target: 80%)
 
 ## Important Notes
 
-**MANDATORY**: Tests must be written BEFORE implementation. The TDD cycle is:
+**MANDATORY - NO EXCEPTIONS**: The complete TDD cycle MUST be followed:
+
+1. **RED** - Write failing test FIRST
+2. **GREEN** - Implement minimal code to pass the test
+3. **REFACTOR** - Improve code while keeping tests green
+
+**Absolute Requirements:**
+- ❌ NEVER skip the RED phase
+- ❌ NEVER skip the GREEN phase
+- ❌ NEVER skip the REFACTOR phase
+- ❌ NEVER write implementation code before tests
+- ❌ NEVER proceed without explicit user approval if you cannot follow TDD
 
-1. **RED** - Write failing test
-2. **GREEN** - Implement to pass
-3. **REFACTOR** - Improve code
+**If you violate these rules:**
+1. STOP immediately
+2. Discard any implementation code written before tests
+3. Inform the user of the violation
+4. Start over with RED phase
 
-Never skip the RED phase. Never write code before tests.
+**This is a team development standard. Violations are not acceptable.**
 
 ## Related Skills
 

+ 22 - 0
.claude/hooks/session-start.sh

@@ -0,0 +1,22 @@
+#!/bin/bash
+set -euo pipefail
+
+# Only run in remote (Claude Code on the web) environments
+if [ "${CLAUDE_CODE_REMOTE:-}" != "true" ]; then
+  exit 0
+fi
+
+cd "$CLAUDE_PROJECT_DIR"
+
+# Install all workspace dependencies.
+# turbo (root devDependency) and all workspace packages will be installed.
+pnpm install
+
+# Install turbo globally (mirrors devcontainer postCreateCommand.sh) so it is
+# available as a bare `turbo` command in subsequent Claude tool calls.
+# Falls back to adding node_modules/.bin to PATH if the pnpm global store is
+# not yet configured in this environment.
+if ! command -v turbo &> /dev/null; then
+  pnpm install turbo --global 2>/dev/null \
+    || echo "export PATH=\"$CLAUDE_PROJECT_DIR/node_modules/.bin:\$PATH\"" >> "$CLAUDE_ENV_FILE"
+fi

+ 38 - 0
.claude/rules/testing.md

@@ -0,0 +1,38 @@
+# Testing Rules
+
+## Package Manager (CRITICAL)
+
+**NEVER use `npx` to run tests. ALWAYS use `pnpm`.**
+
+```bash
+# ❌ WRONG
+npx vitest run yjs.integ
+
+# ✅ CORRECT
+pnpm vitest run yjs.integ
+```
+
+## Test Execution Commands
+
+### Individual Test File (from package directory)
+
+```bash
+# Use partial file name - Vitest auto-matches
+pnpm vitest run yjs.integ
+pnpm vitest run helper.spec
+pnpm vitest run Button.spec
+
+# Flaky test detection
+pnpm vitest run yjs.integ --repeat=10
+```
+
+- Use **partial file name** (no `src/` prefix or full path needed)
+- No `--project` flag needed (Vitest auto-detects from file extension)
+
+### All Tests for a Package (from monorepo root)
+
+```bash
+turbo run test --filter @growi/app
+```
+
+For testing patterns (mocking, assertions, structure), see the `.claude/skills/learned/essential-test-patterns` skill.

+ 49 - 2
.claude/settings.json

@@ -1,5 +1,47 @@
 {
+  "permissions": {
+    "allow": [
+      "Bash(node --version)",
+      "Bash(npm --version)",
+      "Bash(npm view *)",
+      "Bash(pnpm --version)",
+      "Bash(turbo --version)",
+      "Bash(turbo run build)",
+      "Bash(turbo run lint)",
+      "Bash(pnpm run lint:*)",
+      "Bash(pnpm vitest run *)",
+      "Bash(pnpm biome check *)",
+      "Bash(pnpm ls *)",
+      "Bash(pnpm why *)",
+      "Bash(cat *)",
+      "Bash(echo *)",
+      "Bash(find *)",
+      "Bash(grep *)",
+      "Bash(git diff *)",
+      "Bash(gh issue view *)",
+      "Bash(gh pr view *)",
+      "Bash(gh pr diff *)",
+      "Bash(ls *)",
+      "WebFetch(domain:github.com)",
+      "mcp__context7__*",
+      "mcp__plugin_context7_*",
+      "mcp__github__*",
+      "WebSearch",
+      "WebFetch"
+    ]
+  },
+  "enableAllProjectMcpServers": true,
   "hooks": {
+    "SessionStart": [
+      {
+        "hooks": [
+          {
+            "type": "command",
+            "command": "$CLAUDE_PROJECT_DIR/.claude/hooks/session-start.sh"
+          }
+        ]
+      }
+    ],
     "PostToolUse": [
       {
         "matcher": "Write|Edit",
@@ -7,11 +49,16 @@
           {
             "type": "command",
             "command": "if [[ \"$FILE\" == */apps/* ]] || [[ \"$FILE\" == */packages/* ]]; then REPO_ROOT=$(echo \"$FILE\" | sed 's|/\\(apps\\|packages\\)/.*|/|'); cd \"$REPO_ROOT\" && pnpm biome check --write \"$FILE\" 2>/dev/null || true; fi",
-            "timeout": 30,
-            "description": "Auto-format edited files in apps/* and packages/* with Biome"
+            "timeout": 30
           }
         ]
       }
     ]
+  },
+  "enabledPlugins": {
+    "context7@claude-plugins-official": true,
+    "github@claude-plugins-official": true,
+    "typescript-lsp@claude-plugins-official": true,
+    "playwright@claude-plugins-official": true
   }
 }

+ 0 - 27
.claude/skills/learned/.gitkeep

@@ -1,27 +0,0 @@
-# Learned Skills Directory
-
-This directory contains Skills learned from development sessions using the `/learn` command.
-
-Each learned skill is automatically applied when working on related code based on its description.
-
-## Structure
-
-```
-learned/
-├── {topic-name-1}/
-│   └── SKILL.md
-├── {topic-name-2}/
-│   └── SKILL.md
-└── {topic-name-3}/
-    └── SKILL.md
-```
-
-## How Skills Are Created
-
-Use the `/learn` command after completing a feature or solving a complex problem:
-
-```
-/learn
-```
-
-Claude will extract reusable patterns and save them as Skills in this directory.

+ 122 - 0
.claude/skills/learned/essential-test-design/SKILL.md

@@ -0,0 +1,122 @@
+---
+name: essential-test-design
+description: Write tests that verify observable behavior (contract), not implementation details. Auto-invoked when writing or reviewing tests.
+---
+
+## Problem
+
+Tests that are tightly coupled to implementation details cause two failures:
+
+1. **False positives** — Tests pass even when behavior is broken (e.g., delay shortened but test still passes because it only checks `setTimeout` was called)
+2. **False negatives** — Tests fail even when behavior is correct (e.g., implementation switches from `setTimeout` to a `delay()` utility, spy breaks)
+
+Both undermine the purpose of testing: detecting regressions in behavior.
+
+## Principle: Test the Contract, Not the Mechanism
+
+A test is "essential" when it:
+- **Fails if the behavior degrades** (catches real bugs)
+- **Passes if the behavior is preserved** (survives refactoring)
+- **Does not depend on how the behavior is implemented** (implementation-agnostic)
+
+Ask: "What does the caller of this function experience?" — test that.
+
+## Anti-Patterns and Corrections
+
+### Anti-Pattern 1: Implementation Spy
+
+```typescript
+// BAD: Tests implementation, not behavior
+// Breaks if implementation changes from setTimeout to any other delay mechanism
+const spy = vi.spyOn(global, 'setTimeout');
+await exponentialBackoff(1);
+expect(spy).toHaveBeenCalledWith(expect.any(Function), 1000);
+```
+
+### Anti-Pattern 2: Arrange That Serves the Assert
+
+```typescript
+// BAD: The "arrange" is set up only to make the "assert" trivially pass
+// This is a self-fulfilling prophecy, not a meaningful test
+vi.advanceTimersByTime(1000);
+await promise;
+// No assertion — "it didn't throw" is not a valuable test
+```
+
+### Correct: Behavior Boundary Test
+
+```typescript
+// GOOD: Tests the observable contract
+// "Does not resolve before the expected delay, resolves at the expected delay"
+let resolved = false;
+mailService.exponentialBackoff(1).then(() => { resolved = true });
+
+await vi.advanceTimersByTimeAsync(999);
+expect(resolved).toBe(false);  // Catches: delay too short
+
+await vi.advanceTimersByTimeAsync(1);
+expect(resolved).toBe(true);   // Catches: delay too long or hangs
+```
+
+## Decision Framework
+
+When writing a test, ask these questions in order:
+
+1. **What is the contract?** — What does the caller expect to experience?
+   - e.g., "Wait for N ms before resolving"
+2. **What breakage should this test catch?** — Define the regression scenario
+   - e.g., "Someone changes the delay from 1000ms to 500ms"
+3. **Would this test still pass if I refactored the internals?** — If no, you're testing implementation
+   - e.g., Switching from `setTimeout` to `Bun.sleep()` shouldn't break the test
+4. **Would this test fail if the behavior degraded?** — If no, the test has no value
+   - e.g., If delay is halved, `expect(resolved).toBe(false)` at 999ms would catch it
+
+## Common Scenarios
+
+### Async Delay / Throttle / Debounce
+
+Use fake timers + boundary assertions (as shown above).
+
+### Data Transformation
+
+Assert on output shape/values, not on which internal helper was called.
+
+```typescript
+// BAD
+const spy = vi.spyOn(utils, 'formatDate');
+transform(input);
+expect(spy).toHaveBeenCalled();
+
+// GOOD
+const result = transform(input);
+expect(result.date).toBe('2026-01-01');
+```
+
+### Side Effects (API calls, DB writes)
+
+Mocking the boundary (API/DB) is acceptable — that IS the observable behavior.
+
+```typescript
+// OK: The contract IS "sends an email via mailer"
+expect(mockMailer.sendMail).toHaveBeenCalledWith(
+  expect.objectContaining({ to: 'user@example.com' })
+);
+```
+
+### Retry Logic
+
+Test the number of attempts and the final outcome, not the internal flow.
+
+```typescript
+// GOOD: Contract = "retries N times, then fails with specific error"
+mockMailer.sendMail.mockRejectedValue(new Error('fail'));
+await expect(sendWithRetry(config, 3)).rejects.toThrow('failed after 3 attempts');
+expect(mockMailer.sendMail).toHaveBeenCalledTimes(3);
+```
+
+## When to Apply
+
+- Writing new test cases for any function or method
+- Reviewing existing tests for flakiness or brittleness
+- Refactoring tests after fixing flaky CI failures
+- Code review of test pull requests

+ 68 - 10
.claude/skills/testing-patterns-with-vitest/SKILL.md → .claude/skills/learned/essential-test-patterns/SKILL.md

@@ -1,7 +1,6 @@
 ---
-name: testing-patterns-with-vitest
-description: GROWI testing patterns with Vitest, React Testing Library, and vitest-mock-extended. Auto-invoked for all GROWI development work.
-user-invocable: false
+name: essential-test-patterns
+description: GROWI testing patterns with Vitest, React Testing Library, and vitest-mock-extended.
 ---
 
 # GROWI Testing Patterns
@@ -373,6 +372,71 @@ vi.mock('~/utils/myUtils', () => ({
 }));
 ```
 
+### Mocking CommonJS Modules with mock-require
+
+**IMPORTANT**: When `vi.mock()` fails with ESModule/CommonJS compatibility issues, use `mock-require` instead:
+
+```typescript
+import mockRequire from 'mock-require';
+
+describe('Service with CommonJS dependencies', () => {
+  beforeEach(() => {
+    // Mock CommonJS module before importing the code under test
+    mockRequire('legacy-module', {
+      someFunction: vi.fn().mockReturnValue('mocked'),
+      someProperty: 'mocked-value',
+    });
+  });
+
+  afterEach(() => {
+    // Clean up mocks to avoid leakage between tests
+    mockRequire.stopAll();
+  });
+
+  it('should use mocked module', async () => {
+    // Import AFTER mocking (dynamic import if needed)
+    const { MyService } = await import('~/services/MyService');
+
+    const result = MyService.doSomething();
+    expect(result).toBe('mocked');
+  });
+});
+```
+
+**When to use `mock-require`**:
+- Legacy CommonJS modules that don't work with `vi.mock()`
+- Mixed ESM/CJS environments causing module resolution issues
+- Third-party libraries with complex module systems
+- When `vi.mock()` fails with "Cannot redefine property" or "Module is not defined"
+
+**Key points**:
+- ✅ Mock **before** importing the code under test
+- ✅ Use `mockRequire.stopAll()` in `afterEach()` to prevent test leakage
+- ✅ Use dynamic imports (`await import()`) when needed
+- ✅ Works with both CommonJS and ESModule targets
+
+### Choosing the Right Mocking Strategy
+
+```typescript
+// ✅ Prefer vi.mock() for ESModules (simplest)
+vi.mock('~/modern-module', () => ({
+  myFunction: vi.fn(),
+}));
+
+// ✅ Use mock-require for CommonJS or mixed environments
+import mockRequire from 'mock-require';
+mockRequire('legacy-module', { myFunction: vi.fn() });
+
+// ✅ Use vitest-mock-extended for type-safe object mocks
+import { mockDeep } from 'vitest-mock-extended';
+const mockService = mockDeep<MyService>();
+```
+
+**Decision tree**:
+1. Can use `vi.mock()`? → Use it (simplest)
+2. CommonJS or module error? → Use `mock-require`
+3. Need type-safe object mock? → Use `vitest-mock-extended`
+
 ## Integration Tests (with Database)
 
 Integration tests (*.integ.ts) can access in-memory databases:
@@ -415,13 +479,7 @@ Before committing tests, ensure:
 
 ## Running Tests
 
-```bash
-# Run all tests for a package
-turbo run test --filter @growi/app
-
-# Run specific test file
-cd {package_dir} && pnpm vitest run src/components/Button/Button.spec.tsx
-```
+See the `testing` rule (`.claude/rules/testing.md`) for test execution commands.
 
 ## Summary: GROWI Testing Philosophy
 

+ 22 - 4
.claude/skills/monorepo-overview/SKILL.md

@@ -99,11 +99,28 @@ This enables better code splitting and prevents server-only code from being bund
 
 Common code should be extracted to `packages/`:
 
-- **core**: Utilities, constants, type definitions
+- **core**: Domain hub (see below)
 - **ui**: Reusable React components
 - **editor**: Markdown editor
 - **pluginkit**: Plugin system framework
 
+#### @growi/core — Domain & Utilities Hub
+
+`@growi/core` is the foundational shared package depended on by all other packages (10 consumers). Its responsibilities:
+
+- **Domain type definitions** — Single source of truth for cross-package interfaces (`IPage`, `IUser`, `IRevision`, `Ref<T>`, `HasObjectId`, etc.)
+- **Cross-cutting utilities** — Pure functions for page path validation, ObjectId checks, serialization (e.g., `serializeUserSecurely()`)
+- **System constants** — File types, plugin configs, scope enums
+- **Global type augmentations** — Runtime/polyfill type declarations visible to all consumers (e.g., `RegExp.escape()` via `declare global` in `index.ts`)
+
+Key patterns:
+
+1. **Shared types and global augmentations go in `@growi/core`** — Not duplicated per-package. `declare global` in `index.ts` propagates to all consumers through the module graph.
+2. **Subpath exports for granular imports** — `@growi/core/dist/utils/page-path-utils` instead of barrel imports from root.
+3. **Minimal runtime dependencies** — Only `bson-objectid`; ~70% types. Safe to import from both server and client contexts.
+4. **Server-specific interfaces are namespaced** — Under `interfaces/server/`.
+5. **Dual format (ESM + CJS)** — Built via Vite with `preserveModules: true` and `vite-plugin-dts` (`copyDtsFiles: true`).
+
 ## Version Management with Changeset
 
 GROWI uses **Changesets** for version management and release notes:
@@ -172,10 +189,11 @@ turbo run bootstrap
 # Start all dev servers (apps/app + dependencies)
 turbo run dev
 
-# Run tests for specific package
-turbo run test --filter @growi/app
+# Run a specific test file (from package directory)
+pnpm vitest run yjs.integ
 
-# Lint specific package
+# Run ALL tests / lint for a package
+turbo run test --filter @growi/app
 turbo run lint --filter @growi/core
 ```
 

+ 11 - 3
.claude/skills/tech-stack/SKILL.md

@@ -38,7 +38,7 @@ user-invocable: false
 ## Build & Development Tools
 
 ### Package Management
-- **pnpm** 10.4.1 - Package manager (faster, more efficient than npm/yarn)
+- **pnpm** Package manager (faster, more efficient than npm/yarn)
 
 ### Monorepo Orchestration
 - **Turborepo** ^2.1.3 - Build system with caching and parallelization
@@ -101,7 +101,12 @@ turbo run bootstrap
 ### Testing & Quality
 
 ```bash
-# Run tests for specific package
+# Run a specific test file (from package directory, e.g. apps/app)
+pnpm vitest run yjs.integ          # Partial file name match
+pnpm vitest run helper.spec        # Works for any test file
+pnpm vitest run yjs.integ --repeat=10  # Repeat for flaky test detection
+
+# Run ALL tests for a package (uses Turborepo caching)
 turbo run test --filter @growi/app
 
 # Run linters for specific package
@@ -182,9 +187,12 @@ Package-specific tsconfig.json example:
 
 ### Command Usage
 
-1. **Always use Turborepo for cross-package tasks**:
+1. **Use Turborepo for full-package tasks** (all tests, lint, build):
    - ✅ `turbo run test --filter @growi/app`
    - ❌ `cd apps/app && pnpm test` (bypasses Turborepo caching)
+2. **Use vitest directly for individual test files** (from package directory):
+   - ✅ `pnpm vitest run yjs.integ` (simple, fast)
+   - ❌ `turbo run test --filter @growi/app -- yjs.integ` (unnecessary overhead)
 
 3. **Use pnpm for package management**:
    - ✅ `pnpm install`

+ 5 - 2
.devcontainer/app/devcontainer.json

@@ -8,8 +8,9 @@
 
   "features": {
     "ghcr.io/devcontainers/features/node:1": {
-      "version": "20.18.3"
-    }
+      "version": "24.14.0"
+    },
+    "ghcr.io/devcontainers/features/github-cli:1": {}
   },
 
   // Use 'forwardPorts' to make a list of ports inside the container available locally.
@@ -30,6 +31,8 @@
         "editorconfig.editorconfig",
         "shinnn.stylelint",
         "stylelint.vscode-stylelint",
+        // markdown
+        "bierner.markdown-mermaid",
         // TypeScript (Native Preview)
         "typescriptteam.native-preview",
         // Test

+ 4 - 0
.devcontainer/pdf-converter/devcontainer.json

@@ -4,6 +4,10 @@
   "service": "pdf-converter",
   "workspaceFolder": "/workspace/growi",
 
+  "features": {
+    "ghcr.io/devcontainers/features/github-cli:1": {}
+  },
+
   // Use 'forwardPorts' to make a list of ports inside the container available locally.
   // "forwardPorts": [],
 

+ 5 - 5
.github/mergify.yml

@@ -6,17 +6,17 @@ queue_rules:
       - check-success ~= ci-app-launch-dev
       - -check-failure ~= ci-app-
       - -check-failure ~= ci-slackbot-
-      - -check-failure ~= test-prod-node20 /
+      - -check-failure ~= test-prod-node24 /
     merge_conditions:
       - check-success ~= ci-app-lint
       - check-success ~= ci-app-test
       - check-success ~= ci-app-launch-dev
-      - check-success = test-prod-node20 / build-prod
-      - check-success ~= test-prod-node20 / launch-prod
-      - check-success ~= test-prod-node20 / run-playwright
+      - check-success = test-prod-node24 / build-prod
+      - check-success ~= test-prod-node24 / launch-prod
+      - check-success ~= test-prod-node24 / run-playwright
       - -check-failure ~= ci-app-
       - -check-failure ~= ci-slackbot-
-      - -check-failure ~= test-prod-node20 /
+      - -check-failure ~= test-prod-node24 /
 
 pull_request_rules:
   - name: Automatic queue to merge

+ 15 - 32
.github/workflows/ci-app-prod.yml

@@ -39,22 +39,21 @@ concurrency:
 
 jobs:
 
-  test-prod-node18:
-    uses: growilabs/growi/.github/workflows/reusable-app-prod.yml@master
-    if: |
-      ( github.event_name == 'push'
-        || github.base_ref == 'master'
-        || github.base_ref == 'dev/7.*.x'
-        || startsWith( github.base_ref, 'release/' )
-        || startsWith( github.head_ref, 'mergify/merge-queue/' ))
-    with:
-      node-version: 18.x
-      skip-e2e-test: true
-    secrets:
-      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
-
+  # test-prod-node22:
+  #   uses: growilabs/growi/.github/workflows/reusable-app-prod.yml@master
+  #   if: |
+  #     ( github.event_name == 'push'
+  #       || github.base_ref == 'master'
+  #       || github.base_ref == 'dev/7.*.x'
+  #       || startsWith( github.base_ref, 'release/' )
+  #       || startsWith( github.head_ref, 'mergify/merge-queue/' ))
+  #   with:
+  #     node-version: 22.x
+  #     skip-e2e-test: true
+  #   secrets:
+  #     SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
 
-  test-prod-node20:
+  test-prod-node24:
     uses: growilabs/growi/.github/workflows/reusable-app-prod.yml@master
     if: |
       ( github.event_name == 'push'
@@ -63,23 +62,7 @@ jobs:
         || startsWith( github.base_ref, 'release/' )
         || startsWith( github.head_ref, 'mergify/merge-queue/' ))
     with:
-      node-version: 20.x
+      node-version: 24.x
       skip-e2e-test: ${{ contains( github.event.pull_request.labels.*.name, 'dependencies' ) }}
     secrets:
       SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
-
-  # run-reg-suit-node20:
-  #   needs: [test-prod-node20]
-
-  #   uses: growilabs/growi/.github/workflows/reusable-app-reg-suit.yml@master
-
-  #   if: always()
-
-  #   with:
-  #     node-version: 20.x
-  #     skip-reg-suit: ${{ contains( github.event.pull_request.labels.*.name, 'dependencies' ) }}
-  #   secrets:
-  #     REG_NOTIFY_GITHUB_PLUGIN_CLIENTID: ${{ secrets.REG_NOTIFY_GITHUB_PLUGIN_CLIENTID }}
-  #     AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
-  #     AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-  #     SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

+ 3 - 3
.github/workflows/ci-app.yml

@@ -44,7 +44,7 @@ jobs:
 
     strategy:
       matrix:
-        node-version: [20.x]
+        node-version: [24.x]
 
     steps:
       - uses: actions/checkout@v4
@@ -92,7 +92,7 @@ jobs:
 
     strategy:
       matrix:
-        node-version: [20.x]
+        node-version: [24.x]
         mongodb-version: ['6.0', '8.0']
 
     services:
@@ -157,7 +157,7 @@ jobs:
 
     strategy:
       matrix:
-        node-version: [20.x]
+        node-version: [24.x]
         mongodb-version: ['6.0', '8.0']
 
     services:

+ 4 - 4
.github/workflows/ci-pdf-converter.yml

@@ -29,7 +29,7 @@ jobs:
 
     strategy:
       matrix:
-        node-version: [20.x]
+        node-version: [24.x]
 
     steps:
     - uses: actions/checkout@v4
@@ -65,7 +65,7 @@ jobs:
 
     strategy:
       matrix:
-        node-version: [20.x]
+        node-version: [24.x]
 
     steps:
     - uses: actions/checkout@v4
@@ -104,7 +104,7 @@ jobs:
 
     strategy:
       matrix:
-        node-version: [20.x]
+        node-version: [24.x]
 
     steps:
     - uses: actions/checkout@v4
@@ -142,7 +142,7 @@ jobs:
     - name: Assembling all dependencies
       run: |
         rm -rf out
-        pnpm deploy out --prod --filter @growi/pdf-converter
+        pnpm deploy out --prod --legacy --filter @growi/pdf-converter
         rm -rf apps/pdf-converter/node_modules && mv out/node_modules apps/pdf-converter/node_modules
 
     - name: pnpm run start:prod:ci

+ 4 - 4
.github/workflows/ci-slackbot-proxy.yml

@@ -30,7 +30,7 @@ jobs:
 
     strategy:
       matrix:
-        node-version: [20.x]
+        node-version: [24.x]
 
     steps:
     - uses: actions/checkout@v4
@@ -85,7 +85,7 @@ jobs:
 
     strategy:
       matrix:
-        node-version: [20.x]
+        node-version: [24.x]
 
     services:
       mysql:
@@ -163,7 +163,7 @@ jobs:
 
     strategy:
       matrix:
-        node-version: [20.x]
+        node-version: [24.x]
 
     services:
       mysql:
@@ -211,7 +211,7 @@ jobs:
     - name: Assembling all dependencies
       run: |
         rm -rf out
-        pnpm deploy out --prod --filter @growi/slackbot-proxy
+        pnpm deploy out --prod --legacy --filter @growi/slackbot-proxy
         rm -rf apps/slackbot-proxy/node_modules && mv out/node_modules apps/slackbot-proxy/node_modules
 
     - name: pnpm run start:prod:ci

+ 2 - 2
.github/workflows/release-rc.yml

@@ -37,7 +37,7 @@ jobs:
           type=raw,value=${{ steps.package-json.outputs.packageVersion }}.{{sha}}
 
   build-image-rc:
-    uses: growilabs/growi/.github/workflows/reusable-app-build-image.yml@master
+    uses: growilabs/growi/.github/workflows/reusable-app-build-image.yml@rc/v7.5.x-node24
     with:
       image-name: growilabs/growi
       tag-temporary: latest-rc
@@ -47,7 +47,7 @@ jobs:
   publish-image-rc:
     needs: [determine-tags, build-image-rc]
 
-    uses: growilabs/growi/.github/workflows/reusable-app-create-manifests.yml@master
+    uses: growilabs/growi/.github/workflows/reusable-app-create-manifests.yml@rc/v7.5.x-node24
     with:
       tags: ${{ needs.determine-tags.outputs.TAGS }}
       registry: docker.io

+ 2 - 2
.github/workflows/release-subpackages.yml

@@ -32,7 +32,7 @@ jobs:
 
     - uses: actions/setup-node@v4
       with:
-        node-version: '20'
+        node-version: '24'
         cache: 'pnpm'
 
     - name: Install dependencies
@@ -75,7 +75,7 @@ jobs:
 
     - uses: actions/setup-node@v4
       with:
-        node-version: '20'
+        node-version: '24'
         cache: 'pnpm'
 
     - name: Install dependencies

+ 1 - 1
.github/workflows/reusable-app-build-image.yml

@@ -48,7 +48,7 @@ jobs:
         projectName: growi-official-image-builder
       env:
         CODEBUILD__sourceVersion: ${{ inputs.source-version }}
-        CODEBUILD__imageOverride: ${{ (matrix.platform == 'amd64' && 'aws/codebuild/amazonlinux2-x86_64-standard:4.0') || 'aws/codebuild/amazonlinux2-aarch64-standard:2.0' }}
+        CODEBUILD__imageOverride: ${{ (matrix.platform == 'amd64' && 'aws/codebuild/amazonlinux2-x86_64-standard:5.0') || 'aws/codebuild/amazonlinux2-aarch64-standard:3.0' }}
         CODEBUILD__environmentTypeOverride: ${{ (matrix.platform == 'amd64' && 'LINUX_CONTAINER') || 'ARM_CONTAINER' }}
         CODEBUILD__environmentVariablesOverride: '[
           { "name": "IMAGE_TAG", "type": "PLAINTEXT", "value": "docker.io/${{ inputs.image-name }}:${{ inputs.tag-temporary }}-${{ matrix.platform }}" }

+ 22 - 27
.github/workflows/reusable-app-prod.yml

@@ -16,7 +16,7 @@ on:
       node-version:
         required: true
         type: string
-        default: 22.x
+        default: 24.x
       skip-e2e-test:
         type: boolean
         default: false
@@ -57,17 +57,18 @@ jobs:
       env:
         ANALYZE: 1
 
-    - name: Assembling all dependencies
-      run: |
-        rm -rf out
-        pnpm deploy out --prod --filter @growi/app
-        rm -rf apps/app/node_modules && mv out/node_modules apps/app/node_modules
+    - name: Assemble production artifacts
+      run: bash apps/app/bin/assemble-prod.sh
+
+    - name: Check for broken symlinks in .next/node_modules
+      run: bash apps/app/bin/check-next-symlinks.sh
 
     - name: Archive production files
       id: archive-prod-files
       run: |
         tar -zcf production.tar.gz --exclude ./apps/app/.next/cache \
           package.json \
+          node_modules \
           apps/app/.next \
           apps/app/config \
           apps/app/dist \
@@ -76,6 +77,7 @@ jobs:
           apps/app/tmp \
           apps/app/.env.production* \
           apps/app/node_modules \
+          apps/app/next.config.js \
           apps/app/package.json
         echo "file=production.tar.gz" >> $GITHUB_OUTPUT
 
@@ -124,30 +126,22 @@ jobs:
           discovery.type: single-node
 
     steps:
-    - uses: actions/checkout@v4
-
-    - uses: pnpm/action-setup@v4
-
     - uses: actions/setup-node@v4
       with:
         node-version: ${{ inputs.node-version }}
-        cache: 'pnpm'
-
-    # avoid setup-node cache failure; see: https://github.com/actions/setup-node/issues/1137
-    - name: Verify PNPM Cache Directory
-      run: |
-        PNPM_STORE_PATH="$( pnpm store path --silent )"
-        [ -d "$PNPM_STORE_PATH" ] || mkdir -vp "$PNPM_STORE_PATH"
 
     - name: Download production files artifact
       uses: actions/download-artifact@v4
       with:
         name: Production Files (node${{ inputs.node-version }})
 
-    - name: Extract procution files
+    - name: Extract production files
       run: |
         tar -xf ${{ needs.build-prod.outputs.PROD_FILES }}
 
+    # Run after extraction so pnpm/action-setup@v4 can read packageManager from package.json
+    - uses: pnpm/action-setup@v4
+
     - name: pnpm run server:ci
       working-directory: ./apps/app
       run: |
@@ -179,7 +173,7 @@ jobs:
     container:
       # Match the Playwright version
       # https://github.com/microsoft/playwright/issues/20010
-      image: mcr.microsoft.com/playwright:v1.49.1-jammy
+      image: mcr.microsoft.com/playwright:v1.58.2-jammy
 
     strategy:
       fail-fast: false
@@ -223,14 +217,14 @@ jobs:
       with:
         name: Production Files (node${{ inputs.node-version }})
 
-    - name: Extract procution files
+    - name: Extract production files to isolated directory
       run: |
-        tar -xf ${{ needs.build-prod.outputs.PROD_FILES }}
+        mkdir -p /tmp/growi-prod
+        tar -xf ${{ needs.build-prod.outputs.PROD_FILES }} -C /tmp/growi-prod
 
     - name: Copy dotenv file for ci
-      working-directory: ./apps/app
       run: |
-        cat config/ci/.env.local.for-ci >> .env.production.local
+        cat apps/app/config/ci/.env.local.for-ci >> /tmp/growi-prod/apps/app/.env.production.local
 
     - name: Playwright Run (--project=chromium/installer)
       if: ${{ matrix.browser == 'chromium' }}
@@ -240,13 +234,13 @@ jobs:
       env:
         DEBUG: pw:api
         HOME: /root # ref: https://github.com/microsoft/playwright/issues/6500
+        GROWI_WEBSERVER_COMMAND: 'cd /tmp/growi-prod/apps/app && pnpm run server'
         MONGO_URI: mongodb://mongodb:27017/growi-playwright-installer
         ELASTICSEARCH_URI: http://localhost:${{ job.services.elasticsearch.ports['9200'] }}/growi
 
     - name: Copy dotenv file for automatic installation
-      working-directory: ./apps/app
       run: |
-        cat config/ci/.env.local.for-auto-install >> .env.production.local
+        cat apps/app/config/ci/.env.local.for-auto-install >> /tmp/growi-prod/apps/app/.env.production.local
 
     - name: Playwright Run
       working-directory: ./apps/app
@@ -255,13 +249,13 @@ jobs:
       env:
         DEBUG: pw:api
         HOME: /root # ref: https://github.com/microsoft/playwright/issues/6500
+        GROWI_WEBSERVER_COMMAND: 'cd /tmp/growi-prod/apps/app && pnpm run server'
         MONGO_URI: mongodb://mongodb:27017/growi-playwright
         ELASTICSEARCH_URI: http://localhost:${{ job.services.elasticsearch.ports['9200'] }}/growi
 
     - name: Copy dotenv file for automatic installation with allowing guest mode
-      working-directory: ./apps/app
       run: |
-        cat config/ci/.env.local.for-auto-install-with-allowing-guest >> .env.production.local
+        cat apps/app/config/ci/.env.local.for-auto-install-with-allowing-guest >> /tmp/growi-prod/apps/app/.env.production.local
 
     - name: Playwright Run (--project=${browser}/guest-mode)
       working-directory: ./apps/app
@@ -270,6 +264,7 @@ jobs:
       env:
         DEBUG: pw:api
         HOME: /root # ref: https://github.com/microsoft/playwright/issues/6500
+        GROWI_WEBSERVER_COMMAND: 'cd /tmp/growi-prod/apps/app && pnpm run server'
         MONGO_URI: mongodb://mongodb:27017/growi-playwright-guest-mode
         ELASTICSEARCH_URI: http://localhost:${{ job.services.elasticsearch.ports['9200'] }}/growi
 

+ 1 - 0
.gitignore

@@ -2,6 +2,7 @@
 
 # dependencies
 node_modules
+node_modules.*
 /.pnp
 .pnp.js
 .pnpm-store

+ 268 - 0
.kiro/specs/collaborative-editor/design.md

@@ -0,0 +1,268 @@
+# Design Document: collaborative-editor
+
+## Overview
+
+**Purpose**: Real-time collaborative editing in GROWI, allowing multiple users to simultaneously edit the same wiki page with automatic conflict resolution via Yjs CRDT.
+
+**Users**: All GROWI users who use real-time collaborative page editing. System operators manage the WebSocket and persistence infrastructure.
+
+**Impact**: Yjs document synchronization over native WebSocket (`y-websocket`), with Socket.IO continuing to serve non-Yjs real-time events (page room broadcasts, notifications).
+
+### Goals
+- Guarantee a single server-side Y.Doc per page — no split-brain desynchronization
+- Provide real-time bidirectional sync for all connected editors
+- Authenticate and authorize WebSocket connections using existing session infrastructure
+- Persist draft state to MongoDB for durability across reconnections and restarts
+- Bridge awareness/presence events to non-editor UI via Socket.IO rooms
+
+### Non-Goals
+- Changing the Yjs document model, CodeMirror integration, or page save/revision logic
+- Migrating Socket.IO-based UI events to WebSocket
+- Changing the `yjs-writings` MongoDB collection schema or data format
+
+## Architecture
+
+### Architecture Diagram
+
+```mermaid
+graph TB
+    subgraph Client
+        CM[CodeMirror Editor]
+        WP[WebsocketProvider]
+        GS[Global Socket.IO Client]
+    end
+
+    subgraph Server
+        subgraph HTTP Server
+            Express[Express App]
+            SIO[Socket.IO Server]
+            WSS[WebSocket Server - ws]
+        end
+
+        subgraph YjsService
+            UpgradeHandler[Upgrade Handler - Auth]
+            ConnHandler[Connection Handler]
+            DocManager[Document Manager - getYDoc]
+            AwarenessBridge[Awareness Bridge]
+        end
+
+        MDB[(MongoDB - yjs-writings)]
+        SessionStore[(Session Store)]
+    end
+
+    CM --> WP
+    WP -->|ws path yjs pageId| WSS
+    GS -->|socket.io| SIO
+
+    WSS -->|upgrade auth| UpgradeHandler
+    UpgradeHandler -->|parse cookie| SessionStore
+    WSS -->|connection| ConnHandler
+    ConnHandler --> DocManager
+    DocManager --> MDB
+
+    AwarenessBridge -->|io.in room .emit| SIO
+
+    DocManager -->|awareness events| AwarenessBridge
+```
+
+**Key architectural properties**:
+- **Dual transport**: WebSocket for Yjs sync (`/yjs/{pageId}`), Socket.IO for UI events (`/socket.io/`)
+- **Singleton YjsService**: Encapsulates all Yjs document management
+- **Atomic document creation**: `map.setIfUndefined` from lib0 — synchronous get-or-create, no race condition window
+- **Session-based auth**: Cookie parsed from HTTP upgrade request, same session store as Express
+
+### Technology Stack
+
+| Layer | Choice / Version | Role |
+|-------|------------------|------|
+| Client Provider | `y-websocket@^2.x` (WebsocketProvider) | Yjs document sync over WebSocket |
+| Server WebSocket | `ws@^8.x` (WebSocket.Server) | Native WebSocket server, `noServer: true` mode |
+| Server Yjs Utils | `y-websocket@^2.x` (`bin/utils`) | `setupWSConnection`, `getYDoc`, `WSSharedDoc` |
+| Persistence | `y-mongodb-provider` (extended) | Yjs document persistence to `yjs-writings` collection |
+| Event Bridge | Socket.IO `io` instance | Awareness state broadcasting to page rooms |
+| Auth | express-session + passport | WebSocket upgrade authentication via cookie |
+
+## System Flows
+
+### Client Connection Flow
+
+```mermaid
+sequenceDiagram
+    participant C as Client Browser
+    participant WSS as WebSocket Server
+    participant UH as Upgrade Handler
+    participant SS as Session Store
+    participant DM as Document Manager
+    participant MDB as MongoDB
+
+    C->>WSS: HTTP Upgrade GET /yjs/pageId
+    WSS->>UH: upgrade event
+    UH->>SS: Parse cookie, load session
+    SS-->>UH: Session with user
+    UH->>UH: Check page access
+    alt Unauthorized
+        UH-->>C: 401/403, destroy socket
+    else Authorized
+        UH->>WSS: handleUpgrade
+        WSS->>DM: setupWSConnection
+        DM->>DM: getYDoc - atomic get or create
+        alt New document
+            DM->>MDB: bindState - load persisted state
+            MDB-->>DM: Y.Doc state
+        end
+        DM-->>C: Sync Step 1 - state vector
+        C-->>DM: Sync Step 2 - diff
+        DM-->>C: Awareness states
+    end
+```
+
+Authentication happens before `handleUpgrade` — unauthorized connections never reach the Yjs layer. Document creation uses `getYDoc`'s atomic `map.setIfUndefined` pattern.
+
+### Document Lifecycle
+
+```mermaid
+stateDiagram-v2
+    [*] --> Created: First client connects
+    Created --> Active: bindState completes
+    Active --> Active: Clients connect/disconnect
+    Active --> Flushing: Last client disconnects
+    Flushing --> [*]: writeState completes, doc destroyed
+    Flushing --> Active: New client connects before destroy
+```
+
+## Components and Interfaces
+
+| Component | Layer | Intent | Key Dependencies |
+|-----------|-------|--------|-----------------|
+| YjsService | Server / Service | Orchestrates Yjs document lifecycle, exposes public API | ws, y-websocket/bin/utils, MongodbPersistence |
+| UpgradeHandler | Server / Auth | Authenticates and authorizes WebSocket upgrade requests | express-session, passport, Page model |
+| guardSocket | Server / Util | Prevents socket closure by other upgrade handlers during async auth | — |
+| PersistenceAdapter | Server / Data | Bridges MongodbPersistence to y-websocket persistence interface | MongodbPersistence, syncYDoc, Socket.IO io |
+| AwarenessBridge | Server / Events | Bridges y-websocket awareness events to Socket.IO rooms | Socket.IO io |
+| use-collaborative-editor-mode | Client / Hook | Manages WebsocketProvider lifecycle and awareness | y-websocket, yjs |
+
+### YjsService
+
+**Intent**: Manages Yjs document lifecycle, WebSocket server setup, and public API for page save/status integration.
+
+**Responsibilities**:
+- Owns the `ws.WebSocketServer` instance and the y-websocket `docs` Map
+- Initializes persistence via y-websocket's `setPersistence`
+- Registers the HTTP `upgrade` handler (delegating auth to UpgradeHandler)
+- Exposes the same public interface as `IYjsService` for downstream consumers
+
+**Service Interface**:
+
+```typescript
+interface IYjsService {
+  getYDocStatus(pageId: string): Promise<YDocStatus>;
+  syncWithTheLatestRevisionForce(
+    pageId: string,
+    editingMarkdownLength?: number,
+  ): Promise<SyncLatestRevisionBody>;
+  getCurrentYdoc(pageId: string): Y.Doc | undefined;
+}
+```
+
+- Constructor accepts `httpServer: http.Server` and `io: Server`
+- Uses `WebSocket.Server({ noServer: true })` + y-websocket utils
+- Uses `httpServer.on('upgrade', ...)` with path check for `/yjs/`
+- **CRITICAL**: Socket.IO server must set `destroyUpgrade: false` to prevent engine.io from destroying non-Socket.IO upgrade requests
+
+### UpgradeHandler
+
+**Intent**: Authenticates WebSocket upgrade requests using session cookies and verifies page access.
+
+**Interface**:
+
+```typescript
+type UpgradeResult =
+  | { authorized: true; request: AuthenticatedRequest; pageId: string }
+  | { authorized: false; statusCode: number };
+```
+
+- Runs express-session and passport middleware via `runMiddleware` helper against raw `IncomingMessage`
+- `writeErrorResponse` writes HTTP status line only — socket cleanup deferred to caller (works with `guardSocket`)
+- Guest access: if `user` is undefined but page allows guest access, authorization proceeds
+
+### guardSocket
+
+**Intent**: Prevents other synchronous upgrade handlers from closing the socket during async auth.
+
+**Why this exists**: Node.js EventEmitter fires all `upgrade` listeners synchronously. When the Yjs async handler yields at its first `await`, Next.js's `NextCustomServer.upgradeHandler` runs and calls `socket.end()` for unrecognized paths. This destroys the socket before Yjs auth completes.
+
+**How it works**: Temporarily replaces `socket.end()` and `socket.destroy()` with no-ops before the first `await`. After auth completes, `restore()` reinstates the original methods.
+
+```typescript
+const guard = guardSocket(socket);
+const result = await handleUpgrade(request, socket, head);
+guard.restore();
+```
+
+### PersistenceAdapter
+
+**Intent**: Adapts MongodbPersistence to y-websocket's persistence interface (`bindState`, `writeState`).
+
+**Interface**:
+
+```typescript
+interface YWebsocketPersistence {
+  bindState: (docName: string, ydoc: Y.Doc) => void;
+  writeState: (docName: string, ydoc: Y.Doc) => Promise<void>;
+  provider: MongodbPersistence;
+}
+```
+
+**Key behavior**:
+- `bindState`: Loads persisted state → determines YDocStatus → calls `syncYDoc` → registers awareness event bridge
+- `writeState`: Flushes document state to MongoDB on last-client disconnect
+- Ordering within `bindState` is guaranteed (persistence load → sync → awareness registration)
+
+### AwarenessBridge
+
+**Intent**: Bridges y-websocket per-document awareness events to Socket.IO room broadcasts.
+
+**Published events** (to Socket.IO rooms):
+- `YjsAwarenessStateSizeUpdated` with `awarenessStateSize: number`
+- `YjsHasYdocsNewerThanLatestRevisionUpdated` with `hasNewerYdocs: boolean`
+
+**Subscribed events** (from y-websocket):
+- `WSSharedDoc.awareness.on('update', ...)` — per-document awareness changes
+
+### use-collaborative-editor-mode (Client Hook)
+
+**Intent**: Manages WebsocketProvider lifecycle, awareness state, and CodeMirror extensions.
+
+**Key details**:
+- WebSocket URL: `${wsProtocol}//${window.location.host}/yjs`, room name: `pageId`
+- Options: `connect: true`, `resyncInterval: 3000`
+- Awareness API: `provider.awareness.setLocalStateField`, `.on('update', ...)`
+- All side effects (provider creation, awareness setup) must be outside React state updaters to avoid render-phase violations
+
+## Data Models
+
+No custom data models. Uses the existing `yjs-writings` MongoDB collection via `MongodbPersistence` (extended `y-mongodb-provider`). Collection schema, indexes, and persistence interface (`bindState` / `writeState`) are unchanged.
+
+## Error Handling
+
+| Error Type | Scenario | Response |
+|------------|----------|----------|
+| Auth Failure | Invalid/expired session cookie | 401 on upgrade, socket destroyed |
+| Access Denied | User lacks page access | 403 on upgrade, socket destroyed |
+| Persistence Error | MongoDB read failure in bindState | Log error, serve empty doc (clients sync from each other) |
+| WebSocket Close | Client network failure | Automatic reconnect with exponential backoff (WebsocketProvider built-in) |
+| Document Not Found | getCurrentYdoc for non-active doc | Return undefined |
+
+## Requirements Traceability
+
+| Requirement | Summary | Components |
+|-------------|---------|------------|
+| 1.1, 1.2 | Single Y.Doc per page | DocumentManager (getYDoc atomic pattern) |
+| 1.3, 1.4, 1.5 | Sync integrity on reconnect | DocumentManager, WebsocketProvider |
+| 2.1, 2.2 | y-websocket transport | YjsService, use-collaborative-editor-mode |
+| 2.3 | Coexist with Socket.IO | UpgradeHandler, guardSocket |
+| 2.4 | resyncInterval | WebsocketProvider |
+| 3.1-3.4 | Auth on upgrade | UpgradeHandler |
+| 4.1-4.5 | MongoDB persistence | PersistenceAdapter |
+| 5.1-5.4 | Awareness and presence | AwarenessBridge, use-collaborative-editor-mode |
+| 6.1-6.4 | YDoc status and sync | YjsService |

+ 79 - 0
.kiro/specs/collaborative-editor/requirements.md

@@ -0,0 +1,79 @@
+# Requirements Document
+
+## Introduction
+
+GROWI provides real-time collaborative editing powered by Yjs, allowing multiple users to simultaneously edit the same wiki page with automatic conflict resolution. The collaborative editing system uses `y-websocket` as the Yjs transport layer over native WebSocket, with MongoDB persistence for draft state and Socket.IO bridging for awareness/presence events to non-editor UI components.
+
+**Scope**: Server-side Yjs document management, client-side Yjs provider, WebSocket authentication, MongoDB persistence integration, and awareness/presence tracking.
+
+**Out of Scope**: The Yjs document model itself, CodeMirror editor integration details, page save/revision logic, or the global Socket.IO infrastructure used for non-Yjs events.
+
+## Requirements
+
+### Requirement 1: Document Synchronization Integrity
+
+**Objective:** As a wiki user editing collaboratively, I want all clients editing the same page to always share a single server-side Y.Doc instance, so that edits are never lost due to document desynchronization.
+
+#### Acceptance Criteria
+
+1. When multiple clients connect to the same page simultaneously, the Yjs Service shall ensure that exactly one Y.Doc instance exists on the server for that page.
+2. When a client connects while another client's document initialization is in progress, the Yjs Service shall return the same Y.Doc instance to both clients without creating a duplicate.
+3. When a client reconnects after a brief network disconnection, the Yjs Service shall synchronize the client with the existing server-side Y.Doc containing all other clients' changes.
+4. While multiple clients are editing the same page, the Yjs Service shall propagate each client's changes to all other connected clients in real time.
+5. If a client's WebSocket connection drops and reconnects, the Yjs Service shall not destroy the server-side Y.Doc while other clients remain connected.
+
+### Requirement 2: WebSocket Transport Layer
+
+**Objective:** As a system operator, I want the collaborative editing transport to use y-websocket over native WebSocket, so that the system benefits from active maintenance and atomic document initialization.
+
+#### Acceptance Criteria
+
+1. The Yjs Service shall use `y-websocket` server utilities as the server-side Yjs transport.
+2. The Editor Client shall use `y-websocket`'s `WebsocketProvider` as the client-side Yjs provider.
+3. The WebSocket server shall coexist with the existing Socket.IO server on the same HTTP server instance without port conflicts.
+4. The Yjs Service shall support `resyncInterval` (periodic state re-synchronization) to recover from any missed updates.
+
+### Requirement 3: Authentication and Authorization
+
+**Objective:** As a system administrator, I want WebSocket connections for collaborative editing to be authenticated and authorized, so that only permitted users can access page content via the Yjs channel.
+
+#### Acceptance Criteria
+
+1. When a WebSocket upgrade request is received for collaborative editing, the Yjs Service shall authenticate the user using the existing session/passport mechanism.
+2. When an authenticated user attempts to connect to a page's Yjs document, the Yjs Service shall verify that the user has read access to that page before allowing the connection.
+3. If an unauthenticated or unauthorized WebSocket upgrade request is received, the Yjs Service shall reject the connection with an appropriate HTTP error status.
+4. Where guest access is enabled for a page, the Yjs Service shall allow guest users to connect to that page's collaborative editing session.
+
+### Requirement 4: MongoDB Persistence
+
+**Objective:** As a system operator, I want the Yjs persistence layer to use MongoDB storage, so that draft state is preserved across server restarts and client reconnections.
+
+#### Acceptance Criteria
+
+1. The Yjs Service shall use the `yjs-writings` MongoDB collection for document persistence.
+2. The Yjs Service shall use the `MongodbPersistence` implementation (extended `y-mongodb-provider`).
+3. When a Y.Doc is loaded from persistence, the Yjs Service shall apply the persisted state before sending sync messages to connecting clients.
+4. When a Y.Doc receives updates, the Yjs Service shall persist each update to MongoDB with an `updatedAt` timestamp.
+5. When all clients disconnect from a document, the Yjs Service shall flush the document state to MongoDB before destroying the in-memory instance.
+
+### Requirement 5: Awareness and Presence Tracking
+
+**Objective:** As a wiki user, I want to see which other users are currently editing the same page, so that I can coordinate edits and avoid conflicts.
+
+#### Acceptance Criteria
+
+1. While a user is editing a page, the Editor Client shall broadcast the user's presence information (name, username, avatar, cursor color) via the Yjs awareness protocol.
+2. When a user connects or disconnects from a collaborative editing session, the Yjs Service shall emit awareness state size updates to the page's Socket.IO room (`page:{pageId}`).
+3. When the last user disconnects from a document, the Yjs Service shall emit a draft status notification (`YjsHasYdocsNewerThanLatestRevisionUpdated`) to the page's Socket.IO room.
+4. The Editor Client shall display the list of active editors based on awareness state updates from the Yjs provider.
+
+### Requirement 6: YDoc Status and Sync Integration
+
+**Objective:** As a system component, I want the YDoc status detection and force-sync mechanisms to function correctly, so that draft detection, save operations, and revision synchronization work as expected.
+
+#### Acceptance Criteria
+
+1. The Yjs Service shall expose `getYDocStatus(pageId)` returning the correct status (ISOLATED, NEW, DRAFT, SYNCED, OUTDATED).
+2. The Yjs Service shall expose `getCurrentYdoc(pageId)` returning the in-memory Y.Doc instance if one exists.
+3. When a Y.Doc is loaded from persistence (within `bindState`), the Yjs Service shall call `syncYDoc` to synchronize the document with the latest revision based on YDoc status.
+4. The Yjs Service shall expose `syncWithTheLatestRevisionForce(pageId)` for API-triggered force synchronization.

+ 69 - 0
.kiro/specs/collaborative-editor/research.md

@@ -0,0 +1,69 @@
+# Research & Design Decisions
+
+## Summary
+- **Feature**: `collaborative-editor`
+- **Key Findings**:
+  - y-websocket uses atomic `map.setIfUndefined` for document creation — eliminates TOCTOU race conditions
+  - `y-websocket@2.x` bundles both client and server utils with `yjs@^13` compatibility
+  - `ws` package already installed in GROWI; Express HTTP server supports adding WebSocket upgrade alongside Socket.IO
+
+## Design Decisions
+
+### Decision: Use y-websocket@2.x for both client and server
+
+- **Context**: Need yjs v13 compatibility on both client and server sides
+- **Alternatives Considered**:
+  1. y-websocket@3.x client + custom server — more work, v3 SyncStatus not needed
+  2. y-websocket@3.x + @y/websocket-server — requires yjs v14 migration (out of scope)
+  3. y-websocket@2.x for everything — simplest path, proven code
+- **Selected**: Option 3 — `y-websocket@2.x`
+- **Rationale**: Minimizes custom code, proven server utils, compatible with yjs v13, clear upgrade path to v3 + @y/websocket-server when yjs v14 migration happens
+- **Trade-offs**: Misses the v3 SyncStatus feature, but the `sync` event + `resyncInterval` meet all requirements
+- **Follow-up**: Plan separate yjs v14 migration, then upgrade to y-websocket v3 + @y/websocket-server
+
+### Decision: WebSocket path prefix `/yjs/`
+
+- **Context**: Need URL pattern that doesn't conflict with Socket.IO
+- **Selected**: `/yjs/{pageId}`
+- **Rationale**: Simple, semantic, no conflict with Socket.IO's `/socket.io/` path or Express routes
+
+### Decision: Session-based authentication on WebSocket upgrade
+
+- **Context**: Must authenticate WebSocket connections without Socket.IO middleware
+- **Selected**: Parse session cookie from HTTP upgrade request, deserialize user from session store
+- **Rationale**: Reuses existing session infrastructure — same cookie, same store, same passport serialization
+- **Trade-offs**: Couples to express-session internals, but GROWI already has this coupling throughout
+
+### Decision: Keep Socket.IO for awareness event fan-out
+
+- **Context**: GROWI uses Socket.IO rooms (`page:{pageId}`) to broadcast awareness updates to non-editor components
+- **Selected**: Continue using Socket.IO `io.in(roomName).emit()` for awareness events, bridging from y-websocket awareness
+- **Rationale**: Non-editor UI components already listen on Socket.IO rooms; changing this is out of scope
+
+## Critical Implementation Constraints
+
+### engine.io `destroyUpgrade` setting
+
+Socket.IO's engine.io v6 defaults `destroyUpgrade: true` in its `attach()` method. This causes engine.io to destroy all non-Socket.IO upgrade requests after a 1-second timeout. The Socket.IO server **must** be configured with `destroyUpgrade: false` to allow `/yjs/` WebSocket handshakes to succeed.
+
+### Next.js upgradeHandler race condition (guardSocket pattern)
+
+Next.js's `NextCustomServer.upgradeHandler` registers an `upgrade` listener on the HTTP server. When the Yjs async handler yields at its first `await`, Next.js's synchronous handler runs and calls `socket.end()` for unrecognized paths. The `guardSocket` pattern temporarily replaces `socket.end()`/`socket.destroy()` with no-ops before the first `await`, restoring them after auth completes.
+
+- `prependListener` cannot solve this — it only changes listener order, cannot prevent subsequent listeners from executing
+- Removing Next.js's listener is fragile and breaks HMR
+- Synchronous auth is impossible (requires async MongoDB/session store queries)
+
+### React render-phase violation in use-collaborative-editor-mode
+
+Provider creation and awareness event handlers must be placed **outside** `setProvider(() => { ... })` functional state updaters. If inside, `awareness.setLocalStateField()` triggers synchronous awareness events that update other components during render. All side effects go in the `useEffect` body; `setProvider(_provider)` is called with a plain value.
+
+### y-websocket bindState ordering
+
+y-websocket does NOT await `bindState` before sending sync messages. However, within `bindState` itself, the ordering is guaranteed: persistence load → YDocStatus check → syncYDoc → awareness registration. This consolidation is intentional.
+
+## References
+- [y-websocket GitHub](https://github.com/yjs/y-websocket)
+- [y-websocket-server GitHub](https://github.com/yjs/y-websocket-server) (yjs v14, future migration target)
+- [ws npm](https://www.npmjs.com/package/ws)
+- [y-mongodb-provider](https://github.com/MaxNoetzold/y-mongodb-provider)

+ 22 - 0
.kiro/specs/collaborative-editor/spec.json

@@ -0,0 +1,22 @@
+{
+  "feature_name": "collaborative-editor",
+  "created_at": "2026-03-19T00:00:00.000Z",
+  "updated_at": "2026-03-24T00:00:00.000Z",
+  "language": "en",
+  "phase": "active",
+  "approvals": {
+    "requirements": {
+      "generated": true,
+      "approved": true
+    },
+    "design": {
+      "generated": true,
+      "approved": true
+    },
+    "tasks": {
+      "generated": true,
+      "approved": true
+    }
+  },
+  "ready_for_implementation": true
+}

+ 3 - 0
.kiro/specs/collaborative-editor/tasks.md

@@ -0,0 +1,3 @@
+# Implementation Plan
+
+No pending tasks. Use `/kiro:spec-tasks collaborative-editor` to generate tasks for new work.

+ 153 - 0
.kiro/specs/hotkeys/design.md

@@ -0,0 +1,153 @@
+# Technical Design
+
+## Architecture Overview
+
+The GROWI hotkey system manages keyboard shortcuts globally. It uses `tinykeys` (~400 bytes) as the key binding engine and a **subscriber component pattern** to execute actions when hotkeys fire.
+
+### Component Diagram
+
+```
+BasicLayout / AdminLayout
+  └─ HotkeysManager (loaded via next/dynamic, ssr: false)
+       ├─ tinykeys(window, bindings) — registers all key bindings
+       └─ renders subscriber components on demand:
+            ├─ EditPage
+            ├─ CreatePage
+            ├─ FocusToGlobalSearch
+            ├─ ShowShortcutsModal
+            ├─ ShowStaffCredit
+            └─ SwitchToMirrorMode
+```
+
+### Key Files
+
+| File | Role |
+|------|------|
+| `src/client/components/Hotkeys/HotkeysManager.tsx` | Core orchestrator — binds all keys via tinykeys, renders subscribers |
+| `src/client/components/Hotkeys/Subscribers/*.tsx` | Individual action handlers rendered when their hotkey fires |
+| `src/components/Layout/BasicLayout.tsx` | Mounts HotkeysManager via `next/dynamic({ ssr: false })` |
+| `src/components/Layout/AdminLayout.tsx` | Mounts HotkeysManager via `next/dynamic({ ssr: false })` |
+
+## Design Decisions
+
+### D1: tinykeys as Binding Engine
+
+**Decision**: Use `tinykeys` (v3) instead of `react-hotkeys` (v2).
+
+**Rationale**:
+- `react-hotkeys` contributes 91 modules to async chunks; `tinykeys` is 1 module (~400 bytes)
+- tinykeys natively supports single keys, modifier combos (`Control+/`), and multi-key sequences (`ArrowUp ArrowUp ...`)
+- No need for custom state machine (`HotkeyStroke`) or detection wrapper (`HotkeysDetector`)
+
+**Trade-off**: tinykeys has no React integration — key binding is done imperatively in a `useEffect` hook rather than declaratively via JSX props. This is acceptable given the simplicity of the binding map.
+
+### D2: Subscriber-Owned Binding Definitions
+
+**Decision**: Each subscriber component exports its own `hotkeyBindings` metadata alongside its React component. `HotkeysManager` imports these definitions and auto-builds the tinykeys binding map — it never hardcodes specific keys or subscriber references.
+
+**Rationale**:
+- True "1 module = 1 hotkey" encapsulation: each subscriber owns its key binding, handler category, and action logic
+- Adding a new hotkey requires creating only one file (the new subscriber); `HotkeysManager` needs no modification
+- Fully satisfies Req 7 AC 2 ("define hotkey without modifying core detection logic")
+- Self-documenting: looking at a subscriber file tells you everything about that hotkey
+
+**Type contract**:
+```typescript
+// Shared type definition in HotkeysManager.tsx or a shared types file
+type HotkeyCategory = 'single' | 'modifier';
+
+type HotkeyBindingDef = {
+  keys: string | string[];   // tinykeys key expression(s)
+  category: HotkeyCategory;  // determines handler wrapper (single = input guard, modifier = no guard)
+};
+
+type HotkeySubscriber = {
+  component: React.ComponentType<{ onDeleteRender: () => void }>;
+  bindings: HotkeyBindingDef;
+};
+```
+
+**Subscriber example**:
+```typescript
+// CreatePage.tsx
+export const hotkeyBindings: HotkeyBindingDef = {
+  keys: 'c',
+  category: 'single',
+};
+
+export const CreatePage = ({ onDeleteRender }: Props): null => { /* ... */ };
+```
+
+```typescript
+// ShowShortcutsModal.tsx
+export const hotkeyBindings: HotkeyBindingDef = {
+  keys: ['Control+/', 'Meta+/'],
+  category: 'modifier',
+};
+```
+
+**HotkeysManager usage**:
+```typescript
+// HotkeysManager.tsx
+import * as createPage from './Subscribers/CreatePage';
+import * as editPage from './Subscribers/EditPage';
+// ... other subscribers
+
+const subscribers: HotkeySubscriber[] = [
+  { component: createPage.CreatePage, bindings: createPage.hotkeyBindings },
+  { component: editPage.EditPage, bindings: editPage.hotkeyBindings },
+  // ...
+];
+
+// In useEffect: iterate subscribers to build tinykeys binding map
+```
+
+**Trade-off**: Slightly more structure than a plain object literal, but the pattern is minimal and each subscriber file is fully self-contained.
+
+### D3: Subscriber Render-on-Fire Pattern
+
+**Decision**: Subscriber components are rendered into the React tree only when their hotkey fires, and self-remove after executing their action.
+
+**Rationale**:
+- Preserves the existing GROWI pattern where hotkey actions need access to React hooks (Jotai atoms, SWR, i18n, routing)
+- Components call `onDeleteRender()` after completing their effect to clean up
+- Uses a monotonically incrementing key ref to avoid React key collisions
+
+### D4: Two Handler Categories
+
+**Decision**: `singleKeyHandler` and `modifierKeyHandler` are separated.
+
+**Rationale**:
+- Single-key shortcuts (`e`, `c`, `/`) must be suppressed when the user is typing in input/textarea/contenteditable elements
+- Modifier-key shortcuts (`Control+/`, `Meta+/`) and multi-key sequences should fire regardless of focus, as they are unlikely to conflict with text entry
+- `isEditableTarget()` check is applied only to single-key handlers
+
+### D5: Client-Only Loading
+
+**Decision**: HotkeysManager is loaded via `next/dynamic({ ssr: false })`.
+
+**Rationale**:
+- Keyboard events are client-only; no SSR rendering is needed
+- Dynamic import keeps hotkey modules out of initial server-rendered chunks
+- Both BasicLayout and AdminLayout follow this pattern
+
+## Implementation Deviations from Requirements
+
+| Requirement | Deviation | Justification |
+|-------------|-----------|---------------|
+| Req 8 AC 2: "export typed interfaces for hotkey definitions" | `HotkeyBindingDef` and `HotkeySubscriber` types are exported for subscriber use but not published as a package API | These types are internal to the Hotkeys module; no external consumers need them |
+
+> **Note (task 5)**: Req 8 AC 1 is now fully satisfied — all 6 subscriber components converted from `.jsx` to `.tsx` with TypeScript `Props` types and named exports.
+> **Note (D2 revision)**: Req 7 AC 2 is now fully satisfied — subscriber-owned binding definitions mean adding a hotkey requires only creating a new subscriber file.
+
+## Key Binding Format (tinykeys)
+
+| Category | Format | Example |
+|----------|--------|---------|
+| Single key | `"key"` | `"e"`, `"c"`, `"/"` |
+| Modifier combo | `"Modifier+key"` | `"Control+/"`, `"Meta+/"` |
+| Multi-key sequence | `"key1 key2 key3 ..."` (space-separated) | `"ArrowUp ArrowUp ArrowDown ArrowDown ..."` |
+| Platform modifier | `"$mod+key"` | `"$mod+/"` (Control on Windows/Linux, Meta on macOS) |
+
+> Note: The current implementation uses explicit `Control+/` and `Meta+/` rather than `$mod+/` to match the original behavior.
+

+ 101 - 0
.kiro/specs/hotkeys/requirements.md

@@ -0,0 +1,101 @@
+# Requirements Document
+
+## Introduction
+
+GROWI currently uses `react-hotkeys` (v2.0.0, 91 modules in async chunk) to manage keyboard shortcuts via a custom subscriber pattern. The library is identified as an optimization target due to its module footprint. This specification covers the migration from `react-hotkeys` to `tinykeys`, a lightweight (~400B) keyboard shortcut library, while preserving all existing hotkey functionality and the subscriber-based architecture.
+
+### Current Architecture Overview
+
+- **HotkeysDetector**: Wraps `react-hotkeys`'s `GlobalHotKeys` to capture key events and convert them to custom key expressions
+- **HotkeyStroke**: State machine model for multi-key sequence detection (e.g., Konami codes)
+- **HotkeysManager**: Orchestrator that maps strokes to subscriber components and manages their lifecycle
+- **Subscribers**: 6 components (CreatePage, EditPage, FocusToGlobalSearch, ShowShortcutsModal, ShowStaffCredit, SwitchToMirrorMode) that self-define hotkeys via static `getHotkeyStrokes()`
+
+### Registered Hotkeys
+
+| Shortcut | Action |
+|----------|--------|
+| `c` | Open page creation modal |
+| `e` | Start page editing |
+| `/` | Focus global search |
+| `Ctrl+/` or `Meta+/` | Open shortcuts help modal |
+| `↑↑↓↓←→←→BA` | Show staff credits (Konami code) |
+| `XXBBAAYYA↓←` | Switch to mirror mode (Konami code) |
+
+## Requirements
+
+### Requirement 1: Replace react-hotkeys Dependency with tinykeys
+
+**Objective:** As a developer, I want to replace `react-hotkeys` with `tinykeys`, so that the application's async chunk module count is reduced and the hotkey system uses a modern, lightweight library.
+
+#### Acceptance Criteria
+
+1. The GROWI application shall use `tinykeys` as the keyboard shortcut library instead of `react-hotkeys`.
+2. When the migration is complete, the `react-hotkeys` package shall be removed from `package.json` dependencies.
+3. The GROWI application shall not increase the total async chunk module count compared to the current `react-hotkeys` implementation.
+
+### Requirement 2: Preserve Single-Key Shortcut Functionality
+
+**Objective:** As a user, I want single-key shortcuts to continue working after the migration, so that my workflow is not disrupted.
+
+#### Acceptance Criteria
+
+1. When the user presses the `c` key (outside an input/textarea/editable element), the Hotkeys system shall open the page creation modal.
+2. When the user presses the `e` key (outside an input/textarea/editable element), the Hotkeys system shall start page editing if the page is editable and no modal is open.
+3. When the user presses the `/` key (outside an input/textarea/editable element), the Hotkeys system shall open the global search modal.
+
+### Requirement 3: Preserve Modifier-Key Shortcut Functionality
+
+**Objective:** As a user, I want modifier-key shortcuts to continue working after the migration, so that keyboard shortcut help remains accessible.
+
+#### Acceptance Criteria
+
+1. When the user presses `Ctrl+/` (or `Meta+/` on macOS), the Hotkeys system shall open the shortcuts help modal.
+
+### Requirement 4: Preserve Multi-Key Sequence (Konami Code) Functionality
+
+**Objective:** As a user, I want multi-key sequences (Konami codes) to continue working after the migration, so that easter egg features remain accessible.
+
+#### Acceptance Criteria
+
+1. When the user enters the key sequence `↑↑↓↓←→←→BA`, the Hotkeys system shall show the staff credits modal.
+2. When the user enters the key sequence `XXBBAAYYA↓←`, the Hotkeys system shall apply the mirror mode CSS class to the document body.
+3. While a multi-key sequence is in progress, the Hotkeys system shall track partial matches and reset if an incorrect key is pressed.
+
+### Requirement 5: Input Element Focus Guard
+
+**Objective:** As a user, I want single-key shortcuts to not fire when I am typing in an input field, so that keyboard shortcuts do not interfere with text entry.
+
+#### Acceptance Criteria
+
+1. While an `<input>`, `<textarea>`, or `contenteditable` element is focused, the Hotkeys system shall suppress single-key shortcuts (e.g., `c`, `e`, `/`).
+2. While an `<input>`, `<textarea>`, or `contenteditable` element is focused, the Hotkeys system shall still allow modifier-key shortcuts (e.g., `Ctrl+/`).
+
+### Requirement 6: Lifecycle Management and Cleanup
+
+**Objective:** As a developer, I want hotkey bindings to be properly registered and cleaned up on component mount/unmount, so that there are no memory leaks or stale handlers.
+
+#### Acceptance Criteria
+
+1. When a layout component (BasicLayout or AdminLayout) mounts, the Hotkeys system shall register all hotkey bindings.
+2. When a layout component unmounts, the Hotkeys system shall unsubscribe all hotkey bindings.
+3. The Hotkeys system shall provide a cleanup mechanism compatible with React's `useEffect` return pattern.
+
+### Requirement 7: Maintain Subscriber Component Architecture
+
+**Objective:** As a developer, I want the subscriber-based architecture to be preserved or appropriately modernized, so that adding or modifying hotkeys remains straightforward.
+
+#### Acceptance Criteria
+
+1. The Hotkeys system shall support a pattern where each hotkey action is defined as an independent unit (component or handler) with its own key binding definition.
+2. When a new hotkey action is added, the developer shall be able to define it without modifying the core hotkey detection logic.
+3. The Hotkeys system shall support dynamic rendering of subscriber components when their associated hotkey fires.
+
+### Requirement 8: TypeScript Migration
+
+**Objective:** As a developer, I want the migrated hotkey system to use TypeScript, so that the code benefits from type safety and better IDE support.
+
+#### Acceptance Criteria
+
+1. The Hotkeys system shall be implemented in TypeScript (`.ts`/`.tsx` files) rather than JavaScript (`.js`/`.jsx`).
+2. The Hotkeys system shall export typed interfaces for hotkey definitions and handler signatures.

+ 23 - 0
.kiro/specs/hotkeys/spec.json

@@ -0,0 +1,23 @@
+{
+  "feature_name": "hotkeys",
+  "created_at": "2026-02-20T00:00:00.000Z",
+  "updated_at": "2026-02-24T12:00:00.000Z",
+  "language": "en",
+  "phase": "implementation-complete",
+  "approvals": {
+    "requirements": {
+      "generated": true,
+      "approved": true
+    },
+    "design": {
+      "generated": true,
+      "approved": true
+    },
+    "tasks": {
+      "generated": true,
+      "approved": true
+    }
+  },
+  "ready_for_implementation": true,
+  "cleanup_completed": true
+}

+ 29 - 0
.kiro/specs/hotkeys/tasks.md

@@ -0,0 +1,29 @@
+# Implementation Tasks
+
+## Summary
+
+All tasks completed. Migrated from `react-hotkeys` to `tinykeys` with subscriber-owned binding definitions and full TypeScript conversion.
+
+| Task | Description | Requirements |
+|------|-------------|--------------|
+| 1 | Write HotkeysManager tests (TDD) | 2, 3, 5 |
+| 2 | Rewrite HotkeysManager with tinykeys | 1, 2, 3, 4, 5, 6, 8 |
+| 3 | Remove legacy hotkey infrastructure | 1, 7 |
+| 4 | Verify quality and module reduction (-92 modules) | 1 |
+| 5 | Convert 4 JSX subscribers to TypeScript, fix bugs, unify patterns | 7, 8 |
+| 6.1 | Define shared types, add binding exports to all subscribers | 7, 8 |
+| 6.2 | Refactor HotkeysManager to build binding map from subscriber exports | 6, 7 |
+| 7 | Verify refactoring preserves all existing behavior | 1, 2, 3, 4, 5 |
+
+## Requirements Coverage
+
+| Requirement | Tasks |
+|-------------|-------|
+| 1. Replace react-hotkeys with tinykeys | 2, 3, 4, 7 |
+| 2. Preserve single-key shortcuts | 1, 2, 7 |
+| 3. Preserve modifier-key shortcuts | 1, 2, 7 |
+| 4. Preserve multi-key sequences | 2, 7 |
+| 5. Input element focus guard | 1, 2, 7 |
+| 6. Lifecycle management and cleanup | 2, 6.2 |
+| 7. Subscriber component architecture | 3, 5, 6.1, 6.2 |
+| 8. TypeScript migration | 2, 5, 6.1 |

+ 764 - 0
.kiro/specs/oauth2-email-support/design.md

@@ -0,0 +1,764 @@
+# OAuth 2.0 Email Support - Technical Design
+
+## Overview
+
+This feature adds OAuth 2.0 authentication support for sending emails through Google Workspace accounts in GROWI. Administrators can configure email transmission using OAuth 2.0 credentials (Client ID, Client Secret, Refresh Token) instead of traditional SMTP passwords. This integration extends the existing mail service architecture while maintaining full backward compatibility with SMTP and SES configurations.
+
+**Purpose**: Enable secure, token-based email authentication for Google Workspace accounts, improving security by eliminating password-based SMTP authentication and following Google's recommended practices for application email integration.
+
+**Users**: GROWI administrators configuring email transmission settings will use the new OAuth 2.0 option alongside existing SMTP and SES methods.
+
+**Impact**: Extends the mail service to support a third transmission method (oauth2) without modifying existing SMTP or SES functionality. No breaking changes to existing deployments.
+
+### Goals
+
+- Add OAuth 2.0 as a transmission method option in mail settings
+- Support Google Workspace email sending via Gmail API with OAuth 2.0 credentials
+- Maintain backward compatibility with existing SMTP and SES configurations
+- Provide consistent admin UI experience following SMTP/SES patterns
+- Implement automatic OAuth 2.0 token refresh using nodemailer's built-in support
+- Ensure secure storage and handling of OAuth 2.0 credentials
+
+### Non-Goals
+
+- OAuth 2.0 providers beyond Google Workspace (Microsoft 365, generic OAuth 2.0 servers)
+- Migration tool from SMTP to OAuth 2.0 (administrators manually reconfigure)
+- Authorization flow UI for obtaining refresh tokens (documented external process via Google Cloud Console)
+- Multi-account or account rotation support (single OAuth 2.0 account per instance)
+- Email queuing or rate limiting specific to OAuth 2.0 (relies on existing mail service behavior)
+
+## Architecture
+
+### Existing Architecture Analysis
+
+**Current Mail Service Implementation**:
+- **Service Location**: `apps/app/src/server/service/mail.ts` (MailService class)
+- **Initialization**: MailService instantiated from Crowi container, loaded on app startup
+- **Transmission Methods**: Currently supports 'smtp' and 'ses' via `mail:transmissionMethod` config
+- **Factory Pattern**: `createSMTPClient()` and `createSESClient()` create nodemailer transports
+- **Configuration**: ConfigManager loads settings from MongoDB via `mail:*` namespace keys
+- **S2S Messaging**: Supports distributed config updates via `mailServiceUpdated` events
+- **Test Email**: SMTP-only test email functionality in admin UI
+
+**Current Admin UI Structure**:
+- **Main Component**: `MailSetting.tsx` - form container with transmission method radio buttons
+- **Sub-Components**: `SmtpSetting.tsx`, `SesSetting.tsx` - conditional rendering based on selected method
+- **State Management**: AdminAppContainer (unstated) manages form state and API calls
+- **Form Library**: react-hook-form for validation and submission
+- **API Integration**: `updateMailSettingHandler()` saves all mail settings via REST API
+
+**Integration Points**:
+- Config definition in `config-definition.ts` (add OAuth 2.0 keys)
+- MailService initialize() method (add OAuth 2.0 branch)
+- MailSetting.tsx transmission method array (add 'oauth2' option)
+- AdminAppContainer state methods (add OAuth 2.0 credential methods)
+
+### Architecture Pattern & Boundary Map
+
+```mermaid
+graph TB
+    subgraph "Client Layer"
+        MailSettingUI[MailSetting Component]
+        OAuth2SettingUI[OAuth2Setting Component]
+        SmtpSettingUI[SmtpSetting Component]
+        SesSettingUI[SesSetting Component]
+        AdminContainer[AdminAppContainer]
+    end
+
+    subgraph "API Layer"
+        AppSettingsAPI[App Settings API]
+        MailTestAPI[Mail Test API]
+    end
+
+    subgraph "Service Layer"
+        MailService[MailService]
+        ConfigManager[ConfigManager]
+        S2SMessaging[S2S Messaging]
+    end
+
+    subgraph "External Services"
+        GoogleOAuth[Google OAuth 2.0 API]
+        GmailAPI[Gmail API]
+        SMTPServer[SMTP Server]
+        SESAPI[AWS SES API]
+    end
+
+    subgraph "Data Layer"
+        MongoDB[(MongoDB Config)]
+    end
+
+    MailSettingUI --> AdminContainer
+    OAuth2SettingUI --> AdminContainer
+    SmtpSettingUI --> AdminContainer
+    SesSettingUI --> AdminContainer
+
+    AdminContainer --> AppSettingsAPI
+    AdminContainer --> MailTestAPI
+
+    AppSettingsAPI --> ConfigManager
+    MailTestAPI --> MailService
+
+    MailService --> ConfigManager
+    MailService --> S2SMessaging
+
+    ConfigManager --> MongoDB
+
+    MailService --> GoogleOAuth
+    MailService --> GmailAPI
+    MailService --> SMTPServer
+    MailService --> SESAPI
+
+    S2SMessaging -.->|mailServiceUpdated| MailService
+```
+
+**Architecture Integration**:
+- **Selected Pattern**: Factory Method Extension - adds `createOAuth2Client()` to existing MailService factory methods
+- **Domain Boundaries**:
+  - **Client**: Admin UI components for OAuth 2.0 configuration (follows existing SmtpSetting/SesSetting pattern)
+  - **Service**: MailService handles all transmission methods; OAuth 2.0 isolated in new factory method
+  - **Config**: ConfigManager persists OAuth 2.0 credentials using `mail:oauth2*` namespace
+  - **External**: Google OAuth 2.0 API for token management; Gmail API for email transmission
+- **Existing Patterns Preserved**:
+  - Transmission method selection pattern (radio buttons, conditional rendering)
+  - Factory method pattern for transport creation
+  - Config namespace pattern (`mail:*` keys)
+  - Unstated container state management
+  - S2S messaging for distributed config updates
+- **New Components Rationale**:
+  - **OAuth2Setting Component**: Maintains UI consistency with SMTP/SES; enables modular development
+  - **createOAuth2Client() Method**: Isolates OAuth 2.0 transport logic; follows existing factory pattern
+  - **Four Config Keys**: Minimal set for OAuth 2.0 (user, clientId, clientSecret, refreshToken)
+- **Steering Compliance**:
+  - Feature-based organization (mail service domain)
+  - Named exports throughout
+  - Type safety with explicit TypeScript interfaces
+  - Immutable config updates
+  - Security-first credential handling
+
+### Technology Stack
+
+| Layer | Choice / Version | Role in Feature | Notes |
+|-------|------------------|-----------------|-------|
+| Frontend | React 18.x + TypeScript | OAuth2Setting UI component | Existing stack, no new dependencies |
+| Frontend | react-hook-form | Form validation and state | Existing dependency, consistent with SmtpSetting/SesSetting |
+| Backend | Node.js + TypeScript | MailService OAuth 2.0 integration | Existing runtime, no version changes |
+| Backend | nodemailer 6.x | OAuth 2.0 transport creation | Existing dependency with built-in OAuth 2.0 support |
+| Data | MongoDB | Config storage for OAuth 2.0 credentials | Existing database, new config keys only |
+| External | Google OAuth 2.0 API | Token refresh endpoint | Standard Google API, https://oauth2.googleapis.com/token |
+| External | Gmail API | Email transmission via OAuth 2.0 | Accessed via nodemailer Gmail transport |
+
+**Key Technology Decisions**:
+- **Nodemailer OAuth 2.0**: Built-in support eliminates need for additional OAuth 2.0 libraries; automatic token refresh reduces complexity
+- **No New Dependencies**: Feature fully implemented with existing packages; zero dependency risk
+- **MongoDB Encryption**: Credentials stored using existing ConfigManager encryption (same as SMTP passwords)
+- **Gmail Service Shortcut**: Nodemailer's `service: "gmail"` simplifies configuration and handles Gmail API specifics
+
+## System Flows
+
+### OAuth 2.0 Configuration Flow
+
+```mermaid
+sequenceDiagram
+    participant Admin as Administrator
+    participant UI as MailSetting UI
+    participant Container as AdminAppContainer
+    participant API as App Settings API
+    participant Config as ConfigManager
+    participant DB as MongoDB
+
+    Admin->>UI: Select "oauth2" transmission method
+    UI->>UI: Render OAuth2Setting component
+    Admin->>UI: Enter OAuth 2.0 credentials
+    Admin->>UI: Click Update button
+    UI->>Container: handleSubmit formData
+    Container->>API: POST app-settings
+    API->>API: Validate OAuth 2.0 fields
+    alt Validation fails
+        API-->>Container: 400 Bad Request
+        Container-->>UI: Display error toast
+    else Validation passes
+        API->>Config: setConfig mail:oauth2*
+        Config->>DB: Save encrypted credentials
+        DB-->>Config: Success
+        Config-->>API: Success
+        API-->>Container: 200 OK
+        Container-->>UI: Display success toast
+    end
+```
+
+### Email Sending with OAuth 2.0 Flow
+
+```mermaid
+sequenceDiagram
+    participant App as GROWI Application
+    participant Mail as MailService
+    participant Nodemailer as Nodemailer Transport
+    participant Google as Google OAuth 2.0 API
+    participant Gmail as Gmail API
+
+    App->>Mail: send emailConfig
+    Mail->>Mail: Check mailer setup
+    alt Mailer not setup
+        Mail-->>App: Error Mailer not set up
+    else Mailer setup oauth2
+        Mail->>Nodemailer: sendMail mailConfig
+        Nodemailer->>Nodemailer: Check access token validity
+        alt Access token expired
+            Nodemailer->>Google: POST token refresh
+            Google-->>Nodemailer: New access token
+            Nodemailer->>Nodemailer: Cache access token
+        end
+        Nodemailer->>Gmail: POST send message
+        alt Authentication failure
+            Gmail-->>Nodemailer: 401 Unauthorized
+            Nodemailer-->>Mail: Error Invalid credentials
+            Mail-->>App: Error with OAuth 2.0 details
+        else Success
+            Gmail-->>Nodemailer: 200 OK message ID
+            Nodemailer-->>Mail: Success
+            Mail->>Mail: Log transmission success
+            Mail-->>App: Email sent successfully
+        end
+    end
+```
+
+**Flow-Level Decisions**:
+- **Token Refresh**: Handled entirely by nodemailer; MailService does not implement custom refresh logic
+- **Error Handling**: OAuth 2.0 errors logged with specific Google API error codes for admin troubleshooting
+- **Credential Validation**: Performed at API layer before persisting to database; prevents invalid config states
+- **S2S Sync**: OAuth 2.0 config changes trigger `mailServiceUpdated` event for distributed deployments (existing pattern)
+
+## Requirements Traceability
+
+| Requirement | Summary | Components | Interfaces | Flows |
+|-------------|---------|------------|------------|-------|
+| 1.1 | Add OAuth 2.0 transmission method option | MailSetting.tsx, config-definition.ts | ConfigDefinition | Configuration |
+| 1.2 | Display OAuth 2.0 config fields when selected | OAuth2Setting.tsx, MailSetting.tsx | React Props | Configuration |
+| 1.3 | Validate email address format | AdminAppContainer, App Settings API | API Contract | Configuration |
+| 1.4 | Validate non-empty OAuth 2.0 credentials | AdminAppContainer, App Settings API | API Contract | Configuration |
+| 1.5 | Securely store OAuth 2.0 credentials with encryption | ConfigManager, MongoDB | Data Model | Configuration |
+| 1.6 | Confirm successful configuration save | AdminAppContainer, MailSetting.tsx | API Contract | Configuration |
+| 1.7 | Display descriptive error messages on save failure | AdminAppContainer, MailSetting.tsx | API Contract | Configuration |
+| 2.1 | Use nodemailer Gmail OAuth 2.0 transport | MailService.createOAuth2Client() | Service Interface | Email Sending |
+| 2.2 | Authenticate to Gmail API with OAuth 2.0 | MailService.createOAuth2Client() | External API | Email Sending |
+| 2.3 | Set FROM address to configured email | MailService.setupMailConfig() | Service Interface | Email Sending |
+| 2.4 | Log successful email transmission | MailService.send() | Service Interface | Email Sending |
+| 2.5 | Support all email content types | MailService.send() (existing) | Service Interface | Email Sending |
+| 2.6 | Process email queue sequentially | MailService.send() (existing) | Service Interface | Email Sending |
+| 3.1 | Use nodemailer automatic token refresh | Nodemailer OAuth 2.0 transport | External Library | Email Sending |
+| 3.2 | Request new access token with refresh token | Nodemailer OAuth 2.0 transport | External API | Email Sending |
+| 3.3 | Continue email sending after token refresh | Nodemailer OAuth 2.0 transport | External Library | Email Sending |
+| 3.4 | Log error and notify admin on refresh failure | MailService.send(), Error Handler | Service Interface | Email Sending |
+| 3.5 | Cache access tokens in memory | Nodemailer OAuth 2.0 transport | External Library | Email Sending |
+| 3.6 | Invalidate cached tokens on config update | MailService.initialize() | Service Interface | Configuration |
+| 4.1 | Display OAuth 2.0 form with consistent styling | OAuth2Setting.tsx | React Component | Configuration |
+| 4.2 | Preserve OAuth 2.0 credentials when switching methods | AdminAppContainer state | State Management | Configuration |
+| 4.3 | Provide field-level help text | OAuth2Setting.tsx | React Component | Configuration |
+| 4.4 | Mask sensitive fields (last 4 characters) | OAuth2Setting.tsx | React Component | Configuration |
+| 4.5 | Provide test email button | MailSetting.tsx | API Contract | Email Sending |
+| 4.6 | Display test email result with detailed errors | AdminAppContainer, MailSetting.tsx | API Contract | Email Sending |
+| 5.1 | Log specific OAuth 2.0 error codes | MailService error handler | Service Interface | Email Sending |
+| 5.2 | Retry email sending with exponential backoff | MailService.send() | Service Interface | Email Sending |
+| 5.3 | Store failed emails after all retries | MailService.send() | Service Interface | Email Sending |
+| 5.4 | Never log credentials in plain text | MailService, ConfigManager | Security Pattern | All flows |
+| 5.5 | Require admin authentication for config page | App Settings API | API Contract | Configuration |
+| 5.6 | Stop OAuth 2.0 sending when credentials deleted | MailService.initialize() | Service Interface | Email Sending |
+| 5.7 | Validate SSL/TLS for OAuth 2.0 endpoints | Nodemailer OAuth 2.0 transport | External Library | Email Sending |
+| 6.1 | Maintain backward compatibility with SMTP/SES | MailService, config-definition.ts | All Interfaces | All flows |
+| 6.2 | Use only active transmission method | MailService.initialize() | Service Interface | Email Sending |
+| 6.3 | Allow switching transmission methods without data loss | AdminAppContainer, ConfigManager | State Management | Configuration |
+| 6.4 | Display configuration error if no method set | MailService, MailSetting.tsx | Service Interface | Configuration |
+| 6.5 | Expose OAuth 2.0 status via admin API | App Settings API | API Contract | Configuration |
+
+## Components and Interfaces
+
+### Component Summary
+
+| Component | Domain/Layer | Intent | Req Coverage | Key Dependencies (P0/P1) | Contracts |
+|-----------|--------------|--------|--------------|--------------------------|-----------|
+| MailService | Server/Service | Email transmission with OAuth 2.0 support | 2.1-2.6, 3.1-3.6, 5.1-5.7, 6.2, 6.4 | ConfigManager (P0), Nodemailer (P0), S2SMessaging (P1) | Service |
+| OAuth2Setting | Client/UI | OAuth 2.0 credential input form | 1.2, 4.1, 4.3, 4.4 | AdminAppContainer (P0), react-hook-form (P0) | State |
+| AdminAppContainer | Client/State | State management for mail settings | 1.3, 1.4, 1.6, 1.7, 4.2, 6.3 | App Settings API (P0) | API |
+| ConfigManager | Server/Service | Persist OAuth 2.0 credentials | 1.5, 6.1, 6.3 | MongoDB (P0) | Service, State |
+| App Settings API | Server/API | Mail settings CRUD operations | 1.3-1.7, 4.5-4.6, 5.5, 6.5 | ConfigManager (P0), MailService (P1) | API |
+| Config Definition | Server/Config | OAuth 2.0 config schema | 1.1, 6.1 | None | State |
+
+### Server / Service Layer
+
+#### MailService
+
+| Field | Detail |
+|-------|--------|
+| Intent | Extend email transmission service with OAuth 2.0 support using Gmail API |
+| Requirements | 2.1, 2.2, 2.3, 2.4, 2.5, 2.6, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6, 5.1, 5.2, 5.3, 5.4, 5.6, 5.7, 6.2, 6.4 |
+| Owner / Reviewers | Backend team |
+
+**Responsibilities & Constraints**
+- Create OAuth 2.0 nodemailer transport using Gmail service with credentials from ConfigManager
+- Handle OAuth 2.0 authentication failures and token refresh errors with specific error logging
+- Implement retry logic with exponential backoff (1s, 2s, 4s) for transient failures
+- Store failed emails after all retry attempts for manual review
+- Maintain single active transmission method (smtp, ses, or oauth2) per instance
+- Invalidate cached OAuth 2.0 tokens when configuration changes via S2S messaging
+
+**Dependencies**
+- Inbound: Crowi container — service initialization (P0)
+- Inbound: Application modules — email sending requests (P0)
+- Inbound: S2S Messaging — config update notifications (P1)
+- Outbound: ConfigManager — load OAuth 2.0 credentials (P0)
+- Outbound: Nodemailer — create transport and send emails (P0)
+- External: Google OAuth 2.0 API — token refresh (P0)
+- External: Gmail API — email transmission (P0)
+
+**Contracts**: Service [x]
+
+##### Service Interface
+
+```typescript
+interface MailServiceOAuth2Extension {
+  /**
+   * Create OAuth 2.0 nodemailer transport for Gmail
+   */
+  createOAuth2Client(option?: OAuth2TransportOptions): Transporter | null;
+
+  /**
+   * Send email with retry logic and error handling
+   */
+  sendWithRetry(config: EmailConfig, maxRetries?: number): Promise<SendResult>;
+
+  /**
+   * Store failed email for manual review
+   */
+  storeFailedEmail(config: EmailConfig, error: Error): Promise<void>;
+
+  /**
+   * Wait with exponential backoff
+   */
+  exponentialBackoff(attempt: number): Promise<void>;
+}
+
+interface OAuth2TransportOptions {
+  user: string;
+  clientId: string;
+  clientSecret: string;
+  refreshToken: string;
+}
+
+interface MailService {
+  send(config: EmailConfig): Promise<void>;
+  initialize(): void;
+  isMailerSetup: boolean;
+}
+
+interface EmailConfig {
+  to: string;
+  from?: string;
+  subject?: string;
+  template: string;
+  vars?: Record<string, unknown>;
+}
+
+interface SendResult {
+  messageId: string;
+  response: string;
+  envelope: {
+    from: string;
+    to: string[];
+  };
+}
+```
+
+- **Preconditions**:
+  - ConfigManager loaded with valid `mail:oauth2*` configuration values
+  - Nodemailer package version supports OAuth 2.0 (v6.x+)
+  - Google OAuth 2.0 refresh token has `https://mail.google.com/` scope
+
+- **Postconditions**:
+  - OAuth 2.0 transport created with automatic token refresh enabled
+  - `isMailerSetup` flag set to true when transport successfully created
+  - Failed transport creation returns null and logs error
+  - Successful email sends logged with messageId and recipient
+  - Failed emails stored after retry exhaustion
+
+- **Invariants**:
+  - Only one transmission method active at a time
+  - Credentials never logged in plain text
+  - Token refresh handled transparently by nodemailer
+  - Retry backoff: 1s, 2s, 4s
+
+
+#### ConfigManager
+
+| Field | Detail |
+|-------|--------|
+| Intent | Persist and retrieve OAuth 2.0 credentials with encryption |
+| Requirements | 1.5, 6.1, 6.3 |
+
+**Responsibilities & Constraints**
+- Store four new OAuth 2.0 config keys with encryption
+- Support transmission method value 'oauth2'
+- Maintain all SMTP and SES config values when OAuth 2.0 is configured
+
+**Dependencies**
+- Inbound: MailService, App Settings API (P0)
+- Outbound: MongoDB, Encryption Service (P0)
+
+**Contracts**: Service [x] / State [x]
+
+##### Service Interface
+
+```typescript
+interface ConfigManagerOAuth2Extension {
+  getConfig(key: 'mail:oauth2User'): string | undefined;
+  getConfig(key: 'mail:oauth2ClientId'): string | undefined;
+  getConfig(key: 'mail:oauth2ClientSecret'): string | undefined;
+  getConfig(key: 'mail:oauth2RefreshToken'): string | undefined;
+  getConfig(key: 'mail:transmissionMethod'): 'smtp' | 'ses' | 'oauth2' | undefined;
+
+  setConfig(key: 'mail:oauth2User', value: string): Promise<void>;
+  setConfig(key: 'mail:oauth2ClientId', value: string): Promise<void>;
+  setConfig(key: 'mail:oauth2ClientSecret', value: string): Promise<void>;
+  setConfig(key: 'mail:oauth2RefreshToken', value: string): Promise<void>;
+  setConfig(key: 'mail:transmissionMethod', value: 'smtp' | 'ses' | 'oauth2'): Promise<void>;
+}
+```
+
+##### State Management
+
+- **State Model**: OAuth 2.0 credentials stored as separate config documents in MongoDB
+- **Persistence**: Encrypted at write time; decrypted at read time
+- **Consistency**: Atomic writes per config key
+- **Concurrency**: Last-write-wins; S2S messaging for eventual consistency
+
+
+### Client / UI Layer
+
+#### OAuth2Setting Component
+
+| Field | Detail |
+|-------|--------|
+| Intent | Render OAuth 2.0 credential input form with help text and field masking |
+| Requirements | 1.2, 4.1, 4.3, 4.4 |
+
+**Responsibilities & Constraints**
+- Display four input fields with help text
+- Mask saved Client Secret and Refresh Token (show last 4 characters)
+- Follow SMTP/SES visual patterns
+- Use react-hook-form register
+
+**Dependencies**
+- Inbound: MailSetting component (P0)
+- Outbound: AdminAppContainer (P1)
+- External: react-hook-form (P0)
+
+**Contracts**: State [x]
+
+##### State Management
+
+```typescript
+interface OAuth2SettingProps {
+  register: UseFormRegister<MailSettingsFormData>;
+  adminAppContainer?: AdminAppContainer;
+}
+
+interface MailSettingsFormData {
+  fromAddress: string;
+  transmissionMethod: 'smtp' | 'ses' | 'oauth2';
+  smtpHost: string;
+  smtpPort: string;
+  smtpUser: string;
+  smtpPassword: string;
+  sesAccessKeyId: string;
+  sesSecretAccessKey: string;
+  oauth2User: string;
+  oauth2ClientId: string;
+  oauth2ClientSecret: string;
+  oauth2RefreshToken: string;
+}
+```
+
+
+#### AdminAppContainer (Extension)
+
+| Field | Detail |
+|-------|--------|
+| Intent | Manage OAuth 2.0 credential state and API interactions |
+| Requirements | 1.3, 1.4, 1.6, 1.7, 4.2, 6.3 |
+
+**Responsibilities & Constraints**
+- Add four state properties and setter methods
+- Include OAuth 2.0 credentials in API payload
+- Validate email format before API call
+- Display success/error toasts
+
+**Dependencies**
+- Inbound: MailSetting, OAuth2Setting (P0)
+- Outbound: App Settings API (P0)
+
+**Contracts**: State [x] / API [x]
+
+##### State Management
+
+```typescript
+interface AdminAppContainerOAuth2State {
+  fromAddress?: string;
+  transmissionMethod?: 'smtp' | 'ses' | 'oauth2';
+  smtpHost?: string;
+  smtpPort?: string;
+  smtpUser?: string;
+  smtpPassword?: string;
+  sesAccessKeyId?: string;
+  sesSecretAccessKey?: string;
+  isMailerSetup: boolean;
+  oauth2User?: string;
+  oauth2ClientId?: string;
+  oauth2ClientSecret?: string;
+  oauth2RefreshToken?: string;
+}
+
+interface AdminAppContainerOAuth2Methods {
+  changeOAuth2User(oauth2User: string): Promise<void>;
+  changeOAuth2ClientId(oauth2ClientId: string): Promise<void>;
+  changeOAuth2ClientSecret(oauth2ClientSecret: string): Promise<void>;
+  changeOAuth2RefreshToken(oauth2RefreshToken: string): Promise<void>;
+  updateMailSettingHandler(): Promise<void>;
+}
+```
+
+
+### Server / API Layer
+
+#### App Settings API (Extension)
+
+| Field | Detail |
+|-------|--------|
+| Intent | Handle OAuth 2.0 credential CRUD operations with validation |
+| Requirements | 1.3, 1.4, 1.5, 1.6, 1.7, 4.5, 4.6, 5.5, 6.5 |
+
+**Responsibilities & Constraints**
+- Accept OAuth 2.0 credentials in PUT request
+- Validate email format and non-empty credentials
+- Persist via ConfigManager
+- Trigger S2S messaging
+- Require admin authentication
+
+**Dependencies**
+- Inbound: AdminAppContainer (P0)
+- Outbound: ConfigManager, MailService, S2S Messaging (P0/P1)
+
+**Contracts**: API [x]
+
+##### API Contract
+
+| Method | Endpoint | Request | Response | Errors |
+|--------|----------|---------|----------|--------|
+| PUT | /api/v3/app-settings | UpdateMailSettingsRequest | AppSettingsResponse | 400, 401, 500 |
+| GET | /api/v3/app-settings | - | AppSettingsResponse | 401, 500 |
+| POST | /api/v3/mail/send-test | - | TestEmailResponse | 400, 401, 500 |
+
+**Request/Response Schemas**:
+
+```typescript
+interface UpdateMailSettingsRequest {
+  'mail:from'?: string;
+  'mail:transmissionMethod'?: 'smtp' | 'ses' | 'oauth2';
+  'mail:smtpHost'?: string;
+  'mail:smtpPort'?: string;
+  'mail:smtpUser'?: string;
+  'mail:smtpPassword'?: string;
+  'mail:sesAccessKeyId'?: string;
+  'mail:sesSecretAccessKey'?: string;
+  'mail:oauth2User'?: string;
+  'mail:oauth2ClientId'?: string;
+  'mail:oauth2ClientSecret'?: string;
+  'mail:oauth2RefreshToken'?: string;
+}
+
+interface AppSettingsResponse {
+  appSettings: {
+    'mail:from'?: string;
+    'mail:transmissionMethod'?: 'smtp' | 'ses' | 'oauth2';
+    'mail:smtpHost'?: string;
+    'mail:smtpPort'?: string;
+    'mail:smtpUser'?: string;
+    'mail:sesAccessKeyId'?: string;
+    'mail:oauth2User'?: string;
+    'mail:oauth2ClientId'?: string;
+  };
+  isMailerSetup: boolean;
+}
+
+interface TestEmailResponse {
+  success: boolean;
+  message?: string;
+  error?: {
+    code: string;
+    message: string;
+  };
+}
+```
+
+
+### Server / Config Layer
+
+#### Config Definition (Extension)
+
+| Field | Detail |
+|-------|--------|
+| Intent | Define OAuth 2.0 configuration schema with type safety |
+| Requirements | 1.1, 6.1 |
+
+**Config Schema**:
+
+```typescript
+const CONFIG_KEYS = [
+  'mail:oauth2User',
+  'mail:oauth2ClientId',
+  'mail:oauth2ClientSecret',
+  'mail:oauth2RefreshToken',
+];
+
+'mail:transmissionMethod': defineConfig<'smtp' | 'ses' | 'oauth2' | undefined>({
+  defaultValue: undefined,
+}),
+
+'mail:oauth2User': defineConfig<string | undefined>({
+  defaultValue: undefined,
+}),
+'mail:oauth2ClientId': defineConfig<string | undefined>({
+  defaultValue: undefined,
+}),
+'mail:oauth2ClientSecret': defineConfig<string | undefined>({
+  defaultValue: undefined,
+  isSecret: true,
+}),
+'mail:oauth2RefreshToken': defineConfig<string | undefined>({
+  defaultValue: undefined,
+  isSecret: true,
+}),
+```
+
+## Data Models
+
+### Domain Model
+
+**Mail Configuration Aggregate**:
+- **Root Entity**: MailConfiguration
+- **Value Objects**: TransmissionMethod, OAuth2Credentials, SmtpCredentials, SesCredentials
+- **Business Rules**: Only one transmission method active; OAuth2Credentials complete when all fields present
+- **Invariants**: Credentials encrypted; FROM address required
+
+### Logical Data Model
+
+**Structure Definition**:
+- **Entity**: Config (MongoDB document)
+- **Attributes**: ns, key, value, createdAt, updatedAt
+- **Natural Keys**: compound (ns, key) — unique per namespace/key pair
+
+**Consistency & Integrity**:
+- **Transaction Boundaries**: Each config key saved independently
+- **Temporal Aspects**: updatedAt tracked per entry
+
+### Physical Data Model
+
+- Config documents stored in MongoDB with ns/key/value pattern
+- FailedEmail documents track failed email attempts with error context
+- **Encryption**: AES-256 for clientSecret and refreshToken via environment-provided key
+
+### Data Contracts & Integration
+
+**API Data Transfer**:
+- OAuth 2.0 credentials via JSON in PUT /api/v3/app-settings
+- Client Secret and Refresh Token never returned in GET responses
+
+**Cross-Service Data Management**:
+- S2S messaging broadcasts mailServiceUpdated event
+- Eventual consistency across instances
+
+
+## Critical Implementation Constraints
+
+### Nodemailer XOAuth2 Compatibility (CRITICAL)
+
+**Constraint**: OAuth 2.0 credential validation **must use falsy checks** (`!value`) not null checks (`value != null`) to match nodemailer's internal XOAuth2 handler behavior.
+
+**Rationale**: Nodemailer's XOAuth2.generateToken() method uses `!this.options.refreshToken` at line 184, which rejects empty strings as invalid. Using `!= null` checks in GROWI would allow empty strings through validation, causing runtime failures when nodemailer rejects them.
+
+**Implementation Pattern**:
+```typescript
+// ✅ CORRECT: Falsy check matches nodemailer behavior
+if (!clientId || !clientSecret || !refreshToken || !user) {
+  return null;
+}
+```
+
+**Impact**: Affects MailService.createOAuth2Client(), ConfigManager validation, and API validators. All OAuth 2.0 credential checks must follow this pattern.
+
+**Reference**: [mail.ts:219-226](../../../apps/app/src/server/service/mail.ts#L219-L226), [research.md](research.md#1-nodemailer-xoauth2-falsy-check-requirement)
+
+---
+
+### Credential Preservation Pattern (CRITICAL)
+
+**Constraint**: PUT requests updating OAuth 2.0 configuration **must only include secret fields (clientSecret, refreshToken) when non-empty values are provided**, preventing accidental credential overwrites.
+
+**Rationale**: Standard PUT pattern sending all form fields would overwrite secrets with empty strings when administrators update non-secret fields (from address, user email). GET endpoint returns `undefined` for secrets (not masked placeholders) to prevent re-submission of placeholder text.
+
+**Implementation Pattern**:
+```typescript
+// Build params with non-secret fields
+const params = {
+  'mail:oauth2ClientId': req.body.oauth2ClientId,
+  'mail:oauth2User': req.body.oauth2User,
+};
+
+// Only include secrets if non-empty
+if (req.body.oauth2ClientSecret) {
+  params['mail:oauth2ClientSecret'] = req.body.oauth2ClientSecret;
+}
+```
+
+**Impact**: Affects App Settings API PUT handler and any future API that updates OAuth 2.0 credentials.
+
+**Reference**: [apiv3/app-settings/index.ts:293-306](../../../apps/app/src/server/routes/apiv3/app-settings/index.ts#L293-L306), [research.md](research.md#3-credential-preservation-pattern)
+
+---
+
+### Gmail API FROM Address Behavior (LIMITATION)
+
+**Limitation**: Gmail API **rewrites FROM addresses to the authenticated account email** unless send-as aliases are configured in Google Workspace.
+
+**Example**:
+```
+Configured: mail:from = "notifications@example.com"
+Authenticated: oauth2User = "admin@company.com"
+Actual sent FROM: "admin@company.com"
+```
+
+**Workaround**: Google Workspace administrators must configure send-as aliases in Gmail Settings → Accounts and Import → Send mail as, then verify domain ownership.
+
+**Why This Happens**: Gmail API security policy prevents email spoofing by restricting FROM addresses to authenticated accounts or verified aliases.
+
+**Impact**: GROWI's `mail:from` configuration has limited effect with OAuth 2.0. Custom FROM addresses require Google Workspace configuration. This is expected Gmail behavior, not a GROWI limitation.
+
+**Reference**: [research.md](research.md#2-gmail-api-from-address-rewriting)
+
+---
+
+### OAuth 2.0 Retry Integration (DESIGN DECISION)
+
+**Decision**: OAuth 2.0 transmission uses `sendWithRetry()` with exponential backoff (1s, 2s, 4s), while SMTP/SES use direct `sendMail()` without retries.
+
+**Rationale**: OAuth 2.0 token refresh can fail transiently due to network issues or Google API rate limiting. Exponential backoff provides resilience without overwhelming the API.
+
+**Implementation**:
+```typescript
+if (transmissionMethod === 'oauth2') {
+  return this.sendWithRetry(mailConfig);
+}
+return this.mailer.sendMail(mailConfig);
+```
+
+**Impact**: OAuth 2.0 email failures are automatically retried, improving reliability for production deployments.
+
+**Reference**: [mail.ts:392-400](../../../apps/app/src/server/service/mail.ts#L392-L400)

+ 57 - 0
.kiro/specs/oauth2-email-support/requirements.md

@@ -0,0 +1,57 @@
+# Requirements Document
+
+## Project Description (Input)
+OAuth 2.0 authentication で Google Workspace を利用し email を送信する機能を追加したい
+
+### Context from User
+This implementation adds OAuth 2.0 authentication support for sending emails using Google Workspace accounts. The feature is fully integrated into the admin settings UI and follows the existing patterns for SMTP and SES configuration.
+
+Key configuration parameters:
+- Email Address: The authorized Google account email
+- Client ID: OAuth 2.0 Client ID from Google Cloud Console
+- Client Secret: OAuth 2.0 Client Secret
+- Refresh Token: OAuth 2.0 Refresh Token obtained from authorization flow
+
+The implementation uses nodemailer's built-in Gmail OAuth 2.0 support, which handles token refresh automatically.
+
+## Introduction
+
+This specification defines the requirements for adding OAuth 2.0 authentication support for email transmission using Google Workspace accounts in GROWI. The feature enables administrators to configure email sending through Google's Gmail API using OAuth 2.0 credentials instead of traditional SMTP authentication. This provides enhanced security through token-based authentication and follows Google's recommended practices for application email integration.
+
+## Requirements
+
+### Requirement 1: OAuth 2.0 Configuration Management
+
+**Objective:** As a GROWI administrator, I want to configure OAuth 2.0 credentials for Google Workspace email sending, so that the system can securely send emails without using SMTP passwords.
+
+**Summary**: The Admin Settings UI provides OAuth 2.0 as a transmission method option alongside SMTP and SES. The configuration form includes fields for Email Address, Client ID, Client Secret, and Refresh Token. The Email Address field is validated for email format, and all credential fields are validated as non-empty using falsy checks; secrets are encrypted before database storage. Configuration updates preserve existing secrets when empty values are submitted, preventing accidental credential overwrites. Success and error feedback is displayed to administrators.
+
+### Requirement 2: Email Sending Functionality
+
+**Objective:** As a GROWI system, I want to send emails using OAuth 2.0 authenticated Google Workspace accounts, so that notifications and system emails can be delivered securely without SMTP credentials.
+
+**Summary**: The Email Service uses nodemailer with Gmail OAuth 2.0 transport for email sending when OAuth 2.0 is configured. Authentication to Gmail API is automatic using configured credentials. The service supports all email content types (plain text, HTML, attachments, standard headers). Successful transmissions are logged with timestamp and recipient information. OAuth 2.0 sends use retry logic with exponential backoff (1s, 2s, 4s) to handle transient failures. Note: Gmail API rewrites FROM address to the authenticated account unless send-as aliases are configured in Google Workspace.
+
+### Requirement 3: Token Management
+
+**Objective:** As a GROWI system, I want to automatically manage OAuth 2.0 access token lifecycle, so that email sending continues without manual intervention when tokens expire.
+
+**Summary**: Token refresh is handled automatically by nodemailer's built-in OAuth 2.0 support. Access tokens are cached in memory and reused until expiration. When refresh tokens are used, nodemailer requests new access tokens from Google's OAuth 2.0 endpoint transparently. Token refresh failures are logged with specific error codes for troubleshooting. When OAuth 2.0 configuration is updated, cached tokens are invalidated via service reinitialization triggered by S2S messaging.
+
+### Requirement 4: Admin UI Integration
+
+**Objective:** As a GROWI administrator, I want OAuth 2.0 email configuration to follow the same UI patterns as SMTP and SES, so that I can configure it consistently with existing mail settings.
+
+**Summary**: The Mail Settings page displays OAuth 2.0 configuration form with consistent visual styling, preserves credentials when switching transmission methods, and shows configuration status. Browser autofill is prevented for secret fields, and placeholder text indicates that blank fields will preserve existing values.
+
+### Requirement 5: Error Handling and Security
+
+**Objective:** As a GROWI administrator, I want clear error messages and secure credential handling, so that I can troubleshoot configuration issues and ensure credentials are protected.
+
+**Summary**: Authentication failures are logged with specific OAuth 2.0 error codes from Google's API for troubleshooting. Email sending failures trigger automatic retry with exponential backoff (3 attempts: 1s, 2s, 4s). Failed emails after retry exhaustion are stored in the database for manual review. Credentials are never logged in plain text (Client ID masked to last 4 characters). Admin authentication is required to access configuration. SSL/TLS validation is enforced by nodemailer. When OAuth 2.0 credentials are incomplete or deleted, the Email Service stops sending and displays configuration errors via isMailerSetup flag.
+
+### Requirement 6: Migration and Compatibility
+
+**Objective:** As a GROWI system, I want OAuth 2.0 email support to coexist with existing SMTP and SES configurations, so that administrators can choose the most appropriate transmission method for their deployment.
+
+**Summary**: OAuth 2.0 is added as a third transmission method option without breaking changes to existing SMTP and SES functionality. Only the active transmission method is used for sending emails. Administrators can switch between methods without data loss (credentials for all methods are preserved). Configuration errors are displayed when no transmission method is properly configured (via isMailerSetup flag). OAuth 2.0 configuration status is exposed through existing admin API endpoints following the same pattern as SMTP/SES.

+ 449 - 0
.kiro/specs/oauth2-email-support/research.md

@@ -0,0 +1,449 @@
+# Research & Design Decisions
+
+---
+**Purpose**: Capture discovery findings, architectural investigations, and rationale that inform the technical design for OAuth 2.0 email support.
+
+**Usage**:
+- Log research activities and outcomes during the discovery phase.
+- Document design decision trade-offs that are too detailed for `design.md`.
+- Provide references and evidence for future audits or reuse.
+---
+
+## Summary
+- **Feature**: `oauth2-email-support`
+- **Discovery Scope**: Extension (integrating OAuth2 into existing mail service architecture)
+- **Key Findings**:
+  - Existing mail service supports SMTP and SES via transmission method pattern
+  - Nodemailer has built-in OAuth2 support for Gmail with automatic token refresh
+  - Admin UI follows modular pattern with separate setting components per transmission method
+  - Config management uses `mail:*` namespace with type-safe definitions
+
+## Research Log
+
+### Existing Mail Service Architecture
+
+- **Context**: Need to understand integration points for OAuth2 support
+- **Sources Consulted**:
+  - `apps/app/src/server/service/mail.ts` (MailService implementation)
+  - `apps/app/src/client/components/Admin/App/MailSetting.tsx` (Admin UI)
+  - `apps/app/src/server/service/config-manager/config-definition.ts` (Config schema)
+- **Findings**:
+  - MailService uses factory pattern: `createSMTPClient()`, `createSESClient()`
+  - Transmission method determined by `mail:transmissionMethod` config value ('smtp' | 'ses')
+  - `initialize()` method called on service startup and S2S message updates
+  - Nodemailer transporter created based on transmission method
+  - Admin UI uses conditional rendering for SMTP vs SES settings
+  - State management via AdminAppContainer (unstated pattern)
+  - Test email functionality exists for SMTP only
+- **Implications**:
+  - OAuth2 follows same pattern: add `createOAuth2Client()` method
+  - Extend `mail:transmissionMethod` type to `'smtp' | 'ses' | 'oauth2'`
+  - Create new `OAuth2Setting.tsx` component following SMTP/SES pattern
+  - Add OAuth2-specific config keys following `mail:*` namespace
+
+### Nodemailer OAuth2 Integration
+
+- **Context**: Verify OAuth2 support in nodemailer and configuration requirements
+- **Sources Consulted**:
+  - [OAuth2 | Nodemailer](https://nodemailer.com/smtp/oauth2)
+  - [Using Gmail | Nodemailer](https://nodemailer.com/usage/using-gmail)
+  - [Sending Emails Securely Using Node.js, Nodemailer, SMTP, Gmail, and OAuth2](https://dev.to/chandrapantachhetri/sending-emails-securely-using-node-js-nodemailer-smtp-gmail-and-oauth2-g3a)
+  - Web search: "nodemailer gmail oauth2 configuration 2026"
+- **Findings**:
+  - Nodemailer has first-class OAuth2 support with type `'OAuth2'`
+  - Configuration structure:
+    ```javascript
+    {
+      service: "gmail",
+      auth: {
+        type: "OAuth2",
+        user: "user@gmail.com",
+        clientId: process.env.GOOGLE_CLIENT_ID,
+        clientSecret: process.env.GOOGLE_CLIENT_SECRET,
+        refreshToken: process.env.GOOGLE_REFRESH_TOKEN
+      }
+    }
+    ```
+  - Automatic access token refresh handled by nodemailer
+  - Requires `https://mail.google.com/` OAuth scope
+  - Gmail service shortcut available (simplifies configuration)
+  - Production consideration: Gmail is designed for individual users, not automated services
+- **Implications**:
+  - No additional dependencies needed (nodemailer already installed)
+  - Four config values required: user email, clientId, clientSecret, refreshToken
+  - Token refresh is automatic - no manual refresh logic needed
+  - Should validate credentials before saving to config
+  - Security: clientSecret and refreshToken must be encrypted in database
+
+### Config Manager Pattern Analysis
+
+- **Context**: Understand how to add new config keys for OAuth2 credentials
+- **Sources Consulted**:
+  - `apps/app/src/server/service/config-manager/config-definition.ts`
+  - Existing mail config keys: `mail:from`, `mail:transmissionMethod`, `mail:smtpHost`, etc.
+- **Findings**:
+  - Config keys use namespace pattern: `mail:*`
+  - Type-safe definitions using `defineConfig<T>()`
+  - Existing transmission method: `defineConfig<'smtp' | 'ses' | undefined>()`
+  - Config values stored in database via ConfigManager
+  - No explicit encryption layer visible in config definition (handled elsewhere)
+- **Implications**:
+  - Add four new keys: `mail:oauth2User`, `mail:oauth2ClientId`, `mail:oauth2ClientSecret`, `mail:oauth2RefreshToken`
+  - Update `mail:transmissionMethod` type to `'smtp' | 'ses' | 'oauth2' | undefined`
+  - Encryption should be handled at persistence layer (ConfigManager or database model)
+  - Follow same pattern as SMTP/SES for consistency
+
+### Admin UI State Management Pattern
+
+- **Context**: Understand how to integrate OAuth2 settings into admin UI
+- **Sources Consulted**:
+  - `apps/app/src/client/components/Admin/App/SmtpSetting.tsx`
+  - `apps/app/src/client/components/Admin/App/SesSetting.tsx`
+  - `apps/app/src/client/services/AdminAppContainer.js`
+- **Findings**:
+  - Separate component per transmission method (SmtpSetting, SesSetting)
+  - Components receive `register` from react-hook-form
+  - Unstated container pattern for state management
+  - Container methods: `changeSmtpHost()`, `changeFromAddress()`, etc.
+  - `updateMailSettingHandler()` saves all settings via API
+  - Test email button only shown for SMTP
+- **Implications**:
+  - Create `OAuth2Setting.tsx` component following same structure
+  - Add four state methods to AdminAppContainer: `changeOAuth2User()`, `changeOAuth2ClientId()`, etc.
+  - Include OAuth2 credentials in `updateMailSettingHandler()` API call
+  - Test email functionality should work for OAuth2 (same as SMTP)
+  - Field masking needed for clientSecret and refreshToken
+
+### Security Considerations
+
+- **Context**: Ensure secure handling of OAuth2 credentials
+- **Sources Consulted**:
+  - GROWI security guidelines (`.claude/rules/security.md`)
+  - Existing SMTP/SES credential handling
+- **Findings**:
+  - Credentials stored in MongoDB via ConfigManager
+  - Input fields use `type="password"` for sensitive values
+  - No explicit encryption visible in UI layer
+  - Logging should not expose credentials
+- **Implications**:
+  - Use `type="password"` for clientSecret and refreshToken fields
+  - Mask values when displaying saved configuration (show last 4 characters)
+  - Never log credentials in plain text
+  - Validate SSL/TLS when connecting to Google OAuth endpoints
+  - Ensure admin authentication required before accessing config page
+
+## Architecture Pattern Evaluation
+
+| Option | Description | Strengths | Risks / Limitations | Notes |
+|--------|-------------|-----------|---------------------|-------|
+| Factory Method Extension | Add `createOAuth2Client()` to existing MailService | Follows existing pattern, minimal changes, consistent with SMTP/SES | None significant | Recommended - aligns with current architecture |
+| Separate OAuth2Service | Create dedicated service for OAuth2 mail | Better separation of concerns | Over-engineering for simple extension, breaks existing pattern | Not recommended - unnecessary complexity |
+| Adapter Pattern | Wrap OAuth2 in adapter implementing mail interface | More flexible for future auth methods | Premature abstraction, more code to maintain | Not needed for single OAuth2 implementation |
+
+## Design Decisions
+
+### Decision: Extend Existing MailService with OAuth2 Support
+
+- **Context**: Need to add OAuth2 email sending without breaking existing SMTP/SES functionality
+- **Alternatives Considered**:
+  1. Create separate OAuth2MailService - more modular but introduces service management complexity
+  2. Refactor to plugin architecture - future-proof but over-engineered for current needs
+  3. Extend existing MailService with factory method - follows current pattern
+- **Selected Approach**: Extend existing MailService with `createOAuth2Client()` method
+- **Rationale**:
+  - Maintains consistency with existing architecture
+  - Minimal code changes reduce risk
+  - Clear migration path (no breaking changes)
+  - GROWI already uses this pattern successfully for SMTP/SES
+- **Trade-offs**:
+  - Benefits: Low risk, fast implementation, familiar pattern
+  - Compromises: All transmission methods in single service (acceptable given simplicity)
+- **Follow-up**: Ensure test coverage for OAuth2 path alongside existing SMTP/SES tests
+
+### Decision: Use Nodemailer's Built-in OAuth2 Support
+
+- **Context**: Need reliable OAuth2 implementation with automatic token refresh
+- **Alternatives Considered**:
+  1. Manual OAuth2 implementation with googleapis library - more control but complex
+  2. Third-party OAuth2 wrapper - additional dependency
+  3. Nodemailer built-in OAuth2 - zero additional dependencies
+- **Selected Approach**: Use nodemailer's native OAuth2 support with Gmail service
+- **Rationale**:
+  - No additional dependencies (nodemailer already installed)
+  - Automatic token refresh reduces complexity
+  - Well-documented and actively maintained
+  - Matches user's original plan (stated in requirements)
+- **Trade-offs**:
+  - Benefits: Simple, reliable, no new dependencies
+  - Compromises: Limited to Gmail/Google Workspace (acceptable per requirements)
+- **Follow-up**: Document Google Cloud Console setup steps for administrators
+
+### Decision: Preserve Existing Transmission Method Pattern
+
+- **Context**: Maintain backward compatibility while adding OAuth2 option
+- **Alternatives Considered**:
+  1. Deprecate transmission method concept - breaking change
+  2. Add OAuth2 as transmission method option - extends existing pattern
+  3. Support multiple simultaneous methods - unnecessary complexity
+- **Selected Approach**: Add 'oauth2' as third transmission method option
+- **Rationale**:
+  - Zero breaking changes for existing users
+  - Consistent admin UI experience
+  - Clear mutual exclusivity (one method active at a time)
+  - Easy to test and validate
+- **Trade-offs**:
+  - Benefits: Backward compatible, simple mental model
+  - Compromises: Only one transmission method active (acceptable per requirements)
+- **Follow-up**: Ensure switching between methods preserves all config values
+
+### Decision: Component-Based UI Following SMTP/SES Pattern
+
+- **Context**: Need consistent admin UI for OAuth2 configuration
+- **Alternatives Considered**:
+  1. Inline OAuth2 fields in main form - cluttered UI
+  2. Modal dialog for OAuth2 setup - breaks existing pattern
+  3. Separate OAuth2Setting component - matches SMTP/SES pattern
+- **Selected Approach**: Create `OAuth2Setting.tsx` component rendered conditionally
+- **Rationale**:
+  - Maintains visual consistency across transmission methods
+  - Reuses existing form patterns (react-hook-form, unstated)
+  - Easy for admins familiar with SMTP/SES setup
+  - Supports incremental development (component isolation)
+- **Trade-offs**:
+  - Benefits: Consistent UX, modular code, easy testing
+  - Compromises: Minor code duplication in form field rendering (acceptable)
+- **Follow-up**: Add help text for each OAuth2 field explaining Google Cloud Console setup
+
+## Risks & Mitigations
+
+- **Risk**: OAuth2 credentials stored in plain text in database
+  - **Mitigation**: Implement encryption at ConfigManager persistence layer; use same encryption as SMTP passwords
+
+- **Risk**: Refresh token expiration or revocation not handled
+  - **Mitigation**: Nodemailer handles refresh automatically; log specific error codes for troubleshooting; document token refresh in admin help text
+
+- **Risk**: Google rate limiting or account suspension
+  - **Mitigation**: Document production usage considerations; implement exponential backoff retry logic; log detailed error responses from Gmail API
+
+- **Risk**: Incomplete credential configuration causing service failure
+  - **Mitigation**: Validate all four required fields before saving; display clear error messages; maintain isMailerSetup flag for health checks
+
+- **Risk**: Breaking changes to existing SMTP/SES functionality
+  - **Mitigation**: Preserve all existing code paths; add OAuth2 as isolated branch; comprehensive integration tests for all three methods
+
+## Session 2: Production Implementation Discoveries (2026-02-10)
+
+### Critical Technical Constraints Identified
+
+#### 1. Nodemailer XOAuth2 Falsy Check Requirement
+
+**Discovery**: Production testing revealed "Can't create new access token for user" errors from nodemailer's XOAuth2 handler.
+
+**Root Cause**: Nodemailer's XOAuth2 implementation uses **falsy checks** (`!this.options.refreshToken`) at line 184, not null checks, rejecting empty strings as invalid credentials.
+
+**Implementation Requirement**:
+```typescript
+// ❌ WRONG: Allows empty strings to pass validation
+if (clientId != null && clientSecret != null && refreshToken != null) {
+  // This passes validation but nodemailer will reject it
+}
+
+// ✅ CORRECT: Matches nodemailer's falsy check behavior
+if (!clientId || !clientSecret || !refreshToken || !user) {
+  logger.warn('OAuth 2.0 credentials incomplete, skipping transport creation');
+  return null;
+}
+```
+
+**Why This Matters**: Empty strings (`""`) are falsy in JavaScript. Using `!= null` in GROWI would allow empty strings through validation, but nodemailer's falsy check would then reject them, causing runtime failures.
+
+**Impact**: All credential validation logic in MailService and ConfigManager **must use falsy checks** for OAuth 2.0 credentials to maintain compatibility with nodemailer.
+
+**Reference**: [mail.ts:219-226](../../../apps/app/src/server/service/mail.ts#L219-L226)
+
+---
+
+#### 2. Gmail API FROM Address Rewriting
+
+**Discovery**: Gmail API rewrites the FROM address to the authenticated account email, ignoring GROWI's configured `mail:from` address.
+
+**Gmail API Behavior**: Gmail API enforces that emails are sent FROM the authenticated account unless send-as aliases are explicitly configured in Google Workspace.
+
+**Example**:
+```
+Configured: mail:from = "notifications@example.com"
+Authenticated: oauth2User = "admin@company.com"
+Actual sent FROM: "admin@company.com"
+```
+
+**Workaround**: Google Workspace administrators must configure **send-as aliases**:
+1. Gmail Settings → Accounts and Import → Send mail as
+2. Add desired FROM address as an alias
+3. Verify domain ownership
+
+**Why This Happens**: Gmail API security policy prevents email spoofing by restricting FROM addresses to authenticated account or verified aliases.
+
+**Impact**:
+- GROWI's `mail:from` configuration has **limited effect** with OAuth 2.0
+- Custom FROM addresses require Google Workspace send-as alias configuration
+- This is **expected Gmail behavior**, not a GROWI limitation
+
+**Documentation Note**: This behavior must be documented in admin UI help text and user guides.
+
+---
+
+#### 3. Credential Preservation Pattern
+
+**Discovery**: Initial implementation allowed secret credentials to be accidentally overwritten with empty strings or masked placeholder values when updating non-secret fields.
+
+**Problem**: Standard PUT request pattern sending all form fields would overwrite secrets with empty values when administrators only wanted to update non-secret fields like `from` address or `oauth2User`.
+
+**Solution**: Conditional secret inclusion pattern:
+
+```typescript
+// Build request params with non-secret fields
+const requestOAuth2SettingParams: Record<string, any> = {
+  'mail:from': req.body.fromAddress,
+  'mail:transmissionMethod': req.body.transmissionMethod,
+  'mail:oauth2ClientId': req.body.oauth2ClientId,
+  'mail:oauth2User': req.body.oauth2User,
+};
+
+// Only include secrets if non-empty values provided
+if (req.body.oauth2ClientSecret) {
+  requestOAuth2SettingParams['mail:oauth2ClientSecret'] = req.body.oauth2ClientSecret;
+}
+if (req.body.oauth2RefreshToken) {
+  requestOAuth2SettingParams['mail:oauth2RefreshToken'] = req.body.oauth2RefreshToken;
+}
+```
+
+**Frontend Consideration**: GET endpoint returns `undefined` for secrets (not masked values) to prevent accidental re-submission:
+
+```typescript
+// ❌ WRONG: Returns masked value that could be saved back
+oauth2ClientSecret: '(set)',
+
+// ✅ CORRECT: Returns undefined, frontend shows placeholder
+oauth2ClientSecret: undefined,
+```
+
+**Why This Pattern**: Allows administrators to update non-secret OAuth 2.0 settings without re-entering sensitive credentials every time.
+
+**Impact**: This pattern must be followed for **any API that updates OAuth 2.0 credentials** to prevent accidental secret overwrites.
+
+**Reference**:
+- PUT handler: [apiv3/app-settings/index.ts:293-306](../../../apps/app/src/server/routes/apiv3/app-settings/index.ts#L293-L306)
+- GET response: [apiv3/app-settings/index.ts:273-276](../../../apps/app/src/server/routes/apiv3/app-settings/index.ts#L273-L276)
+
+---
+
+### Type Safety Enhancements
+
+**NonBlankString Type**: OAuth 2.0 config definitions use `NonBlankString | undefined` for compile-time protection against empty string assignments:
+
+```typescript
+'mail:oauth2ClientSecret': defineConfig<NonBlankString | undefined>({
+  defaultValue: undefined,
+  isSecret: true,
+}),
+```
+
+This provides **compile-time protection** complementing runtime falsy checks.
+
+---
+
+### Integration Pattern Discovered
+
+**OAuth 2.0 Retry Logic**: OAuth 2.0 requires retry logic with exponential backoff due to potential token refresh failures:
+
+```typescript
+// OAuth 2.0 uses sendWithRetry() for automatic retry
+if (transmissionMethod === 'oauth2') {
+  return this.sendWithRetry(mailConfig as EmailConfig);
+}
+
+// SMTP/SES use direct sendMail()
+return this.mailer.sendMail(mailConfig);
+```
+
+**Rationale**: OAuth 2.0 token refresh can fail transiently due to network issues or Google API rate limiting. Exponential backoff (1s, 2s, 4s) provides resilience.
+
+---
+
+## Session 3: Post-Refactoring Architecture (2026-02-10)
+
+### MailService Modular Structure
+
+The MailService was refactored from a single monolithic file (`mail.ts`, ~408 lines) into a feature-based directory structure with separate transport modules. This is the current production architecture.
+
+#### Directory Structure
+
+```
+src/server/service/mail/
+├── index.ts              # Barrel export (default: MailService, backward-compatible)
+├── mail.ts               # MailService class (orchestration, S2S, retry logic)
+├── mail.spec.ts          # MailService tests
+├── smtp.ts               # SMTP transport factory: createSMTPClient()
+├── smtp.spec.ts          # SMTP transport tests
+├── ses.ts                # SES transport factory: createSESClient()
+├── ses.spec.ts           # SES transport tests
+├── oauth2.ts             # OAuth2 transport factory: createOAuth2Client()
+├── oauth2.spec.ts        # OAuth2 transport tests
+└── types.ts              # Shared types (StrictOAuth2Options, MailConfig, etc.)
+```
+
+#### Transport Factory Pattern
+
+Each transport module exports a factory function with a consistent signature:
+
+```typescript
+export function create[Transport]Client(
+  configManager: IConfigManagerForApp,
+  option?: TransportOptions
+): Transporter | null;
+```
+
+- Returns `null` if required credentials are missing (logs warning)
+- MailService delegates transport creation based on `mail:transmissionMethod` config
+
+#### StrictOAuth2Options Type
+
+Defined in `types.ts`, this branded type prevents empty string credentials at compile time:
+
+```typescript
+import type { NonBlankString } from '@growi/core/dist/interfaces';
+
+export type StrictOAuth2Options = {
+  service: 'gmail';
+  auth: {
+    type: 'OAuth2';
+    user: NonBlankString;
+    clientId: NonBlankString;
+    clientSecret: NonBlankString;
+    refreshToken: NonBlankString;
+  };
+};
+```
+
+This is stricter than nodemailer's default `XOAuth2.Options` which allows `string | undefined`. The branded type ensures compile-time validation complementing runtime falsy checks.
+
+#### Backward Compatibility
+
+The barrel export at `mail/index.ts` maintains the existing import pattern:
+```typescript
+import MailService from '~/server/service/mail';  // Still works
+```
+
+**Source**: Migrated from `.kiro/specs/refactor-mailer-service/` (spec deleted after implementation completion).
+
+---
+
+## References
+
+- [OAuth2 | Nodemailer](https://nodemailer.com/smtp/oauth2) - Official OAuth2 configuration documentation
+- [Using Gmail | Nodemailer](https://nodemailer.com/usage/using-gmail) - Gmail-specific integration guide
+- [Sending Emails Securely Using Node.js, Nodemailer, SMTP, Gmail, and OAuth2](https://dev.to/chandrapantachhetri/sending-emails-securely-using-node-js-nodemailer-smtp-gmail-and-oauth2-g3a) - Implementation tutorial
+- [Using OAuth2 with Nodemailer for Secure Email Sending](https://shazaali.substack.com/p/using-oauth2-with-nodemailer-for) - Security best practices
+- Internal: `apps/app/src/server/service/mail.ts` - Existing mail service implementation
+- Internal: `apps/app/src/client/components/Admin/App/MailSetting.tsx` - Admin UI patterns

+ 23 - 0
.kiro/specs/oauth2-email-support/spec.json

@@ -0,0 +1,23 @@
+{
+  "feature_name": "oauth2-email-support",
+  "created_at": "2026-02-06T11:43:56Z",
+  "updated_at": "2026-02-13T00:00:00Z",
+  "language": "en",
+  "phase": "implementation-complete",
+  "cleanup_completed": true,
+  "approvals": {
+    "requirements": {
+      "generated": true,
+      "approved": true
+    },
+    "design": {
+      "generated": true,
+      "approved": true
+    },
+    "tasks": {
+      "generated": true,
+      "approved": true
+    }
+  },
+  "ready_for_implementation": true
+}

+ 41 - 0
.kiro/specs/oauth2-email-support/tasks.md

@@ -0,0 +1,41 @@
+# Implementation Tasks - OAuth 2.0 Email Support
+
+## Status Overview
+
+**Final Status**: Production-Ready (2026-02-10)
+**Requirements Coverage**: 35/37 (95%)
+
+## Completed Tasks
+
+### Phase A: Critical Production Requirements (3 tasks)
+
+- [x] 1. Retry logic with exponential backoff (1s, 2s, 4s) - Req: 5.1, 5.2
+- [x] 2. Failed email storage after retry exhaustion - Req: 5.3
+- [x] 3. Enhanced OAuth 2.0 error logging - Req: 5.4, 5.7
+
+Session 2 additional fixes:
+- Credential validation changed to falsy check (nodemailer XOAuth2 compatibility)
+- PUT handler preserves secrets when empty values submitted
+- Config types changed to `NonBlankString | undefined`
+- GET response returns `undefined` for secrets
+- Browser autofill prevention (`autoComplete="new-password"`)
+- Static IDs replaced with `useId()` hook (Biome lint compliance)
+
+### Baseline Implementation (12 tasks)
+
+- [x] Configuration schema (4 config keys, encryption, NonBlankString types) - Req: 1.1, 1.5, 6.1
+- [x] OAuth 2.0 transport creation (nodemailer Gmail service) - Req: 2.1, 2.2, 3.1-3.3, 3.5, 6.2
+- [x] Service initialization and token management (S2S integration) - Req: 2.3, 2.5, 2.6, 3.6, 5.6, 6.2, 6.4
+- [x] API validation and persistence (PUT/GET endpoints) - Req: 1.3, 1.4, 1.5, 1.6, 5.5, 6.5
+- [x] Field-specific validation error messages - Req: 1.7
+- [x] OAuth2Setting UI component (react-hook-form integration) - Req: 1.2, 4.1
+- [x] AdminAppContainer state management (4 state properties) - Req: 4.2, 6.3
+- [x] Mail settings form submission - Req: 1.3, 1.6, 1.7
+- [x] Transmission method selection ('oauth2' option) - Req: 1.1, 1.2
+- [x] Multi-language translations (en, ja, fr, ko, zh) - Req: 1.2, 4.1, 4.3
+
+## Not Implemented (Optional Enhancements)
+
+- Help text for 2 of 4 fields incomplete (Req 4.3)
+- Credential field masking in UI (Req 4.4)
+- Test email button for OAuth 2.0 (Req 4.5)

+ 233 - 0
.kiro/specs/official-docker-image/design.md

@@ -0,0 +1,233 @@
+# Design Document: official-docker-image
+
+## Overview
+
+**Purpose**: Modernize the Dockerfile and entrypoint for the GROWI official Docker image based on 2025-2026 best practices, achieving enhanced security, optimized memory management, and improved build efficiency.
+
+**Users**: Infrastructure administrators (build/deploy), GROWI operators (memory tuning), and Docker image end users (usage via docker-compose).
+
+**Impact**: Redesign the existing 3-stage Dockerfile into a 5-stage configuration. Migrate the base image to Docker Hardened Images (DHI). Change the entrypoint from a shell script to TypeScript (using Node.js 24 native TypeScript execution), achieving a fully hardened configuration that requires no shell.
+
+### Goals
+
+- Up to 95% CVE reduction through DHI base image adoption
+- **Fully shell-free TypeScript entrypoint** — Node.js 24 native TypeScript execution (type stripping), maintaining the minimized attack surface of the DHI runtime as-is
+- Memory management via 3-tier fallback: `V8_MAX_HEAP_SIZE` / cgroup auto-calculation / V8 default
+- Environment variable names aligned with V8 option names (`V8_MAX_HEAP_SIZE`, `V8_OPTIMIZE_FOR_SIZE`, `V8_LITE_MODE`)
+- Improved build cache efficiency through the `turbo prune --docker` pattern
+- Privilege drop via gosu → `process.setuid/setgid` (Node.js native)
+
+### Non-Goals
+
+- Changes to Kubernetes manifests / Helm charts (GROWI.cloud `V8_MAX_HEAP_SIZE` configuration is out of scope)
+- Application code changes (adding gc(), migrating to .pipe(), etc. are separate specs)
+- Updating docker-compose.yml (documentation updates only)
+- Support for Node.js versions below 24
+- Adding HEALTHCHECK instructions (k8s uses its own probes, Docker Compose users can configure their own)
+
+## Architecture
+
+### Existing Architecture Analysis
+
+**Current Dockerfile 3-stage configuration:**
+
+| Stage | Base Image | Role |
+|-------|-----------|------|
+| `base` | `node:20-slim` | Install pnpm + turbo |
+| `builder` | `base` | `COPY . .` → install → build → artifacts |
+| release (unnamed) | `node:20-slim` | gosu install → artifact extraction → execution |
+
+**Main issues:**
+- `COPY . .` includes the entire monorepo in the build layer
+- pnpm version is hardcoded (`PNPM_VERSION="10.32.1"`)
+- Typo in `---frozen-lockfile`
+- Base image is node:20-slim (prone to CVE accumulation)
+- No memory management flags
+- No OCI labels
+- gosu installation requires apt-get (runtime dependency on apt)
+
+### Architecture Pattern & Boundary Map
+
+```mermaid
+graph TB
+    subgraph BuildPhase
+        base[base stage<br>DHI dev + pnpm + turbo]
+        pruner[pruner stage<br>turbo prune --docker]
+        deps[deps stage<br>dependency install]
+        builder[builder stage<br>build + artifacts]
+    end
+
+    subgraph ReleasePhase
+        release[release stage<br>DHI runtime - no shell]
+    end
+
+    base --> pruner
+    pruner --> deps
+    deps --> builder
+    builder -->|artifacts| release
+
+    subgraph RuntimeFiles
+        entrypoint[docker-entrypoint.ts<br>TypeScript entrypoint]
+    end
+
+    entrypoint --> release
+```
+
+**Architecture Integration:**
+- Selected pattern: Multi-stage build with dependency caching separation
+- Domain boundaries: Build concerns (stages 1-4) vs Runtime concerns (stage 5 + entrypoint)
+- Existing patterns preserved: Production dependency extraction via pnpm deploy, tar.gz artifact transfer
+- New components: pruner stage (turbo prune), TypeScript entrypoint
+- **Key change**: gosu + shell script → TypeScript entrypoint (`process.setuid/setgid` + `fs` module + `child_process.execFileSync/spawn`). Eliminates the need for copying busybox/bash, maintaining the minimized attack surface of the DHI runtime as-is. Executes `.ts` directly via Node.js 24 type stripping
+- Steering compliance: Maintains Debian base (glibc performance), maintains monorepo build pattern
+
+### Technology Stack
+
+| Layer | Choice / Version | Role in Feature | Notes |
+|-------|------------------|-----------------|-------|
+| Base Image (build) | `dhi.io/node:24-debian13-dev` | Base for build stages | apt/bash/git/util-linux available |
+| Base Image (runtime) | `dhi.io/node:24-debian13` | Base for release stage | Minimal configuration, 95% CVE reduction, **no shell** |
+| Entrypoint | Node.js (TypeScript) | Initialization, heap calculation, privilege drop, process startup | Node.js 24 native type stripping, no busybox/bash needed |
+| Privilege Drop | `process.setuid/setgid` (Node.js) | root → node user switch | No external binaries needed |
+| Build Tool | `turbo prune --docker` | Monorepo minimization | Official Turborepo recommendation |
+| Package Manager | pnpm (wget standalone) | Dependency management | corepack not adopted (scheduled for removal in Node.js 25+) |
+
+> For the rationale behind adopting the TypeScript entrypoint and comparison with busybox-static/setpriv, see `research.md`.
+
+## System Flows
+
+### Entrypoint Execution Flow
+
+```mermaid
+flowchart TD
+    Start[Container Start<br>as root via node entrypoint.ts] --> Setup[Directory Setup<br>fs.mkdirSync + symlinkSync + chownSync]
+    Setup --> HeapCalc{V8_MAX_HEAP_SIZE<br>is set?}
+    HeapCalc -->|Yes| UseEnv[Use V8_MAX_HEAP_SIZE]
+    HeapCalc -->|No| CgroupCheck{cgroup limit<br>detectable?}
+    CgroupCheck -->|Yes| AutoCalc[Auto-calculate<br>60% of cgroup limit]
+    CgroupCheck -->|No| NoFlag[No heap flag<br>V8 default]
+    UseEnv --> OptFlags[Check V8_OPTIMIZE_FOR_SIZE<br>and V8_LITE_MODE]
+    AutoCalc --> OptFlags
+    NoFlag --> OptFlags
+    OptFlags --> LogFlags[console.log applied flags]
+    LogFlags --> DropPriv[Drop privileges<br>process.setgid + setuid]
+    DropPriv --> Migration[Run migration<br>execFileSync node migrate-mongo]
+    Migration --> SpawnApp[Spawn app process<br>node --max-heap-size=X ... app.js]
+    SpawnApp --> SignalFwd[Forward SIGTERM/SIGINT<br>to child process]
+```
+
+**Key Decisions:**
+- Prioritize cgroup v2 (`/sys/fs/cgroup/memory.max`), fall back to v1
+- Treat cgroup v1 unlimited value (very large number) as no flag (threshold: 64GB)
+- `--max-heap-size` is passed to the spawned child process (the application itself), not the entrypoint process
+- Migration is invoked directly via `child_process.execFileSync` calling node (no `npm run`, no shell needed)
+- App startup uses `child_process.spawn` + signal forwarding to fulfill PID 1 responsibilities
+
+### Docker Build Flow
+
+```mermaid
+flowchart LR
+    subgraph Stage1[base]
+        S1[DHI dev image<br>+ pnpm + turbo]
+    end
+
+    subgraph Stage2[pruner]
+        S2A[COPY monorepo]
+        S2B[turbo prune --docker]
+    end
+
+    subgraph Stage3[deps]
+        S3A[COPY json + lockfile]
+        S3B[pnpm install --frozen-lockfile]
+    end
+
+    subgraph Stage4[builder]
+        S4A[COPY full source]
+        S4B[turbo run build]
+        S4C[pnpm deploy + tar.gz]
+    end
+
+    subgraph Stage5[release]
+        S5A[DHI runtime<br>no additional binaries]
+        S5B[Extract artifacts]
+        S5C[COPY entrypoint.js]
+    end
+
+    Stage1 --> Stage2 --> Stage3 --> Stage4
+    Stage4 -->|tar.gz| Stage5
+```
+
+## Components and Interfaces
+
+| Component | Domain/Layer | Intent | Key Dependencies |
+|-----------|-------------|--------|-----------------|
+| Dockerfile | Infrastructure | 5-stage Docker image build definition | DHI images, turbo, pnpm |
+| docker-entrypoint.ts | Infrastructure | Container startup initialization (TypeScript) | Node.js fs/child_process, cgroup fs |
+| docker-entrypoint.spec.ts | Infrastructure | Unit tests for entrypoint | vitest |
+| Dockerfile.dockerignore | Infrastructure | Build context filter | — |
+| README.md | Documentation | Docker Hub image documentation | — |
+| buildspec.yml | CI/CD | CodeBuild build definition | AWS Secrets Manager, dhi.io |
+
+### Dockerfile
+
+**Responsibilities & Constraints**
+- 5-stage configuration: `base` → `pruner` → `deps` → `builder` → `release`
+- Use of DHI base images (`dhi.io/node:24-debian13-dev` / `dhi.io/node:24-debian13`)
+- **No shell or additional binary copying in runtime** (everything is handled by the Node.js entrypoint)
+
+**Stage Definitions:**
+- **base**: DHI dev image + pnpm (wget) + turbo + apt packages (`ca-certificates`, `wget`)
+- **pruner**: `COPY . .` + `turbo prune @growi/app --docker`
+- **deps**: COPY json/lockfile from pruner + `pnpm install --frozen-lockfile` + node-gyp
+- **builder**: COPY full source from pruner + `turbo run build` + `pnpm deploy` + artifact packaging
+- **release**: DHI runtime (no shell) + `COPY --from=builder` artifacts + entrypoint + OCI labels + EXPOSE/VOLUME
+
+### docker-entrypoint.ts
+
+**Responsibilities & Constraints**
+- Written in TypeScript, executed via Node.js 24 native type stripping (enums not allowed)
+- Directory setup as root (`/data/uploads` + symlink, `/tmp/page-bulk-export`)
+- Heap size determination via 3-tier fallback
+- Privilege drop via `process.setgid()` + `process.setuid()`
+- Migration execution via `child_process.execFileSync` (direct node invocation, no shell)
+- App process startup via `child_process.spawn` with signal forwarding (PID 1 responsibilities)
+- No external binary dependencies
+
+**Environment Variable Interface**
+
+| Variable | Type | Default | Description |
+|----------|------|---------|-------------|
+| `V8_MAX_HEAP_SIZE` | int (MB) | (unset) | Explicitly specify the --max-heap-size value for Node.js |
+| `V8_OPTIMIZE_FOR_SIZE` | `"true"` / (unset) | (unset) | Enable the --optimize-for-size flag |
+| `V8_LITE_MODE` | `"true"` / (unset) | (unset) | Enable the --lite-mode flag |
+
+> **Naming Convention**: Environment variable names are aligned with their corresponding V8 option names (`--max-heap-size`, `--optimize-for-size`, `--lite-mode`) prefixed with `V8_`. This improves discoverability and self-documentation compared to the previous `GROWI_`-prefixed names.
+
+**Batch Contract**
+- **Trigger**: On container startup (`ENTRYPOINT ["node", "/docker-entrypoint.ts"]`)
+- **Input validation**: V8_MAX_HEAP_SIZE (positive int, empty = unset), V8_OPTIMIZE_FOR_SIZE/V8_LITE_MODE (only `"true"` is valid), cgroup v2 (`memory.max`: numeric or `"max"`), cgroup v1 (`memory.limit_in_bytes`: numeric, large value = unlimited)
+- **Output**: Node flags passed directly as arguments to `child_process.spawn`
+- **Idempotency**: Executed on every restart, safe via `fs.mkdirSync({ recursive: true })`
+
+### README.md
+
+**Responsibilities & Constraints**
+- Docker Hub image documentation (published to hub.docker.com/r/growilabs/growi)
+- Document the V8 memory management environment variables under Configuration > Environment Variables section
+- Include variable name, type, default, and description for each: `V8_MAX_HEAP_SIZE`, `V8_OPTIMIZE_FOR_SIZE`, `V8_LITE_MODE`
+
+## Error Handling
+
+| Error | Category | Response |
+|-------|----------|----------|
+| cgroup file read failure | System | Warn and continue with no flag (V8 default) |
+| V8_MAX_HEAP_SIZE is invalid | User | Warn and continue with no flag (container still starts) |
+| Directory creation/permission failure | System | `process.exit(1)` — check volume mount configuration |
+| Migration failure | Business Logic | `execFileSync` throws → `process.exit(1)` — Docker/k8s restarts |
+| App process abnormal exit | System | Propagate child process exit code |
+
+## Performance & Scalability
+
+- **Build cache**: `turbo prune --docker` caches the dependency install layer. Skips dependency installation during rebuilds when only source code changes
+- **Image size**: No additional binaries in DHI runtime. Base layer is smaller compared to node:24-slim
+- **Memory efficiency**: Total heap control via `--max-heap-size` avoids the v24 trusted_space overhead issue. Prevents memory pressure in multi-tenant environments

+ 82 - 0
.kiro/specs/official-docker-image/requirements.md

@@ -0,0 +1,82 @@
+# Requirements Document
+
+## Introduction
+
+Modernize and optimize the GROWI official Docker image's Dockerfile (`apps/app/docker/Dockerfile`) and `docker-entrypoint.sh` based on 2025-2026 best practices. Target Node.js 24 and incorporate findings from the memory report (`apps/app/tmp/memory-results/REPORT.md`) to improve memory management.
+
+### Summary of Current State Analysis
+
+**Current Dockerfile structure:**
+- 3-stage structure: `base` → `builder` → `release` (based on node:20-slim)
+- Monorepo build with pnpm + turbo, production dependency extraction via `pnpm deploy`
+- Privilege drop from root to node user using gosu (after directory creation in entrypoint)
+- `COPY . .` copies the entire context into the builder
+- Application starts after running `npm run migrate` in CMD
+
+**GROWI-specific design intentions (items to maintain):**
+- Privilege drop pattern: The entrypoint must create and set permissions for `/data/uploads` and `/tmp/page-bulk-export` with root privileges, then drop to the node user for execution
+- `pnpm deploy --prod`: The official method for extracting only production dependencies from a pnpm monorepo
+- Inter-stage artifact transfer via tar.gz: Cleanly transfers build artifacts to the release stage
+- `apps/app/tmp` directory: Required in the production image as files are placed there during operation
+- `--expose_gc` flag: Required for explicitly calling `gc()` in batch processing (ES rebuild, import, etc.)
+- `npm run migrate` in CMD: Automatically runs migrations at startup for the convenience of Docker image users
+
+**References:**
+- [Future Architect: 2024 Dockerfile Best Practices](https://future-architect.github.io/articles/20240726a/)
+- [Snyk: 10 best practices to containerize Node.js](https://snyk.io/blog/10-best-practices-to-containerize-nodejs-web-applications-with-docker/)
+- [ByteScrum: Dockerfile Best Practices 2025](https://blog.bytescrum.com/dockerfile-best-practices-2025-secure-fast-and-modern)
+- [OneUptime: Docker Health Check Best Practices 2026](https://oneuptime.com/blog/post/2026-01-30-docker-health-check-best-practices/view)
+- [Docker: Introduction to heredocs in Dockerfiles](https://www.docker.com/blog/introduction-to-heredocs-in-dockerfiles/)
+- [Docker Hardened Images: Node.js Migration Guide](https://docs.docker.com/dhi/migration/examples/node/)
+- [Docker Hardened Images Catalog: Node.js](https://hub.docker.com/hardened-images/catalog/dhi/node)
+- GROWI Memory Usage Investigation Report (`apps/app/tmp/memory-results/REPORT.md`)
+
+## Requirements
+
+### Requirement 1: Modernize Base Image and Build Environment
+
+**Objective:** As an infrastructure administrator, I want the Dockerfile's base image and syntax to comply with the latest best practices, so that security patch application, performance improvements, and maintainability enhancements are achieved
+
+**Summary**: DHI base images adopted (`dhi.io/node:24-debian13-dev` for build, `dhi.io/node:24-debian13` for release) with up to 95% CVE reduction. Syntax directive updated to auto-follow latest stable. pnpm installed via wget standalone script (corepack not adopted due to planned removal in Node.js 25+). Fixed `---frozen-lockfile` typo and eliminated hardcoded pnpm version.
+
+### Requirement 2: Memory Management Optimization
+
+**Objective:** As a GROWI operator, I want the Node.js heap size to be appropriately controlled according to container memory constraints, so that the risk of OOMKilled is reduced and memory efficiency in multi-tenant environments is improved
+
+**Summary**: 3-tier heap size fallback implemented in docker-entrypoint.ts: (1) `V8_MAX_HEAP_SIZE` env var, (2) cgroup v2/v1 auto-calculation at 60%, (3) V8 default. Uses `--max-heap-size` (not `--max_old_space_size`) passed as direct spawn arguments (not `NODE_OPTIONS`). Additional flags: `--optimize-for-size` via `V8_OPTIMIZE_FOR_SIZE=true`, `--lite-mode` via `V8_LITE_MODE=true`. (Env var names follow the V8 option naming convention; the earlier `GROWI_`-prefixed names were replaced — see design.md.)
+
+### Requirement 3: Build Efficiency and Cache Optimization
+
+**Objective:** As a developer, I want Docker builds to be fast and efficient, so that CI/CD pipeline build times are reduced and image size is minimized
+
+**Summary**: `turbo prune --docker` pattern adopted to eliminate `COPY . .` and maximize layer cache (dependency install cached separately from source changes). pnpm store and apt-get cache mounts maintained. `.next/cache` excluded from release stage. Artifact transfer uses `COPY --from=builder` (adapted from design's `--mount=type=bind,from=builder` due to shell-less DHI runtime).
+
+### Requirement 4: Security Hardening
+
+**Objective:** As a security officer, I want the Docker image to comply with security best practices, so that the attack surface is minimized and the safety of the production environment is improved
+
+**Summary**: Non-root execution via Node.js native `process.setuid/setgid` (no gosu/setpriv). Release stage contains no unnecessary packages — no shell, no apt, no build tools. Enhanced `.dockerignore` excludes `.git`, secrets, test files, IDE configs. `--no-install-recommends` used for apt-get in build stage.
+
+### Requirement 5: Operability and Observability Improvement
+
+**Objective:** As an operations engineer, I want the Docker image to have appropriate metadata configured, so that management by container orchestrators is facilitated
+
+**Summary**: OCI standard LABEL annotations added (`org.opencontainers.image.source`, `.title`, `.description`, `.vendor`). `EXPOSE 3000` and `VOLUME /data` maintained.
+
+### Requirement 6: Entrypoint and CMD Refactoring
+
+**Objective:** As a developer, I want the entrypoint script and CMD to have a clear and maintainable structure, so that dynamic assembly of memory flags and future extensions are facilitated
+
+**Summary**: Entrypoint rewritten in TypeScript (`docker-entrypoint.ts`) executed via Node.js 24 native type stripping. Handles: directory setup (`/data/uploads`, `/tmp/page-bulk-export`), heap size calculation (3-tier fallback), privilege drop (`process.setgid` + `process.setuid`), migration execution (`execFileSync`), app process spawn with signal forwarding. Always includes `--expose_gc`. Logs applied flags to stdout.
+
+### Requirement 7: Backward Compatibility
+
+**Objective:** As an existing Docker image user, I want existing operations to not break when migrating to the new Dockerfile, so that the risk during upgrades is minimized
+
+**Summary**: Full backward compatibility maintained. Environment variables (`MONGO_URI`, `FILE_UPLOAD`, etc.), `VOLUME /data`, port 3000, and docker-compose usage patterns all work as before. Without memory management env vars, behavior is equivalent to V8 defaults.
+
+### Requirement 8: Production Replacement and CI/CD Support
+
+**Objective:** As an infrastructure administrator, I want the artifacts in the docker-new directory to officially replace the existing docker directory and the CI/CD pipeline to operate with the new Dockerfile, so that DHI-based images are used in production builds
+
+**Summary**: All files moved from `apps/app/docker-new/` to `apps/app/docker/`, old files deleted. Dockerfile self-referencing path updated. `docker login dhi.io` added to buildspec.yml pre_build phase, reusing existing `DOCKER_REGISTRY_PASSWORD` secret. `codebuild/` directory and `README.md` maintained.

+ 288 - 0
.kiro/specs/official-docker-image/research.md

@@ -0,0 +1,288 @@
+# Research & Design Decisions
+
+---
+**Purpose**: Discovery findings and design decision rationale for the official Docker image modernization.
+---
+
+## Summary
+- **Feature**: `official-docker-image`
+- **Discovery Scope**: Extension (major improvement of existing Dockerfile)
+- **Key Findings**:
+  - The DHI runtime image (`dhi.io/node:24-debian13`) is a minimal configuration that does not include a shell, package manager, or coreutils. By adopting a Node.js entrypoint (TypeScript), a configuration requiring no shell or additional binaries is achieved
+  - `--mount=type=bind` is impractical for monorepo multi-step builds. `turbo prune --docker` is the officially recommended Docker optimization approach by Turborepo
+  - gosu is replaced by Node.js native `process.setuid/setgid`. External binaries (gosu/setpriv/busybox) are completely unnecessary
+  - HEALTHCHECK is not adopted (k8s uses its own probes. Docker Compose users can configure it themselves)
+  - Node.js 24 supports native TypeScript execution (type stripping). The entrypoint can be written in TypeScript
+
+## Research Log
+
+### DHI Runtime Image Configuration
+
+- **Context**: Investigation of constraints when adopting `dhi.io/node:24-debian13` as the base image for the release stage
+- **Sources Consulted**:
+  - [DHI Catalog GitHub](https://github.com/docker-hardened-images/catalog) — `image/node/debian-13/` directory
+  - [DHI Documentation](https://docs.docker.com/dhi/)
+  - [DHI Use an Image](https://docs.docker.com/dhi/how-to/use/)
+- **Findings**:
+  - Pre-installed packages in the runtime image: only `base-files`, `ca-certificates`, `libc6`, `libgomp1`, `libstdc++6`, `netbase`, `tzdata`
+  - **No shell**, **no apt**, **no coreutils**, **no curl/wget**
+  - Default user: `node` (UID 1000, GID 1000)
+  - Dev image (`-dev`): `apt`, `bash`, `git`, `util-linux`, `coreutils`, etc. are pre-installed
+  - Available tags: `dhi.io/node:24-debian13`, `dhi.io/node:24-debian13-dev`
+  - Platforms: `linux/amd64`, `linux/arm64`
+- **Implications**:
+  - By writing the entrypoint in Node.js (TypeScript), neither a shell nor additional binaries are needed at all
+  - gosu/setpriv are replaced by Node.js native `process.setuid/setgid`. No need to copy external binaries
+  - HEALTHCHECK is not adopted (k8s uses its own probes). Health checks via curl/Node.js http module are unnecessary
+
+### Applicability of `--mount=type=bind` in Monorepo Builds
+
+- **Context**: Investigation of the feasibility of Requirement 3.1 "Use `--mount=type=bind` instead of `COPY . .` in the builder stage"
+- **Sources Consulted**:
+  - [Docker Build Cache Optimization](https://docs.docker.com/build/cache/optimize/)
+  - [Dockerfile Reference - RUN --mount](https://docs.docker.com/reference/dockerfile/)
+  - [pnpm Docker Documentation](https://pnpm.io/docker)
+  - [Turborepo Docker Guide](https://turbo.build/repo/docs/handbook/deploying-with-docker)
+- **Findings**:
+  - `--mount=type=bind` is **only valid during the execution of a RUN instruction** and is not carried over to the next RUN instruction
+  - In the multi-step process of monorepo builds (install -> build -> deploy), each step depends on artifacts from the previous step, making it difficult to achieve with bind mounts alone
+  - It is possible to combine all steps into a single RUN, but this loses the benefits of layer caching
+  - **Turborepo official recommendation**: Use `turbo prune --docker` to minimize the monorepo for Docker
+    - `out/json/` — only package.json files needed for dependency install
+    - `out/pnpm-lock.yaml` — lockfile
+    - `out/full/` — source code needed for the build
+  - This approach avoids `COPY . .` while leveraging layer caching
+- **Implications**:
+  - Requirement 3.1 should be achieved using the `turbo prune --docker` pattern instead of `--mount=type=bind`
+  - The goal (minimizing source code layers / improving cache efficiency) can be equally achieved
+  - **However**, compatibility of `turbo prune --docker` with pnpm workspaces needs to be verified during implementation
+
+### Alternatives to gosu
+
+- **Context**: Investigation of alternatives since gosu is not available in the DHI runtime image
+- **Sources Consulted**:
+  - [gosu GitHub](https://github.com/tianon/gosu) — list of alternative tools
+  - [Debian Packages - gosu in trixie](https://packages.debian.org/trixie/admin/gosu)
+  - [PhotoPrism: Switch from gosu to setpriv](https://github.com/photoprism/photoprism/pull/2730)
+  - [MongoDB Docker: Replace gosu by setpriv](https://github.com/docker-library/mongo/pull/714)
+  - Node.js `process.setuid/setgid` documentation
+- **Findings**:
+  - `setpriv` is part of `util-linux` and is pre-installed in the DHI dev image
+  - `gosu node command` can be replaced with `setpriv --reuid=node --regid=node --init-groups -- command`
+  - PhotoPrism and the official MongoDB Docker image have already migrated from gosu to setpriv
+  - **Node.js native**: Can be fully replaced with `process.setgid(1000)` + `process.setuid(1000)` + `process.initgroups('node', 1000)`
+  - When adopting a Node.js entrypoint, no external binaries (gosu/setpriv/busybox) are needed at all
+- **Implications**:
+  - **Final decision**: Adopt Node.js native `process.setuid/setgid` (setpriv is also unnecessary)
+  - No need to copy gosu/setpriv binaries, resulting in no additional binaries in the release stage
+  - Maintains the minimized attack surface of the DHI runtime as-is
+
+### HEALTHCHECK Implementation Approach (Not Adopted)
+
+- **Context**: Investigation of HEALTHCHECK implementation approaches since curl is not available in the DHI runtime image
+- **Sources Consulted**:
+  - [Docker Healthchecks in Distroless Node.js](https://www.mattknight.io/blog/docker-healthchecks-in-distroless-node-js)
+  - [Docker Healthchecks: Why Not to Use curl](https://blog.sixeyed.com/docker-healthchecks-why-not-to-use-curl-or-iwr/)
+  - GROWI healthcheck endpoint: `apps/app/src/server/routes/apiv3/healthcheck.ts`
+- **Findings**:
+  - Node.js `http` module is sufficient (curl is unnecessary)
+  - GROWI's `/_api/v3/healthcheck` endpoint returns `{ status: 'OK' }` without any parameters
+  - Docker HEALTHCHECK is useful for Docker Compose's `depends_on: service_healthy` dependency order control
+  - In k8s environments, custom probes (liveness/readiness) are used, so the Dockerfile's HEALTHCHECK is unnecessary
+- **Implications**:
+  - **Final decision: Not adopted**. k8s uses its own probes, and Docker Compose users can configure it themselves in compose.yaml
+  - By not including HEALTHCHECK in the Dockerfile, simplicity is maintained
+
+### Shell Dependency of npm run migrate
+
+- **Context**: Investigation of whether `npm run migrate` within CMD requires a shell
+- **Sources Consulted**:
+  - GROWI `apps/app/package.json`'s `migrate` script
+- **Findings**:
+  - The actual `migrate` script content: `node -r dotenv-flow/config node_modules/migrate-mongo/bin/migrate-mongo up -f config/migrate-mongo-config.js`
+  - `npm run` internally uses `sh -c`, so a shell is required
+  - Alternative: Running the script contents directly with node eliminates the need for npm/sh
+  - However, using npm run is more maintainable (can track changes in package.json)
+- **Implications**:
+  - **Final decision**: Use `child_process.execFileSync` in the Node.js entrypoint to directly execute the migration command (not using npm run, no shell needed)
+  - Adopt the approach of directly writing the `migrate` script contents within the entrypoint
+  - When package.json changes, the entrypoint also needs to be updated, but priority is given to fully shell-less DHI runtime
+
+### Node.js 24 Native TypeScript Execution
+
+- **Context**: Investigation of whether Node.js 24's native TypeScript execution feature can be used when writing the entrypoint in TypeScript
+- **Sources Consulted**:
+  - [Node.js 23.6 Release Notes](https://nodejs.org/en/blog/release/v23.6.0) — `--experimental-strip-types` unflagged
+  - [Node.js Type Stripping Documentation](https://nodejs.org/docs/latest/api/typescript.html)
+- **Findings**:
+  - From Node.js 23.6, type stripping is enabled by default (no `--experimental-strip-types` flag needed)
+  - Available as a stable feature in Node.js 24
+  - **Constraint**: "Non-erasable syntax" such as enum and namespace cannot be used. `--experimental-transform-types` is required for those
+  - interface, type alias, and type annotations (`: string`, `: number`, etc.) can be used without issues
+  - Can be executed directly with `ENTRYPOINT ["node", "docker-entrypoint.ts"]`
+- **Implications**:
+  - The entrypoint can be written in TypeScript, enabling type-safe implementation
+  - Do not use enum; use union types (`type Foo = 'a' | 'b'`) as alternatives
+  - tsconfig.json is not required (type stripping operates independently)
+
+## Architecture Pattern Evaluation
+
+| Option | Description | Strengths | Risks / Limitations | Notes |
+|--------|-------------|-----------|---------------------|-------|
+| DHI runtime + busybox-static | Copy busybox-static to provide sh/coreutils | Minimal addition (~1MB) enables full functionality | Contradicts the original intent of DHI adoption (minimizing attack surface). Additional binaries are attack vectors | Rejected |
+| DHI runtime + bash/coreutils copy | Copy bash and various binaries individually from the dev stage | Full bash functionality available | Shared library dependencies are complex, many files need to be copied | Rejected |
+| DHI dev image as runtime | Use the dev image as-is for production | Minimal configuration changes | Increased attack surface due to apt/git etc., diminishes the meaning of DHI | Rejected |
+| Node.js entrypoint (TypeScript, shell-less) | Write the entrypoint in TypeScript. Runs with Node.js 24's native TypeScript execution | Completely shell-free, maintains DHI runtime's attack surface as-is, type-safe | Migration command written directly (not using npm run), updates needed when package.json changes | **Adopted** |
+
+## Design Decisions
+
+### Decision: Node.js TypeScript Entrypoint (Completely Shell-Free)
+
+- **Context**: The DHI runtime image contains neither a shell nor coreutils. Copying busybox-static contradicts the intent of DHI adoption (minimizing attack surface)
+- **Alternatives Considered**:
+  1. Copy busybox-static to provide shell + coreutils — Contradicts DHI's attack surface minimization
+  2. Copy bash + coreutils individually — Complex dependencies
+  3. Node.js TypeScript entrypoint — Everything can be accomplished with `fs`, `child_process`, and `process.setuid/setgid`
+- **Selected Approach**: Write the entrypoint in TypeScript (`docker-entrypoint.ts`). Execute directly using Node.js 24's native TypeScript execution (type stripping)
+- **Rationale**: No additional binaries needed in the DHI runtime whatsoever. Directory operations via fs module, privilege dropping via process.setuid/setgid, migration via execFileSync, and app startup via spawn. Improved maintainability through type safety
+- **Trade-offs**: Migration command is written directly (not using npm run). When the migrate script in package.json changes, the entrypoint also needs to be updated
+- **Follow-up**: Verify that Node.js 24's type stripping works correctly with a single-file entrypoint without import statements
+
+### Decision: Privilege Dropping via Node.js Native process.setuid/setgid
+
+- **Context**: gosu cannot be installed in the DHI runtime. busybox-static/setpriv are also not adopted (policy of eliminating additional binaries)
+- **Alternatives Considered**:
+  1. Copy gosu binary — Works but goes against industry trends
+  2. Copy setpriv binary — Works but goes against the policy of eliminating additional binaries
+  3. Node.js `process.setuid/setgid` — Standard Node.js API
+  4. Docker `--user` flag — Cannot handle dynamic processing in the entrypoint
+- **Selected Approach**: Drop privileges with `process.initgroups('node', 1000)` + `process.setgid(1000)` + `process.setuid(1000)`
+- **Rationale**: No external binaries needed at all. Can be called directly within the Node.js entrypoint. Safe privilege dropping in the order setgid -> setuid
+- **Trade-offs**: The entrypoint starts as a Node.js process running as root, and the app becomes its child process (not an exec like gosu). However, the app process is separated via spawn, and signal forwarding fulfills PID 1 responsibilities
+- **Follow-up**: None
+
+### Decision: turbo prune --docker Pattern
+
+- **Context**: Requirement 3.1 requires eliminating `COPY . .`, but `--mount=type=bind` is impractical for monorepo builds
+- **Alternatives Considered**:
+  1. `--mount=type=bind` — Does not persist across RUN instructions, unsuitable for multi-step builds
+  2. Combine all steps into a single RUN — Poor cache efficiency
+  3. `turbo prune --docker` — Officially recommended by Turborepo
+- **Selected Approach**: Use `turbo prune --docker` to minimize the monorepo for Docker, using optimized COPY patterns
+- **Rationale**: Officially recommended by Turborepo. Separates dependency install and source copy to maximize layer cache utilization. Eliminates `COPY . .` while remaining practical
+- **Trade-offs**: One additional build stage (pruner stage), but offset by improved cache efficiency
+- **Follow-up**: Verify `turbo prune --docker` compatibility with pnpm workspaces during implementation
+
+### Decision: Flag Injection via spawn Arguments
+
+- **Context**: `--max-heap-size` cannot be used in `NODE_OPTIONS`. It needs to be passed as a direct argument to the node command
+- **Alternatives Considered**:
+  1. Export environment variable `GROWI_NODE_FLAGS` and inject via shell variable expansion in CMD — Requires a shell
+  2. Rewrite CMD string with sed in the entrypoint — Fragile
+  3. Pass directly as arguments to `child_process.spawn` in the Node.js entrypoint — No shell needed
+- **Selected Approach**: Build a flag array within the entrypoint and pass it directly with `spawn(process.execPath, [...nodeFlags, ...appArgs])`
+- **Rationale**: No shell variable expansion needed. Passed directly as an array, resulting in zero risk of shell injection. Natural integration with the Node.js entrypoint
+- **Trade-offs**: CMD becomes unnecessary (the entrypoint handles all startup processing). Overriding the command with docker run does not affect the logic within the entrypoint
+- **Follow-up**: None
+
+### DHI Registry Authentication and CI/CD Integration
+
+- **Context**: Investigation of the authentication method required for pulling DHI base images and how to integrate with the existing CodeBuild pipeline
+- **Sources Consulted**:
+  - [DHI How to Use an Image](https://docs.docker.com/dhi/how-to/use/) — DHI usage instructions
+  - Existing `apps/app/docker/codebuild/buildspec.yml` — Current CodeBuild build definition
+  - Existing `apps/app/docker/codebuild/secretsmanager.tf` — AWS Secrets Manager configuration
+- **Findings**:
+  - DHI uses Docker Hub credentials (DHI is a feature of Docker Business/Team subscriptions)
+  - Authentication is possible with `docker login dhi.io --username <dockerhub-user> --password-stdin`
+  - The existing buildspec.yml is already logged into docker.io with the `DOCKER_REGISTRY_PASSWORD` secret
+  - The same credentials can be used to log into `dhi.io` as well (no additional secrets required)
+  - The flow of CodeBuild's `reusable-app-build-image.yml` -> CodeBuild Project -> buildspec.yml does not need to change
+- **Implications**:
+  - Can be addressed by simply adding one line of `docker login dhi.io` to the pre_build in buildspec.yml
+  - No changes to `secretsmanager.tf` are needed
+  - Login to both Docker Hub and DHI is required (docker.io for push, dhi.io for pull)
+
+### Impact Scope of Directory Replacement (Codebase Investigation)
+
+- **Context**: Confirming that existing references will not break when replacing `apps/app/docker-new/` with `apps/app/docker/`
+- **Sources Consulted**: Grep investigation of the entire codebase with the `apps/app/docker` keyword
+- **Findings**:
+  - `buildspec.yml`: `-f ./apps/app/docker/Dockerfile` — Same path after replacement (no change needed)
+  - `codebuild.tf`: `buildspec = "apps/app/docker/codebuild/buildspec.yml"` — Same (no change needed)
+  - `.github/workflows/release.yml`: `readme-filepath: ./apps/app/docker/README.md` — Same (no change needed)
+  - `.github/workflows/ci-app.yml` / `ci-app-prod.yml`: `!apps/app/docker/**` exclusion pattern — Same (no change needed)
+  - `apps/app/bin/github-actions/update-readme.sh`: `cd docker` + sed — Same (no change needed)
+  - Within Dockerfile: line 122 `apps/app/docker-new/docker-entrypoint.ts` — **Needs updating** (self-referencing path)
+  - `package.json` and `vitest.config` for docker-related references — None
+  - `lefthook.yml` for docker-related hooks — None
+- **Implications**:
+  - Only one location within the Dockerfile (self-referencing path) needs to be updated during replacement
+  - All external references (CI/CD, GitHub Actions) already use the `apps/app/docker/` path and require no changes
+  - The `codebuild/` directory and `README.md` are maintained as-is within `docker/`
+
+### Environment Variable Renaming: GROWI_ prefix → V8_ prefix
+
+- **Context**: The initial implementation used `GROWI_HEAP_SIZE`, `GROWI_OPTIMIZE_MEMORY`, and `GROWI_LITE_MODE` as environment variable names. These names obscure the relationship between the env var and the underlying V8 flag it controls
+- **Motivation**: Align environment variable names with the actual V8 option names they map to, improving discoverability and self-documentation
+- **Mapping**:
+  | Old Name | New Name | V8 Flag |
+  |----------|----------|---------|
+  | `GROWI_HEAP_SIZE` | `V8_MAX_HEAP_SIZE` | `--max-heap-size` |
+  | `GROWI_OPTIMIZE_MEMORY` | `V8_OPTIMIZE_FOR_SIZE` | `--optimize-for-size` |
+  | `GROWI_LITE_MODE` | `V8_LITE_MODE` | `--lite-mode` |
+- **Benefits**:
+  - Users can immediately understand which V8 flag each variable controls
+  - Naming convention is consistent: `V8_` prefix + option name in UPPER_SNAKE_CASE
+  - No need to consult documentation to understand the mapping
+- **Impact scope**:
+  - `docker-entrypoint.ts`: Code changes (env var reads, comments, log messages)
+  - `docker-entrypoint.spec.ts`: Test updates (env var references in test cases)
+  - `README.md`: Add documentation for the new environment variables
+  - `design.md`, `requirements.md`, `tasks.md`: Spec document updates
+- **Breaking change**: Yes — users who have configured `GROWI_HEAP_SIZE`, `GROWI_OPTIMIZE_MEMORY`, or `GROWI_LITE_MODE` in their docker-compose.yml or deployment configs will need to update to the new names. This is acceptable as these variables were introduced in the same release (v7.5.x) and have not been published yet
+- **Implications**: No backward compatibility shim needed since the variables are new in this version
+
+## Risks & Mitigations
+
+- **Stability of Node.js 24 native TypeScript execution**: Type stripping was unflagged in Node.js 23.6. It is a stable feature in Node.js 24. However, non-erasable syntax such as enum cannot be used -> Use only interface/type
+- **Direct description of migration command**: The `migrate` script from package.json is written directly in the entrypoint, so synchronization is needed when changes occur -> Clearly noted in comments during implementation
+- **turbo prune compatibility with pnpm workspaces**: Verify during implementation. If incompatible, fall back to an optimized COPY pattern
+- **Limitations of process.setuid/setgid**: `process.initgroups` is required for supplementary group initialization. The order setgid -> setuid must be strictly followed
+- **docker login requirement for DHI images**: `docker login dhi.io` is required in CI/CD. Security considerations for credential management are needed
+
+## Production Implementation Discoveries
+
+### DHI Dev Image Minimal Configuration (Phase 1 E2E)
+
+- **Issue**: The DHI dev image (`dhi.io/node:24-debian13-dev`) did not include the `which` command
+- **Resolution**: Changed pnpm installation from `SHELL="$(which sh)"` to `SHELL=/bin/sh`
+- **Impact**: Minor — only affects the pnpm install script invocation
+
+### Complete Absence of Shell in DHI Runtime Image (Phase 1 E2E)
+
+- **Issue**: The DHI runtime image (`dhi.io/node:24-debian13`) did not have `/bin/sh`. The design planned `--mount=type=bind,from=builder` + `RUN tar -zxf`, but `RUN` instructions require `/bin/sh`
+- **Resolution**:
+  - **builder stage**: Changed from `tar -zcf` to `cp -a` into a staging directory `/tmp/release/`
+  - **release stage**: Changed from `RUN --mount=type=bind... tar -zxf` to `COPY --from=builder --chown=node:node`
+- **Impact**: Design Req 3.5 (`--mount=type=bind,from=builder` pattern) was replaced with `COPY --from=builder`. The security goal of not requiring a shell at runtime was achieved even more robustly
+- **Lesson**: DHI runtime images are truly minimal — `COPY`, `WORKDIR`, `ENV`, `LABEL`, `ENTRYPOINT` are processed by the Docker daemon and do not require a shell
+
+### process.initgroups() Type Definition Gap
+
+- **Issue**: `process.initgroups('node', 1000)` was called for in the design, but implementation was deferred because the type definition does not exist in `@types/node`
+- **Status**: Deferred (Known Issue)
+- **Runtime**: `process.initgroups` does exist at runtime in Node.js 24
+- **Workaround options**: Wait for `@types/node` fix, or use `(process as any).initgroups('node', 1000)`
+- **Practical impact**: Low — the node user in a Docker container typically has no supplementary groups
+
+## References
+
+- [Docker Hardened Images Documentation](https://docs.docker.com/dhi/) — Overview and usage of DHI
+- [DHI Catalog GitHub](https://github.com/docker-hardened-images/catalog) — Image definitions and tag list
+- [Turborepo Docker Guide](https://turbo.build/repo/docs/handbook/deploying-with-docker) — turbo prune --docker pattern
+- [pnpm Docker Documentation](https://pnpm.io/docker) — pnpm Docker build recommendations
+- [Future Architect: 2024 Edition Dockerfile Best Practices](https://future-architect.github.io/articles/20240726a/) — Modern Dockerfile syntax
+- [MongoDB Docker: gosu -> setpriv](https://github.com/docker-library/mongo/pull/714) — Precedent for setpriv migration
+- [Docker Healthchecks in Distroless](https://www.mattknight.io/blog/docker-healthchecks-in-distroless-node-js) — Health checks without curl
+- GROWI memory usage investigation report (`apps/app/tmp/memory-results/REPORT.md`) — Basis for heap size control

+ 22 - 0
.kiro/specs/official-docker-image/spec.json

@@ -0,0 +1,22 @@
+{
+  "feature_name": "official-docker-image",
+  "created_at": "2026-02-20T00:00:00.000Z",
+  "updated_at": "2026-02-24T15:30:00.000Z",
+  "language": "en",
+  "phase": "tasks-generated",
+  "approvals": {
+    "requirements": {
+      "generated": true,
+      "approved": true
+    },
+    "design": {
+      "generated": true,
+      "approved": true
+    },
+    "tasks": {
+      "generated": true,
+      "approved": false
+    }
+  },
+  "ready_for_implementation": true
+}

+ 193 - 0
.kiro/specs/official-docker-image/tasks.md

@@ -0,0 +1,193 @@
+# Implementation Plan
+
+> **Task ordering design policy**:
+> - **Phase 1 (this phase)**: Reproduce an image with the same specifications as the current one using a DHI base image + TypeScript entrypoint. The build pipeline (3-stage structure using `COPY . .`) is kept as-is, **prioritizing a safe runtime migration**.
+> - **Phase 2 (next phase)**: Introduction of build optimization via the `turbo prune --docker` pattern. This will be done after runtime is stable in Phase 1. Adding pruner/deps stages to create a 5-stage structure.
+>
+> **Implementation directory**: Create new files in `apps/app/docker-new/`. The existing `apps/app/docker/` will not be modified at all. Maintain a state where parallel comparison and verification is possible.
+>
+> Directory permission handling is implemented and tested as the highest priority to detect regressions early. Since the entrypoint (TypeScript) and Dockerfile are independent files, some tasks can be executed in parallel.
+
+## Phase 1: DHI + TypeScript entrypoint (maintaining current build pattern)
+
+- [x] 1. (P) Strengthen build context filter
+  - Add `.git`, `.env*` (except production), test files, IDE configuration files, etc. to the current exclusion rules
+  - Verify that security-sensitive files (secrets, credentials) are not included in the context
+  - Maintain the current exclusion rules (`node_modules`, `.next`, `.turbo`, `apps/slackbot-proxy`, etc.)
+  - _Requirements: 4.3_
+
+- [x] 2. TypeScript entrypoint directory initialization and permission management
+- [x] 2.1 (P) Create entrypoint skeleton and recursive chown helper
+  - Create a new TypeScript file that can be directly executed with Node.js 24 type stripping (no enums, erasable syntax only)
+  - Structure the main execution flow as a `main()` function with top-level try-catch for error handling
+  - Implement a helper function that recursively changes ownership of files and subdirectories within a directory
+  - Create unit tests for the helper function (verify recursive behavior with nested directory structures)
+  - _Requirements: 6.8_
+
+- [x] 2.2 Implement directory initialization processing
+  - Implement creation of `/data/uploads`, symlink creation to `./public/uploads`, and recursive ownership change
+  - Implement creation of `/tmp/page-bulk-export`, recursive ownership change, and permission 700 setting
+  - Ensure idempotency (`recursive: true` for mkdir, prevent duplicate symlink creation)
+  - Create unit tests that **guarantee the same behavior as the current `docker-entrypoint.sh`** (using fs mocks, verifying each state of directories, symlinks, ownership, and permissions)
+  - Verify that the process exits (exit code 1) on failure (e.g., volume mount not configured)
+  - _Requirements: 6.3, 6.4_
+
+- [x] 2.3 Implement privilege dropping
+  - Implement demotion from root to node user (UID 1000, GID 1000)
+  - Initialize supplementary groups, strictly following the order of setgid then setuid (reverse order causes setgid to fail)
+  - Output an error message and exit the process on privilege drop failure
+  - _Requirements: 4.1, 6.2_
+
+- [x] 3. Heap size calculation and node flag assembly
+- [x] 3.1 (P) Implement cgroup memory limit detection
+  - Implement reading and numeric parsing of cgroup v2 files (treat the `"max"` string as unlimited)
+  - Implement fallback to cgroup v1 files (treat values exceeding 64GB as unlimited)
+  - Calculate 60% of the memory limit as the heap size (in MB)
+  - On file read failure, output a warning log and continue without flags (V8 default)
+  - Create unit tests for each pattern (v2 normal detection, v2 unlimited, v1 fallback, v1 unlimited, detection unavailable)
+  - _Requirements: 2.2, 2.3_
+
+- [x] 3.2 (P) Implement heap size specification via environment variable
+  - Implement parsing and validation of the `GROWI_HEAP_SIZE` environment variable (positive integer, in MB)
+  - On invalid values (NaN, negative numbers, empty string), output a warning log and fall back to no flags
+  - Confirm via tests that the environment variable takes priority over cgroup auto-calculation
+  - _Requirements: 2.1_
+
+- [x] 3.3 Implement node flag assembly and log output
+  - Implement the 3-tier fallback integration logic (environment variable -> cgroup calculation -> V8 default)
+  - Always include the `--expose_gc` flag
+  - Add `--optimize-for-size` when `V8_OPTIMIZE_FOR_SIZE=true`, and `--lite-mode` when `V8_LITE_MODE=true`
+  - Pass `--max-heap-size` directly as a spawn argument (do not use `--max_old_space_size`, do not include in `NODE_OPTIONS`)
+  - Log the applied flags to stdout (including which tier determined the value)
+  - Create unit tests for each combination of environment variables (all unset, `V8_MAX_HEAP_SIZE` only, all enabled, etc.)
+  - _Requirements: 2.4, 2.5, 2.6, 2.7, 6.1, 6.6, 6.7_
+
+- [x] 4. Migration execution and app process management
+- [x] 4.1 Direct migration execution
+  - Execute migrate-mongo by directly calling the node binary (do not use npm run, do not go through a shell)
+  - Inherit stdio to display migration logs
+  - On migration failure, catch the exception and exit the process, prompting restart by the container orchestrator
+  - _Requirements: 6.5_
+
+- [x] 4.2 App process startup and signal management
+  - Start the application as a child process with the calculated node flags included in the arguments
+  - Forward SIGTERM, SIGINT, and SIGHUP to the child process
+  - Propagate the child process exit code (or signal) as the entrypoint exit code
+  - Create tests to verify PID 1 responsibilities (signal forwarding, child process reaping, graceful shutdown)
+  - _Requirements: 6.2, 6.5_
+
+- [x] 5. Dockerfile reconstruction (current 3-stage pattern + DHI)
+- [x] 5.1 (P) Build the base stage
+  - Set the DHI dev image as the base and update the syntax directive to auto-follow the latest stable version
+  - Install pnpm via wget standalone script (eliminate hardcoded versions)
+  - Install turbo globally
+  - Install packages required for building with `--no-install-recommends` and apply apt cache mounts
+  - _Requirements: 1.1, 1.2, 1.3, 1.5, 3.3, 4.4_
+
+- [x] 5.2 Build the builder stage
+  - Maintain the current `COPY . .` pattern to copy the entire monorepo, then install dependencies, build, and extract production dependencies
+  - Fix the `--frozen-lockfile` typo (3 dashes -> 2 dashes)
+  - Configure pnpm store cache mounts to reduce rebuild time
+  - Extract only production dependencies and package them into tar.gz (including the `apps/app/tmp` directory)
+  - Guarantee that `.next/cache` is not included in the artifact
+  - _Requirements: 1.4, 3.2, 3.4_
+
+- [x] 5.3 Build the release stage
+  - Set the DHI runtime image as the base with no additional binary copying
+  - Extract build stage artifacts via bind mount
+  - COPY the TypeScript entrypoint file and set ENTRYPOINT to direct execution via node
+  - Verify that build tools (turbo, pnpm, node-gyp, etc.) and build packages (wget, curl, etc.) are not included in the release stage
+  - _Requirements: 1.1, 3.5, 4.2, 4.5_
+
+- [x] 5.4 (P) Configure OCI labels and port/volume declarations
+  - Set OCI standard labels (source, title, description, vendor)
+  - Maintain `EXPOSE 3000` and `VOLUME /data`
+  - _Requirements: 5.1, 5.2, 5.3_
+
+- [x] 6. Integration verification and backward compatibility confirmation
+- [x] 6.1 Docker build E2E verification
+  - Execute a Docker build where all 3 stages complete successfully and confirm there are no build errors
+  - Verify that the release image does not contain a shell, apt, or build tools
+  - _Requirements: 1.1, 4.2, 4.5_
+
+- [x] 6.2 Runtime behavior and backward compatibility verification
+  - Verify that environment variables (`MONGO_URI`, `FILE_UPLOAD`, etc.) are transparently passed to the application as before
+  - Verify compatibility with `/data` volume mounts and file upload functionality
+  - Verify listening on port 3000
+  - Verify that V8 default behavior is used when memory management environment variables are not set
+  - Verify startup with `docker compose up` and graceful shutdown via SIGTERM
+  - _Requirements: 7.1, 7.2, 7.3, 7.4, 7.5_
+
+## Phase 2: turbo prune --docker build optimization
+
+> To be done after runtime is stable in Phase 1. Migrate from the current `COPY . .` + 3-stage structure to a `turbo prune --docker` + 5-stage structure to improve build cache efficiency.
+
+- [x] 7. Introduction of turbo prune --docker pattern
+- [x] 7.1 Create pruner stage
+  - Add a pruner stage immediately after the base stage, minimizing the monorepo for Docker with `turbo prune @growi/app @growi/pdf-converter --docker`
+  - Reason for including `@growi/pdf-converter`: `@growi/pdf-converter-client/turbo.json` has a task dependency on `@growi/pdf-converter#gen:swagger-spec`, so turbo cannot resolve task dependencies unless it is included in the pruned workspace
+  - Verified compatibility with pnpm workspace (18 packages are correctly output)
+  - Confirmed that the output (json directory, lockfile, full directory) is generated correctly
+  - _Requirements: 3.1_
+
+- [x] 7.2 Separate deps stage and restructure builder
+  - Separate dependency installation from the builder stage into an independent deps stage
+  - Copy only the package.json files and lockfile from pruner output to install dependencies (layer cache optimization)
+  - Change the builder stage to a structure that uses deps as a base and only copies source code and builds
+  - Verify that the dependency installation layer is cached when there are no dependency changes and only source code changes
+  - _Requirements: 3.1, 3.2_
+
+- [x] 7.3 Integration verification of 5-stage structure
+  - Confirm that all 5 stages (base -> pruner -> deps -> builder -> release) complete successfully
+  - Confirm that the same runtime behavior as the Phase 1 3-stage structure is maintained
+  - Verify improvement in build cache efficiency (dependency installation is skipped when only source code changes)
+  - _Requirements: 3.1, 3.2, 3.4_
+
+## Phase 3: Production replacement and CI/CD support
+
+> To be done after the 5-stage structure is stable in Phase 2. Move the artifacts from `apps/app/docker-new/` to `apps/app/docker/`, delete the old files, and update the CI/CD pipeline for DHI support.
+
+- [x] 8. Production replacement and CI/CD support
+- [x] 8.1 (P) Replace docker-new directory with docker directory
+  - Delete old files in `apps/app/docker/` (old `Dockerfile`, `docker-entrypoint.sh`, old `Dockerfile.dockerignore`)
+  - Move all files in `apps/app/docker-new/` (`Dockerfile`, `docker-entrypoint.ts`, `docker-entrypoint.spec.ts`, `Dockerfile.dockerignore`) to `apps/app/docker/`
+  - Delete the `apps/app/docker-new/` directory
+  - Confirm that the `codebuild/` directory and `README.md` are maintained within `apps/app/docker/`
+  - Update the entrypoint copy path in the Dockerfile (from `apps/app/docker-new/docker-entrypoint.ts` to `apps/app/docker/docker-entrypoint.ts`)
+  - _Requirements: 8.1, 8.2_
+
+- [x] 8.2 (P) Add DHI registry login to buildspec.yml
+  - Add a `docker login dhi.io` command to the pre_build phase of `apps/app/docker/codebuild/buildspec.yml`
+  - DHI uses Docker Hub credentials, so reuse the existing `DOCKER_REGISTRY_PASSWORD` secret and `growimoogle` username
+  - Confirm that the Dockerfile path in buildspec.yml (`./apps/app/docker/Dockerfile`) is correct after replacement
+  - _Requirements: 8.3, 8.4_
+
+- [x] 8.3 Integration verification after replacement
+  - Confirm that Docker build completes successfully with the replaced `apps/app/docker/Dockerfile`
+  - Confirm that existing external references (`codebuild.tf`, `.github/workflows/release.yml`, `ci-app.yml`, `update-readme.sh`) work correctly
+  - _Requirements: 8.1, 8.2, 8.3, 8.4_
+
+## Phase 4: Environment variable renaming and README documentation
+
+> Rename the `GROWI_`-prefixed memory management environment variables to `V8_`-prefixed names aligned with V8 option names, and add documentation to the Docker Hub README.
+
+- [x] 9. Rename environment variables to align with V8 option names
+- [x] 9.1 (P) Rename all GROWI_-prefixed environment variables to V8_-prefixed names in the entrypoint
+  - Rename `GROWI_HEAP_SIZE` to `V8_MAX_HEAP_SIZE` in the heap size detection function, validation logic, and error messages
+  - Rename `GROWI_OPTIMIZE_MEMORY` to `V8_OPTIMIZE_FOR_SIZE` in the node flag assembly function
+  - Rename `GROWI_LITE_MODE` to `V8_LITE_MODE` in the node flag assembly function
+  - Update the heap size source log message to reflect the new variable name
+  - Update the file header comment documenting the heap size detection fallback chain
+  - _Requirements: 2.1, 2.5, 2.6, 2.7, 6.1, 6.6, 6.7_
+
+- [x] 9.2 (P) Update all environment variable references in entrypoint unit tests
+  - Update heap size detection tests: replace all `GROWI_HEAP_SIZE` references with `V8_MAX_HEAP_SIZE`
+  - Update node flag assembly tests: replace `GROWI_OPTIMIZE_MEMORY` with `V8_OPTIMIZE_FOR_SIZE` and `GROWI_LITE_MODE` with `V8_LITE_MODE`
+  - Verify all tests pass with the new environment variable names
+  - _Requirements: 2.1, 2.5, 2.6_
+
+- [x] 10. Add V8 memory management environment variable documentation to README
+  - Add a subsection under Configuration > Environment Variables documenting the three V8 memory management variables
+  - Include variable name, type, default value, and description for each: `V8_MAX_HEAP_SIZE`, `V8_OPTIMIZE_FOR_SIZE`, `V8_LITE_MODE`
+  - Describe the 3-tier heap size fallback behavior (env var → cgroup auto-calculation → V8 default)
+  - _Requirements: 5.1_

+ 284 - 0
.kiro/specs/presentation/design.md

@@ -0,0 +1,284 @@
+# Design Document: presentation
+
+## Overview
+
+**Purpose**: This feature decouples heavy Marp rendering dependencies (`@marp-team/marp-core`, `@marp-team/marpit`) from the common slide rendering path, so they are loaded only when a page explicitly uses `marp: true` frontmatter.
+
+**Users**: All GROWI users benefit from reduced JavaScript payload when viewing slide pages that do not use Marp. Developers benefit from clearer module boundaries.
+
+**Impact**: Changes the `@growi/presentation` package's internal import structure. No external API or behavioral changes.
+
+### Goals
+- Eliminate `@marp-team/marp-core` and `@marp-team/marpit` from the GrowiSlides module graph
+- Load MarpSlides and its Marp dependencies only on demand via dynamic import
+- Maintain identical rendering behavior for all slide types
+
+### Non-Goals
+- Optimizing the `useSlidesByFrontmatter` hook (already lightweight with internal dynamic imports)
+- Changing the app-level lazy-loading architecture (`useLazyLoader`, `next/dynamic` for the modal)
+- Modifying the Marp rendering logic or configuration itself
+- Reducing the size of `@marp-team/marp-core` internals
+
+## Architecture
+
+### Existing Architecture Analysis
+
+Current module dependency graph for `Slides.tsx`:
+
+```mermaid
+graph TD
+  Slides[Slides.tsx]
+  MarpSlides[MarpSlides.tsx]
+  GrowiSlides[GrowiSlides.tsx]
+  GrowiMarpit[growi-marpit.ts]
+  MarpCore[marp-team/marp-core]
+  Marpit[marp-team/marpit]
+
+  Slides --> MarpSlides
+  Slides --> GrowiSlides
+  MarpSlides --> GrowiMarpit
+  GrowiSlides --> GrowiMarpit
+  GrowiMarpit --> MarpCore
+  GrowiMarpit --> Marpit
+```
+
+**Problem**: Both `MarpSlides` and `GrowiSlides` statically import `growi-marpit.ts`, which instantiates Marp at module scope. This forces ~896KB of Marp modules to load even when rendering non-Marp slides.
+
+**Constraints**:
+- `growi-marpit.ts` exports both runtime Marp instances and a simple string constant (`MARP_CONTAINER_CLASS_NAME`)
+- `GrowiSlides` uses Marp only for CSS extraction (`marpit.render('')`), not for content rendering
+- The package uses Vite with `preserveModules: true` and `nodeExternals({ devDeps: true })`
+
+### Architecture Pattern & Boundary Map
+
+Target module dependency graph:
+
+```mermaid
+graph TD
+  Slides[Slides.tsx]
+  MarpSlides[MarpSlides.tsx]
+  GrowiSlides[GrowiSlides.tsx]
+  GrowiMarpit[growi-marpit.ts]
+  MarpCore[marp-team/marp-core]
+  Marpit[marp-team/marpit]
+  Consts[consts/index.ts]
+  BaseCss[consts/marpit-base-css.ts]
+
+  Slides -.->|React.lazy| MarpSlides
+  Slides --> GrowiSlides
+  MarpSlides --> GrowiMarpit
+  GrowiMarpit --> MarpCore
+  GrowiMarpit --> Marpit
+  GrowiMarpit --> Consts
+  GrowiSlides --> Consts
+  GrowiSlides --> BaseCss
+```
+
+**Key changes**:
+- Dashed arrow = dynamic import (React.lazy), solid arrow = static import
+- `GrowiSlides` no longer imports `growi-marpit.ts`; uses pre-extracted CSS from `marpit-base-css.ts`
+- `MARP_CONTAINER_CLASS_NAME` moves to shared `consts/index.ts`
+- Marp modules are only reachable through the dynamic `MarpSlides` path
+
+**Steering compliance**:
+- Follows "subpath imports over barrel imports" principle
+- Aligns with `next/dynamic({ ssr: false })` technique from build-optimization skill
+- Preserves existing lazy-loading boundaries at app level
+
+### Technology Stack
+
+| Layer | Choice / Version | Role in Feature | Notes |
+|-------|------------------|-----------------|-------|
+| Frontend | React 18 (`React.lazy`, `Suspense`) | Dynamic import boundary for MarpSlides | Standard React code-splitting; SSR not a concern (see research.md) |
+| Build | Vite (existing) | Package compilation with `preserveModules` | Dynamic `import()` preserved in output |
+| Build script | Node.js ESM (.mjs) | CSS extraction at build time | No additional tooling dependency |
+
+## System Flows
+
+### Slide Rendering Decision Flow
+
+```mermaid
+flowchart TD
+  A[Slides component receives props] --> B{hasMarpFlag?}
+  B -->|true| C[React.lazy loads MarpSlides chunk]
+  C --> D[MarpSlides renders via marp-core]
+  B -->|false| E[GrowiSlides renders with pre-extracted CSS]
+  E --> F[No Marp modules loaded]
+```
+
+### Build-Time CSS Extraction Flow
+
+```mermaid
+flowchart TD
+  A[pnpm run build] --> B[pre:build:src script runs]
+  B --> C[extract-marpit-css.mjs executes]
+  C --> D[Instantiate Marp with config]
+  D --> E[Call slideMarpit.render and presentationMarpit.render]
+  E --> F[Write marpit-base-css.ts with CSS constants]
+  F --> G[vite build compiles all sources]
+```
+
+## Components and Interfaces
+
+| Component | Domain | Intent | Req Coverage | Key Dependencies | Contracts |
+|-----------|--------|--------|--------------|------------------|-----------|
+| Slides | UI / Routing | Route between MarpSlides and GrowiSlides based on hasMarpFlag | 2.1, 2.2, 2.3 | MarpSlides (P1, dynamic), GrowiSlides (P0, static) | — |
+| GrowiSlides | UI / Rendering | Render non-Marp slides with pre-extracted CSS | 1.1, 1.2, 1.3, 1.4 | consts (P0), marpit-base-css (P0) | — |
+| marpit-base-css | Constants | Provide pre-extracted Marp theme CSS | 1.3, 3.3 | — | State |
+| consts/index.ts | Constants | Shared constants including MARP_CONTAINER_CLASS_NAME | 1.4 | — | — |
+| growi-marpit.ts | Service | Marp engine setup (unchanged, now only reached via MarpSlides) | 4.1, 4.3 | marp-core (P0, external), marpit (P0, external) | Service |
+| extract-marpit-css.mjs | Build Script | Generate marpit-base-css.ts at build time | 3.1, 3.2 | marp-core (P0, external), marpit (P0, external) | — |
+
+### UI / Routing Layer
+
+#### Slides
+
+| Field | Detail |
+|-------|--------|
+| Intent | Route rendering to MarpSlides (dynamic) or GrowiSlides (static) based on `hasMarpFlag` prop |
+| Requirements | 2.1, 2.2, 2.3 |
+
+**Responsibilities & Constraints**
+- Conditionally render MarpSlides or GrowiSlides based on `hasMarpFlag`
+- Wrap MarpSlides in `<Suspense>` with a loading fallback
+- Must not statically import MarpSlides
+
+**Dependencies**
+- Outbound: MarpSlides — dynamic import for Marp rendering (P1)
+- Outbound: GrowiSlides — static import for non-Marp rendering (P0)
+
+**Contracts**: State [x]
+
+##### State Management
+
+```typescript
+// Updated Slides component signature (unchanged externally)
+type SlidesProps = {
+  options: PresentationOptions;
+  children?: string;
+  hasMarpFlag?: boolean;
+  presentation?: boolean;
+};
+
+// Internal: MarpSlides loaded via React.lazy
+// const MarpSlides = lazy(() => import('./MarpSlides').then(mod => ({ default: mod.MarpSlides })))
+```
+
+- Persistence: None (stateless component)
+- Concurrency: Single render path per props
+
+**Implementation Notes**
+- Integration: Replace static `import { MarpSlides }` with `React.lazy` dynamic import
+- Validation: `hasMarpFlag` is optional boolean; undefined/false → GrowiSlides path
+- Risks: Suspense fallback visible during first MarpSlides load (mitigated by parent-level loading indicators)
+
+### UI / Rendering Layer
+
+#### GrowiSlides
+
+| Field | Detail |
+|-------|--------|
+| Intent | Render non-Marp slides using pre-extracted CSS instead of runtime Marp |
+| Requirements | 1.1, 1.2, 1.3, 1.4 |
+
+**Responsibilities & Constraints**
+- Render slide content via ReactMarkdown with section extraction
+- Apply Marp container CSS from pre-extracted constants (no runtime Marp dependency)
+- Use `MARP_CONTAINER_CLASS_NAME` from shared constants module
+
+**Dependencies**
+- Inbound: Slides — rendering delegation (P0)
+- Outbound: consts/index.ts — `MARP_CONTAINER_CLASS_NAME` (P0)
+- Outbound: consts/marpit-base-css — `SLIDE_MARPIT_CSS`, `PRESENTATION_MARPIT_CSS` (P0)
+
+**Implementation Notes**
+- Integration: Replace `import { ... } from '../services/growi-marpit'` with imports from `../consts` and `../consts/marpit-base-css`
+- Replace `const { css } = marpit.render('')` with `const css = presentation ? PRESENTATION_MARPIT_CSS : SLIDE_MARPIT_CSS`
+
+### Constants Layer
+
+#### marpit-base-css
+
+| Field | Detail |
+|-------|--------|
+| Intent | Provide pre-extracted Marp theme CSS as string constants |
+| Requirements | 1.3, 3.3 |
+
+**Contracts**: State [x]
+
+##### State Management
+
+```typescript
+// Generated file — do not edit manually
+// Regenerate with: node scripts/extract-marpit-css.mjs
+
+export const SLIDE_MARPIT_CSS: string;
+export const PRESENTATION_MARPIT_CSS: string;
+```
+
+- Persistence: Committed to repository; regenerated at build time
+- Consistency: Deterministic output from Marp config; changes only on `@marp-team/marp-core` version update
+
+#### consts/index.ts
+
+| Field | Detail |
+|-------|--------|
+| Intent | Shared constants for presentation package |
+| Requirements | 1.4 |
+
+**Implementation Notes**
+- Add `export const MARP_CONTAINER_CLASS_NAME = 'marpit'` (moved from `growi-marpit.ts`)
+- Existing `PresentationOptions` type export unchanged
+
+### Service Layer
+
+#### growi-marpit.ts
+
+| Field | Detail |
+|-------|--------|
+| Intent | Marp engine setup and instance creation (unchanged behavior, now only reachable via MarpSlides dynamic path) |
+| Requirements | 4.1, 4.3 |
+
+**Contracts**: Service [x]
+
+##### Service Interface
+
+```typescript
+// Existing exports (unchanged)
+export const MARP_CONTAINER_CLASS_NAME: string; // Re-exported from consts for backward compat
+export const slideMarpit: Marp;
+export const presentationMarpit: Marp;
+```
+
+- Preconditions: `@marp-team/marp-core` and `@marp-team/marpit` available
+- Postconditions: Marp instances configured with GROWI options
+- Invariants: Instances are singletons created at module scope
+
+**Implementation Notes**
+- Import `MARP_CONTAINER_CLASS_NAME` from `../consts` instead of defining locally
+- Re-export for backward compatibility (MarpSlides still imports from here)
+
+### Build Script
+
+#### extract-marpit-css.mjs
+
+| Field | Detail |
+|-------|--------|
+| Intent | Generate `marpit-base-css.ts` by running Marp with the same configuration as `growi-marpit.ts` |
+| Requirements | 3.1, 3.2 |
+
+**Responsibilities & Constraints**
+- Replicate the Marp configuration from `growi-marpit.ts` (container classes, inlineSVG, etc.)
+- Produce the base CSS output by calling `slideMarpit.render('')` and `presentationMarpit.render('')`
+- Write TypeScript file with exported string constants
+- Must run in Node.js ESM without additional tooling (no `tsx`, no `ts-node`)
+
+**Dependencies**
+- External: `@marp-team/marp-core` — Marp rendering engine (P0)
+- External: `@marp-team/marpit` — Element class for container config (P0)
+
+**Implementation Notes**
+- Integration: Added as `"pre:build:src"` script in `package.json`; runs before `vite build`
+- Validation: Script exits with error if CSS extraction produces empty output
+- Risks: Marp options must stay synchronized with `growi-marpit.ts`
+

+ 26 - 0
.kiro/specs/presentation/requirements.md

@@ -0,0 +1,26 @@
+# Presentation Feature — Requirements Overview
+
+## Introduction
+
+The GROWI presentation feature (`@growi/presentation` package) provides slide rendering for wiki pages using frontmatter flags. It supports two rendering modes:
+
+- **GrowiSlides** (`slide: true`): Lightweight slide rendering using ReactMarkdown with Marp container styling applied via pre-extracted CSS constants. Does not load Marp runtime dependencies.
+- **MarpSlides** (`marp: true`): Full Marp-powered slide rendering using `@marp-team/marp-core`, loaded dynamically only when needed.
+
+## Key Requirements
+
+### 1. Module Separation
+
+GrowiSlides renders without loading `@marp-team/marp-core` or `@marp-team/marpit`. Marp dependencies are isolated behind a dynamic import boundary (`React.lazy`) and only loaded for pages with `marp: true`.
+
+### 2. Build-Time CSS Extraction
+
+Marp base CSS is pre-extracted at build time via `extract-marpit-css.mjs`. The generated constants file (`consts/marpit-base-css.ts`) is committed to the repository so that dev mode works without running the extraction script first. The extraction runs automatically before source compilation via the `pre:build:src` script.
+
+### 3. Functional Equivalence
+
+Both Marp and non-Marp slide pages render correctly in inline view and presentation modal. No behavioral differences from the user's perspective.
+
+### 4. Build Integrity
+
+Both `@growi/presentation` and `@growi/app` build successfully. The GrowiSlides build output contains no Marp module references, and the Slides build output contains a dynamic `import()` for MarpSlides.

+ 84 - 0
.kiro/specs/presentation/research.md

@@ -0,0 +1,84 @@
+# Research & Design Decisions
+
+## Summary
+- **Feature**: `presentation` (working name: `optimize-presentation`)
+- **Discovery Scope**: Extension
+- **Key Findings**:
+  - `GrowiSlides` imports `growi-marpit.ts` only for CSS extraction (`marpit.render('')`) and a string constant — no Marp rendering is performed
+  - `growi-marpit.ts` instantiates `new Marp(...)` at module scope, pulling in `@marp-team/marp-core` (~524KB) and `@marp-team/marpit` (~372KB) unconditionally
+  - All consumers of `Slides.tsx` are already behind SSR-disabled dynamic boundaries (`next/dynamic`, `useLazyLoader`), making `React.lazy` safe to use within the package
+
+## Research Log
+
+### GrowiSlides dependency on growi-marpit.ts
+- **Context**: Investigating why GrowiSlides requires Marp at all
+- **Sources Consulted**: `packages/presentation/src/client/components/GrowiSlides.tsx`, `packages/presentation/src/client/services/growi-marpit.ts`
+- **Findings**:
+  - `GrowiSlides` imports `MARP_CONTAINER_CLASS_NAME` (string `'marpit'`), `presentationMarpit`, `slideMarpit`
+  - Usage: `const { css } = marpit.render('');` — renders empty string to extract base theme CSS
+  - The CSS output is deterministic (same Marp config → same CSS every time)
+  - `MARP_CONTAINER_CLASS_NAME` is a plain string constant co-located with heavy Marp code
+- **Implications**: GrowiSlides can be fully decoupled by pre-extracting the CSS and moving the constant
+
+### Vite build with preserveModules and React.lazy
+- **Context**: Verifying that React.lazy dynamic import works correctly in the package's Vite build output
+- **Sources Consulted**: `packages/presentation/vite.config.ts`
+- **Findings**:
+  - Build uses `preserveModules: true` with `preserveModulesRoot: 'src'` — each source file → separate output module
+  - `nodeExternals({ devDeps: true })` externalizes `@marp-team/marp-core` and `@marp-team/marpit`
+  - Dynamic `import('./MarpSlides')` in source will produce dynamic `import('./MarpSlides.js')` in output
+  - Next.js (app bundler) handles code-splitting of the externalized Marp packages into async chunks
+- **Implications**: React.lazy in Slides.tsx will produce the correct dynamic import boundary in the build output
+
+### SSR safety of React.lazy
+- **Context**: React.lazy does not support SSR in React 18; need to verify all render paths are client-only
+- **Sources Consulted**: Consumer components in apps/app
+- **Findings**:
+  - `PagePresentationModal`: `useLazyLoader` → client-only
+  - `Presentation` wrapper: `next/dynamic({ ssr: false })`
+  - `SlideRenderer`: `next/dynamic({ ssr: false })` in PageView.tsx
+  - No SSR path exists for `Slides.tsx`
+- **Implications**: React.lazy is safe; no need for `next/dynamic` in the shared package
+
+### CSS extraction approach
+- **Context**: Evaluating how to pre-extract the Marp base CSS
+- **Sources Consulted**: `growi-marpit.ts`, package.json scripts, existing `build:vendor-styles` pattern
+- **Findings**:
+  - `slideMarpit.render('')` and `presentationMarpit.render('')` produce deterministic CSS strings
+  - CSS only changes when `@marp-team/marp-core` is upgraded or Marp options change
+  - The package already has a `build:vendor-styles` script pattern for pre-building CSS assets
+  - `tsx` is not available in the workspace; extraction script must use plain Node.js (.mjs)
+- **Implications**: An .mjs extraction script with dynamic imports from the built package or direct Marp usage is the simplest approach
+
+## Design Decisions
+
+### Decision: Pre-extracted CSS constants vs runtime generation
+- **Context**: GrowiSlides needs Marp theme CSS but should not load Marp runtime
+- **Alternatives Considered**:
+  1. Dynamic import of growi-marpit in GrowiSlides — adds async complexity for a static value
+  2. Pre-extract CSS at build time as string constants — zero runtime cost
+  3. CSS file extracted to dist — requires additional CSS import handling
+- **Selected Approach**: Pre-extract CSS as TypeScript string constants in `consts/marpit-base-css.ts`
+- **Rationale**: The CSS is deterministic; generating it at build time eliminates runtime overhead entirely. TypeScript constants integrate seamlessly with existing import patterns.
+- **Trade-offs**: Requires regeneration on Marp version upgrade (mitigated by `pre:build:src` script)
+- **Follow-up**: Verify CSS output matches runtime generation; commit generated file for dev mode
+
+### Decision: React.lazy vs next/dynamic for MarpSlides
+- **Context**: Need dynamic import boundary for MarpSlides within the shared `@growi/presentation` package
+- **Alternatives Considered**:
+  1. `next/dynamic` — Next.js-specific, couples package to framework
+  2. `React.lazy + Suspense` — standard React, works with any bundler
+- **Selected Approach**: `React.lazy + Suspense`
+- **Rationale**: Although the package already uses `next/head`, `React.lazy` is the standard React pattern for code-splitting and avoids further Next.js coupling. All consumer paths already disable SSR, so React.lazy's SSR limitation is irrelevant.
+- **Trade-offs**: Requires `<Suspense>` wrapper; fallback UI is visible during chunk load
+- **Follow-up**: Verify chunk splitting in Next.js production build
+
+### Decision: Shared constants module for MARP_CONTAINER_CLASS_NAME
+- **Context**: `MARP_CONTAINER_CLASS_NAME` is defined in `growi-marpit.ts` but needed by GrowiSlides without Marp dependency
+- **Selected Approach**: Move to `consts/index.ts` (existing shared constants module)
+- **Rationale**: The constant has no dependency on Marp; co-locating it with Marp code creates an unnecessary transitive import
+
+## Risks & Mitigations
+- **CSS drift on Marp upgrade**: Pre-extracted CSS may become stale → `pre:build:src` script auto-regenerates before every build
+- **Suspense flash on Marp load**: Brief loading indicator when MarpSlides loads → Masked by parent `next/dynamic` loading spinner in most paths
+- **Build script compatibility**: `.mjs` script must work in CI and local dev → Use standard Node.js ESM with no external tooling dependencies

+ 23 - 0
.kiro/specs/presentation/spec.json

@@ -0,0 +1,23 @@
+{
+  "feature_name": "presentation",
+  "created_at": "2026-03-05T12:00:00Z",
+  "updated_at": "2026-03-23T00:00:00Z",
+  "language": "en",
+  "phase": "implementation-complete",
+  "cleanup_completed": true,
+  "approvals": {
+    "requirements": {
+      "generated": true,
+      "approved": true
+    },
+    "design": {
+      "generated": true,
+      "approved": true
+    },
+    "tasks": {
+      "generated": true,
+      "approved": true
+    }
+  },
+  "ready_for_implementation": true
+}

+ 262 - 0
.kiro/specs/upgrade-fixed-packages/design.md

@@ -0,0 +1,262 @@
+# Design Document: upgrade-fixed-packages
+
+## Overview
+
+**Purpose**: This feature audits and upgrades version-pinned packages in `apps/app/package.json` that were frozen due to upstream bugs, ESM-only migrations, or licensing constraints. The build environment has shifted from webpack to Turbopack, and the runtime now targets Node.js 24 with stable `require(esm)` support, invalidating several original pinning reasons.
+
+**Users**: Maintainers and developers benefit from up-to-date dependencies with bug fixes, security patches, and reduced technical debt.
+
+**Impact**: Modifies `apps/app/package.json` dependency versions and comment blocks; touches source files where `escape-string-regexp` is replaced by native `RegExp.escape()`.
+
+### Goals
+- Verify each pinning reason against current upstream status
+- Upgrade packages where the original constraint no longer applies
+- Replace `escape-string-regexp` with native `RegExp.escape()` (Node.js 24)
+- Update or remove comment blocks to reflect current state
+- Produce audit documentation for future reference
+
+### Non-Goals
+- Replacing handsontable with an alternative library (license constraint remains; replacement is a separate initiative)
+- Upgrading `@keycloak/keycloak-admin-client` to v19+ (significant API breaking changes; deferred to separate task)
+- Major version upgrades of unrelated packages
+- Modifying the build pipeline or Turbopack configuration
+
+## Architecture
+
+This is a dependency maintenance task, not a feature implementation. No new components or architectural changes are introduced.
+
+### Existing Architecture Analysis
+
+The pinned packages fall into distinct categories by their usage context:
+
+| Category | Packages | Build Context |
+|----------|----------|---------------|
+| Server-only (tsc → CJS) | `escape-string-regexp`, `@aws-sdk/*`, `@keycloak/*` | Express server compiled by tsc |
+| Client-only (Turbopack) | `string-width` (via @growi/editor), `bootstrap` | Bundled by Turbopack/Vite |
+| Client + SSR | `next-themes` | Turbopack + SSR rendering |
+| License-pinned | `handsontable`, `@handsontable/react` | Client-only |
+
+Key enabler: Node.js ^24 provides stable `require(esm)` support, removing the fundamental CJS/ESM incompatibility that caused several pins.
+
+### Technology Stack
+
+| Layer | Choice / Version | Role in Feature | Notes |
+|-------|------------------|-----------------|-------|
+| Runtime | Node.js ^24 | Enables `require(esm)` and `RegExp.escape()` | ES2026 Stage 4 features available |
+| Build (client) | Turbopack (Next.js 16) | Bundles ESM-only packages without issues | No changes needed |
+| Build (server) | tsc (CommonJS output) | `require(esm)` handles ESM-only imports | Node.js 24 native support |
+| Package manager | pnpm v10 | Manages dependency resolution | No changes needed |
+
+## System Flows
+
+### Upgrade Verification Flow
+
+```mermaid
+flowchart TD
+    Start[Select package to upgrade] --> Update[Update version in package.json]
+    Update --> Install[pnpm install]
+    Install --> Build{turbo run build}
+    Build -->|Pass| Lint{turbo run lint}
+    Build -->|Fail| Revert[Revert package change]
+    Lint -->|Pass| Test{turbo run test}
+    Lint -->|Fail| Revert
+    Test -->|Pass| Verify[Verify .next/node_modules symlinks]
+    Test -->|Fail| Revert
+    Verify --> Next[Proceed to next package]
+    Revert --> Document[Document failure reason]
+    Document --> Next
+```
+
+Each package is upgraded and verified independently. Failures are isolated and reverted without affecting other upgrades.
+
+## Requirements Traceability
+
+| Requirement | Summary | Components | Action |
+|-------------|---------|------------|--------|
+| 1.1 | Bootstrap bug investigation | PackageAudit | Verify #39798 fixed in v5.3.4 |
+| 1.2 | next-themes issue investigation | PackageAudit | Verify #122 resolved; check v0.4.x compatibility |
+| 1.3 | @aws-sdk constraint verification | PackageAudit | Confirm mongodb constraint is on different package |
+| 1.4 | Document investigation results | AuditReport | Summary table in research.md |
+| 2.1 | ESM compatibility per package | PackageAudit | Assess escape-string-regexp, string-width, @keycloak |
+| 2.2 | Server build ESM support | PackageAudit | Verify Node.js 24 require(esm) for server context |
+| 2.3 | Client build ESM support | PackageAudit | Confirm Turbopack handles ESM-only packages |
+| 2.4 | Compatibility matrix | AuditReport | Table in research.md |
+| 3.1 | Handsontable license check | PackageAudit | Confirm v7+ still non-MIT |
+| 3.2 | Document pinning requirement | AuditReport | Note in audit summary |
+| 4.1 | Update package.json versions and comments | UpgradeExecution | Modify versions and comment blocks |
+| 4.2 | Build verification | UpgradeExecution | `turbo run build --filter @growi/app` |
+| 4.3 | Lint verification | UpgradeExecution | `turbo run lint --filter @growi/app` |
+| 4.4 | Test verification | UpgradeExecution | `turbo run test --filter @growi/app` |
+| 4.5 | Revert on failure | UpgradeExecution | Git revert per package |
+| 4.6 | Update comment blocks | UpgradeExecution | Remove or update comments |
+| 5.1 | Audit summary table | AuditReport | Final summary with decisions |
+| 5.2 | Document continued pinning | AuditReport | Reasons for remaining pins |
+| 5.3 | Document upgrade rationale | AuditReport | What changed upstream |
+
+## Components and Interfaces
+
+| Component | Domain | Intent | Req Coverage | Key Dependencies |
+|-----------|--------|--------|--------------|------------------|
+| PackageAudit | Investigation | Research upstream status for each pinned package | 1.1–1.4, 2.1–2.4, 3.1–3.2 | GitHub issues, npm registry |
+| UpgradeExecution | Implementation | Apply version changes and verify build | 4.1–4.6 | pnpm, turbo, tsc |
+| SourceMigration | Implementation | Replace escape-string-regexp with RegExp.escape() | 4.1 | 9 source files |
+| AuditReport | Documentation | Produce summary of all decisions | 5.1–5.3 | research.md |
+
+### Investigation Layer
+
+#### PackageAudit
+
+| Field | Detail |
+|-------|--------|
+| Intent | Investigate upstream status of each pinned package and determine upgrade feasibility |
+| Requirements | 1.1, 1.2, 1.3, 1.4, 2.1, 2.2, 2.3, 2.4, 3.1, 3.2 |
+
+**Responsibilities & Constraints**
+- Check upstream issue trackers for bug fix status
+- Verify ESM compatibility against Node.js 24 `require(esm)` and Turbopack
+- Confirm license status for handsontable
+- Produce actionable recommendation per package
+
+**Audit Decision Matrix**
+
+| Package | Current | Action | Target | Risk | Rationale |
+|---------|---------|--------|--------|------|-----------|
+| `bootstrap` | `=5.3.2` | Upgrade | `^5.3.4` | Low | Bug #39798 fixed in v5.3.4 |
+| `next-themes` | `^0.2.1` | Upgrade | `^0.4.4` | Medium | Original issue was misattributed; v0.4.x works with Pages Router |
+| `escape-string-regexp` | `^4.0.0` | Replace | Remove dep | Low | Native `RegExp.escape()` in Node.js 24 |
+| `string-width` | `=4.2.2` | Upgrade | `^7.0.0` | Low | Used only in ESM context (@growi/editor) |
+| `@aws-sdk/client-s3` | `3.454.0` | Relax | `^3.454.0` | Low | Pinning comment was misleading |
+| `@aws-sdk/s3-request-presigner` | `3.454.0` | Relax | `^3.454.0` | Low | Same as above |
+| `@keycloak/keycloak-admin-client` | `^18.0.0` | Defer | No change | N/A | API breaking changes; separate task |
+| `handsontable` | `=6.2.2` | Keep | No change | N/A | License constraint (non-MIT since v7) |
+| `@handsontable/react` | `=2.1.0` | Keep | No change | N/A | Requires handsontable >= 7 |
+
+### Implementation Layer
+
+#### UpgradeExecution
+
+| Field | Detail |
+|-------|--------|
+| Intent | Apply version changes incrementally with build verification |
+| Requirements | 4.1, 4.2, 4.3, 4.4, 4.5, 4.6 |
+
+**Responsibilities & Constraints**
+- Upgrade one package at a time to isolate failures
+- Run full verification suite (build, lint, test) after each change
+- Revert and document any package that causes failures
+- Update the `// comments for dependencies` block to reflect the new state
+
+**Upgrade Order** (lowest risk first):
+1. `@aws-sdk/*` — relax version range (no code changes)
+2. `string-width` — upgrade in @growi/editor (isolated ESM package)
+3. `bootstrap` — upgrade to ^5.3.4 (verify SCSS compilation)
+4. `escape-string-regexp` → `RegExp.escape()` — source code changes across 9 files
+5. `next-themes` — upgrade to ^0.4.x (review API changes across 12 files)
+
+**Implementation Notes**
+- After each upgrade, verify `.next/node_modules/` symlinks for Turbopack externalisation compliance (per `package-dependencies` rule)
+- For bootstrap: run `pnpm run pre:styles-commons` and `pnpm run pre:styles-components` to verify SCSS compilation
+- For next-themes: review v0.3.0 and v0.4.0 changelogs for breaking API changes before modifying code
+
+#### SourceMigration
+
+| Field | Detail |
+|-------|--------|
+| Intent | Replace all `escape-string-regexp` usage with native `RegExp.escape()` |
+| Requirements | 4.1 |
+
+**Files to Modify**:
+
+`apps/app/src/` (6 files):
+- `server/models/page.ts`
+- `server/service/page/index.ts`
+- `server/service/page-grant.ts`
+- `server/routes/apiv3/users.js`
+- `server/models/obsolete-page.js`
+- `features/openai/server/services/openai.ts`
+
+`packages/` (3 files):
+- `packages/core/src/utils/page-path-utils/` (2 files)
+- `packages/remark-lsx/src/server/routes/list-pages/index.ts`
+
+**Migration Pattern**:
+```typescript
+// Before
+import escapeStringRegexp from 'escape-string-regexp';
+const pattern = new RegExp(escapeStringRegexp(input));
+
+// After
+const pattern = new RegExp(RegExp.escape(input));
+```
+
+**Implementation Notes**
+- Remove `escape-string-regexp` from `apps/app/package.json` dependencies after migration
+- Remove from `packages/core/package.json` and `packages/remark-lsx/package.json` if listed
+- Verify `RegExp.escape()` TypeScript types are available (may need `@types/node` update or lib config)
+
+### Documentation Layer
+
+#### AuditReport
+
+| Field | Detail |
+|-------|--------|
+| Intent | Document all audit decisions for future maintainers |
+| Requirements | 5.1, 5.2, 5.3 |
+
+**Deliverables**:
+- Updated `// comments for dependencies` in package.json (only retained pins with current reasons)
+- Updated `// comments for defDependencies` (handsontable entries unchanged)
+- Summary in research.md with final decision per package
+
+**Updated Comment Blocks** (target state):
+
+```json
+{
+  "// comments for dependencies": {
+    "@keycloak/keycloak-admin-client": "19.0.0 or above exports only ESM. API breaking changes require separate migration effort.",
+    "next-themes": "(if upgrade fails) Document specific failure reason here"
+  },
+  "// comments for defDependencies": {
+    "@handsontable/react": "v3 requires handsontable >= 7.0.0.",
+    "handsontable": "v7.0.0 or above is no longer MIT license."
+  }
+}
+```
+
+Note: The exact final state depends on which upgrades succeed. If all planned upgrades pass, only `@keycloak` and `handsontable` entries remain.
+
+## Testing Strategy
+
+### Build Verification (per package)
+- `turbo run build --filter @growi/app` — Turbopack client build + tsc server build
+- `ls apps/app/.next/node_modules/ | grep <package>` — Externalisation check
+- `pnpm run pre:styles-commons` — SCSS compilation (bootstrap only)
+
+### Lint Verification (per package)
+- `turbo run lint --filter @growi/app` — TypeScript type check + Biome
+
+### Unit/Integration Tests (per package)
+- `turbo run test --filter @growi/app` — Full test suite
+- For `RegExp.escape()` migration: run tests for page model, page service, page-grant service specifically
+
+### Regression Verification (final)
+- Full build + lint + test after all upgrades applied together
+- Verify `.next/node_modules/` symlink integrity via `check-next-symlinks.sh` (if available locally)
+
+## Migration Strategy
+
+```mermaid
+flowchart LR
+    Phase1[Phase 1: Low Risk] --> Phase2[Phase 2: Medium Risk]
+    Phase1 --> P1a[aws-sdk relax range]
+    Phase1 --> P1b[string-width upgrade]
+    Phase2 --> P2a[bootstrap upgrade]
+    Phase2 --> P2b[escape-string-regexp replace]
+    Phase2 --> P2c[next-themes upgrade]
+```
+
+- **Phase 1** (low risk): @aws-sdk range relaxation, string-width upgrade — minimal code changes
+- **Phase 2** (medium risk): bootstrap, escape-string-regexp replacement, next-themes — requires code review and/or source changes
+- Each upgrade is independently revertible
+- Deferred: @keycloak (high risk, separate task)
+- No change: handsontable (license constraint)

+ 75 - 0
.kiro/specs/upgrade-fixed-packages/requirements.md

@@ -0,0 +1,75 @@
+# Requirements Document
+
+## Introduction
+
+The `apps/app/package.json` file contains several packages whose versions are intentionally pinned due to ESM-only upgrades, upstream bugs, or licensing concerns. These pinning reasons were documented in `// comments for dependencies` and `// comments for defDependencies` comment blocks. Since the build environment has significantly changed (webpack → Turbopack), and upstream issues may have been resolved, a systematic audit is needed to determine which packages can now be safely upgraded.
+
+### Pinned Packages Inventory
+
+| # | Package | Current Version | Pinning Reason |
+|---|---------|----------------|----------------|
+| 1 | `@aws-sdk/client-s3`, `@aws-sdk/s3-request-presigner` | `3.454.0` | Fix version above 3.186.0 required by mongodb@4.16.0 |
+| 2 | `@keycloak/keycloak-admin-client` | `^18.0.0` | 19.0.0+ exports only ESM |
+| 3 | `bootstrap` | `=5.3.2` | v5.3.3 has a bug (twbs/bootstrap#39798) |
+| 4 | `escape-string-regexp` | `^4.0.0` | 5.0.0+ exports only ESM |
+| 5 | `next-themes` | `^0.2.1` | 0.3.0 causes type error (pacocoursey/next-themes#122) |
+| 6 | `string-width` | `=4.2.2` | 5.0.0+ exports only ESM |
+| 7 | `@handsontable/react` | `=2.1.0` | v3 requires handsontable >= 7.0.0 |
+| 8 | `handsontable` | `=6.2.2` | v7.0.0+ is no longer MIT license |
+
+## Requirements
+
+### Requirement 1: Upstream Bug and Issue Investigation
+
+**Objective:** As a maintainer, I want to verify whether upstream bugs and issues that originally caused version pinning have been resolved, so that I can make informed upgrade decisions.
+
+#### Acceptance Criteria
+
+1. When investigating the bootstrap pinning, the audit process shall check the current status of https://github.com/twbs/bootstrap/issues/39798 and determine whether v5.3.3+ has fixed the reported bug.
+2. When investigating the next-themes pinning, the audit process shall check the current status of https://github.com/pacocoursey/next-themes/issues/122 and determine whether v0.3.0+ has resolved the type error.
+3. When investigating the @aws-sdk pinning, the audit process shall verify whether the mongodb version used in GROWI still requires the `>=3.186.0` constraint and whether the latest @aws-sdk versions are compatible.
+4. The audit process shall document the investigation result for each package, including: current upstream status, whether the original issue is resolved, and the recommended action (upgrade/keep/replace).
+
+### Requirement 2: ESM-Only Package Compatibility Assessment
+
+**Objective:** As a maintainer, I want to assess whether ESM-only versions of pinned packages are now compatible with the current Turbopack-based build environment, so that outdated CJS-only constraints can be removed.
+
+#### Acceptance Criteria
+
+1. When assessing ESM compatibility, the audit process shall evaluate each ESM-pinned package (`escape-string-regexp`, `string-width`, `@keycloak/keycloak-admin-client`) against the current build pipeline (Turbopack for client, tsc for server).
+2. When a package is used in server-side code (transpiled via tsc with `tsconfig.build.server.json`), the audit process shall verify whether the server build output format (CJS or ESM) supports importing ESM-only packages.
+3. When a package is used only in client-side code (bundled via Turbopack), the audit process shall confirm that Turbopack can resolve ESM-only packages without issues.
+4. The audit process shall produce a compatibility matrix showing each ESM-pinned package, its usage context (server/client/both), and whether upgrading to the ESM-only version is feasible.
+
+### Requirement 3: License Compliance Verification
+
+**Objective:** As a maintainer, I want to confirm that the handsontable/`@handsontable/react` licensing situation has not changed, so that I can determine whether these packages must remain pinned or can be replaced.
+
+#### Acceptance Criteria
+
+1. When evaluating handsontable, the audit process shall verify the current license of handsontable v7.0.0+ and confirm whether it remains non-MIT.
+2. If handsontable v7.0.0+ is still non-MIT, the audit process shall document that `handsontable` (`=6.2.2`) and `@handsontable/react` (`=2.1.0`) must remain pinned or an alternative library must be identified.
+3. If a MIT-licensed alternative to handsontable exists, the audit process shall note it as a potential replacement candidate (out of scope for this spec but documented for future work).
+
+### Requirement 4: Safe Upgrade Execution
+
+**Objective:** As a maintainer, I want to upgrade packages that are confirmed safe to update, so that the project benefits from bug fixes, security patches, and new features.
+
+#### Acceptance Criteria
+
+1. When upgrading a pinned package, the upgrade process shall update the version specifier in `apps/app/package.json` and remove or update the corresponding entry in the `// comments for dependencies` or `// comments for defDependencies` block.
+2. When a package is upgraded, the upgrade process shall verify that `turbo run build --filter @growi/app` completes successfully.
+3. When a package is upgraded, the upgrade process shall verify that `turbo run lint --filter @growi/app` completes without new errors.
+4. When a package is upgraded, the upgrade process shall verify that `turbo run test --filter @growi/app` passes without new failures.
+5. If a package upgrade causes build, lint, or test failures, the upgrade process shall revert that specific package change and document the failure reason.
+6. When all upgrades are complete, the `// comments for dependencies` and `// comments for defDependencies` blocks shall accurately reflect only the packages that remain pinned, with updated reasons if applicable.
+
+### Requirement 5: Audit Documentation
+
+**Objective:** As a maintainer, I want a clear record of the audit results, so that future maintainers understand which packages were evaluated and why decisions were made.
+
+#### Acceptance Criteria
+
+1. The audit process shall produce a summary table documenting each pinned package with: package name, previous version, new version (or "unchanged"), and rationale for the decision.
+2. When a package remains pinned, the documentation shall include the verified reason for continued pinning.
+3. When a package is upgraded, the documentation shall note what changed upstream that made the upgrade possible.

+ 183 - 0
.kiro/specs/upgrade-fixed-packages/research.md

@@ -0,0 +1,183 @@
+# Research & Design Decisions
+
+---
+**Purpose**: Capture discovery findings for the pinned package audit and upgrade initiative.
+**Usage**: Inform design.md decisions; provide evidence for future maintainers.
+---
+
+## Summary
+- **Feature**: `upgrade-fixed-packages`
+- **Discovery Scope**: Extension (auditing existing dependency constraints)
+- **Key Findings**:
+  - Bootstrap bug (#39798) fixed in v5.3.4 — safe to upgrade to latest 5.3.x
+  - next-themes original issue (#122) was resolved long ago; upgrade to v0.4.x feasible but has Next.js 16 `cacheComponents` caveat
+  - Node.js ^24 enables stable `require(esm)`, unlocking ESM-only package upgrades for server code
+  - `escape-string-regexp` can be replaced entirely by native `RegExp.escape()` (ES2026, Node.js 24)
+  - handsontable license situation unchanged — must remain pinned at 6.2.2
+  - @aws-sdk pinning comment is misleading; packages can be freely upgraded
+
+## Research Log
+
+### Bootstrap v5.3.3 Bug (#39798)
+- **Context**: bootstrap pinned at `=5.3.2` due to modal header regression in v5.3.3
+- **Sources Consulted**: https://github.com/twbs/bootstrap/issues/39798, https://github.com/twbs/bootstrap/pull/41336
+- **Findings**:
+  - Issue CLOSED on 2025-04-03
+  - Fixed in v5.3.4 via PR #41336 (Fix modal and offcanvas header collapse)
+  - Bug: `.modal-header` lost `justify-content: space-between`, causing content collapse
+  - Latest stable: v5.3.8 (August 2025)
+- **Implications**: Safe to upgrade from `=5.3.2` to `^5.3.4`. Skip v5.3.3 entirely. Recommend `^5.3.4` or pin to latest `=5.3.8`.
+
+### next-themes Type Error (#122)
+- **Context**: next-themes pinned at `^0.2.1` due to reported type error in v0.3.0
+- **Sources Consulted**: https://github.com/pacocoursey/next-themes/issues/122, https://github.com/pacocoursey/next-themes/issues/375
+- **Findings**:
+  - Issue #122 CLOSED on 2022-06-02 — was specific to an old beta version (v0.0.13-beta.3), not v0.3.0
+  - The pinning reason was based on incomplete information; v0.2.0+ already had the fix
+  - Latest: v0.4.6 (March 2025). Peers: `react ^16.8 || ^17 || ^18 || ^19`
+  - **Caveat**: Issue #375 reports a bug with Next.js 16's `cacheComponents` feature — stale theme values when cached components reactivate
+  - PR #377 in progress to fix via `useSyncExternalStore`
+  - Without `cacheComponents`, v0.4.6 works fine with Next.js 16
+- **Implications**: Upgrade to v0.4.x is feasible. GROWI uses Pages Router (not App Router), so `cacheComponents` is likely not relevant. Breaking API changes between v0.2 → v0.4 need review. Used in 12 files across apps/app.
+
+### ESM-only Package Compatibility (escape-string-regexp, string-width, @keycloak)
+- **Context**: Three packages pinned to CJS-compatible versions because newer versions are ESM-only
+- **Sources Consulted**: Node.js v22.12.0 release notes (require(esm) enabled by default), TC39 RegExp.escape Stage 4, sindresorhus ESM guidance, npm package pages
+- **Findings**:
+
+  **escape-string-regexp** (^4.0.0):
+  - Used in 6 server-side files + 3 shared package files (all server context)
+  - Node.js 24 has stable `require(esm)` — ESM-only v5 would work
+  - **Better**: `RegExp.escape()` is ES2026 Stage 4, natively available in Node.js 24 (V8 support)
+  - Can eliminate the dependency entirely
+
+  **string-width** (=4.2.2):
+  - Used only in `packages/editor/src/models/markdown-table.js`
+  - `@growi/editor` has `"type": "module"` and builds with Vite (ESM context)
+  - No server-side value imports (only type imports in `sync-ydoc.ts`, erased at compile time)
+  - Safe to upgrade to v7.x
+
+  **@keycloak/keycloak-admin-client** (^18.0.0):
+  - Used in 1 server-side file: `features/external-user-group/server/service/keycloak-user-group-sync.ts`
+  - Latest: v26.5.5 (February 2026)
+  - `require(esm)` in Node.js 24 should handle it, but API has significant breaking changes (v18 → v26)
+  - Sub-path exports need verification
+  - Higher risk upgrade — API surface changes expected
+
+- **Implications**: string-width is the easiest upgrade. escape-string-regexp should be replaced by native `RegExp.escape()`. @keycloak requires careful API migration and is higher risk.
+
+### @aws-sdk Pinning Analysis
+- **Context**: @aws-sdk/client-s3 and @aws-sdk/s3-request-presigner pinned at 3.454.0
+- **Sources Consulted**: mongodb package.json, npm registry, GROWI source code
+- **Findings**:
+  - Pinning comment says "required by mongodb@4.16.0" but is misleading
+  - mongodb@4.17.2 has `@aws-sdk/credential-providers: ^3.186.0` as **optional** dependency — a different package
+  - The S3 client packages are used directly by GROWI for file upload (server/service/file-uploader/aws/)
+  - Latest: @aws-sdk/client-s3@3.1014.0 (March 2026) — over 500 versions behind
+  - AWS SDK v3 follows semver; any 3.x should be compatible
+- **Implications**: Remove the misleading comment. Change from exact `3.454.0` to `^3.454.0` or update to latest. Low risk.
+
+### Handsontable License Status
+- **Context**: handsontable pinned at =6.2.2 (last MIT version), @handsontable/react at =2.1.0
+- **Sources Consulted**: handsontable.com/docs/software-license, npm, Hacker News discussion
+- **Findings**:
+  - v7.0.0+ (March 2019) switched from MIT to proprietary license — unchanged as of 2026
+  - Free "Hobby" license exists but restricted to non-commercial personal use
+  - Commercial use requires paid subscription
+  - MIT alternatives: AG Grid Community (most mature), Jspreadsheet CE, Univer (Apache 2.0)
+- **Implications**: Must remain pinned. No action possible without license purchase or library replacement. Library replacement is out of scope for this spec.
+
+## Design Decisions
+
+### Decision: Replace escape-string-regexp with native RegExp.escape()
+- **Context**: escape-string-regexp v5 is ESM-only; used in 9 files across server code
+- **Alternatives Considered**:
+  1. Upgrade to v5 with require(esm) support — works but adds unnecessary dependency
+  2. Replace with native `RegExp.escape()` — zero dependencies, future-proof
+- **Selected Approach**: Replace with `RegExp.escape()`
+- **Rationale**: Node.js 24 supports `RegExp.escape()` natively (ES2026 Stage 4). Eliminates a dependency entirely.
+- **Trade-offs**: Requires touching 9 files, but changes are mechanical (find-and-replace)
+- **Follow-up**: Verify `RegExp.escape()` is available in the project's Node.js 24 target
+
+### Decision: Upgrade string-width directly to v7.x
+- **Context**: Used only in @growi/editor (ESM package, Vite-bundled, client-only)
+- **Selected Approach**: Direct upgrade to latest v7.x
+- **Rationale**: Consumer is already ESM; zero CJS concern
+- **Trade-offs**: None significant; API is stable
+
+### Decision: Upgrade bootstrap to ^5.3.4
+- **Context**: Bug fixed in v5.3.4; latest is 5.3.8
+- **Selected Approach**: Change from `=5.3.2` to `^5.3.4`
+- **Rationale**: Original bug resolved; skip v5.3.3
+- **Trade-offs**: Need to verify GROWI's custom SCSS and modal usage against 5.3.4+ changes
+
+### Decision: Upgrade next-themes to latest 0.4.x
+- **Context**: Original issue was a misunderstanding; latest is v0.4.6
+- **Selected Approach**: Upgrade to `^0.4.4` (or latest)
+- **Rationale**: Issue #122 was specific to old beta, not v0.3.0. GROWI uses Pages Router, so cacheComponents bug is not relevant.
+- **Trade-offs**: Breaking API changes between v0.2 → v0.4 need review. 12 files import from next-themes.
+- **Follow-up**: Review v0.3.0 and v0.4.0 changelogs for breaking changes
+
+### Decision: Relax @aws-sdk version to caret range
+- **Context**: Pinning was based on misleading comment; packages are independent of mongodb constraint
+- **Selected Approach**: Change from `3.454.0` to `^3.454.0`
+- **Rationale**: AWS SDK v3 follows semver; the comment conflated credential-providers with S3 client
+- **Trade-offs**: Low risk. Conservative approach keeps minimum at 3.454.0.
+
+### Decision: Defer @keycloak upgrade (high risk)
+- **Context**: v18 → v26 has significant API breaking changes; only 1 file affected
+- **Selected Approach**: Document as upgradeable but defer to a separate task
+- **Rationale**: API migration requires Keycloak server compatibility testing; out of proportion for a batch upgrade task
+- **Trade-offs**: Remains on old version longer, but isolated to one feature
+
+### Decision: Keep handsontable pinned (license constraint)
+- **Context**: v7+ is proprietary; no free alternative that's drop-in
+- **Selected Approach**: No change. Document for future reference.
+- **Rationale**: License constraint is permanent unless library is replaced entirely
+- **Trade-offs**: None — this is a business/legal decision, not technical
+
+## Risks & Mitigations
+- **Bootstrap SCSS breakage**: v5.3.4+ may have SCSS variable changes → Run `pre:styles-commons` and `pre:styles-components` builds to verify
+- **next-themes API changes**: v0.2 → v0.4 has breaking changes → Review changelog; test all 12 consuming files
+- **RegExp.escape() availability**: Ensure Node.js 24 V8 includes it → Verify with simple runtime test
+- **@aws-sdk transitive dependency changes**: Newer AWS SDK may pull different transitive deps → Monitor bundle size
+- **Build regression**: Any upgrade could break Turbopack build → Follow incremental upgrade strategy with build verification per package
+
+## Future Considerations (Out of Scope)
+
+### transpilePackages cleanup in next.config.ts
+- **Context**: `next.config.ts` defines `getTranspilePackages()` listing 60+ ESM-only packages to force Turbopack to bundle them instead of externalising. The original comment says: "listing ESM packages until experimental.esmExternals works correctly to avoid ERR_REQUIRE_ESM".
+- **Relationship to require(esm)**: `transpilePackages` and `require(esm)` solve different problems. `transpilePackages` prevents Turbopack from externalising packages during SSR; `require(esm)` allows Node.js to load ESM packages via `require()` at runtime. With Node.js 24's stable `require(esm)`, externalised ESM packages *should* load correctly in SSR, meaning some `transpilePackages` entries may become unnecessary.
+- **Why not now**: (1) Turbopack's `esmExternals` handling is still `experimental`; (2) removing entries shifts packages from bundled to externalised, which means they appear in `.next/node_modules/` and must be classified as `dependencies` per the `package-dependencies` rule; (3) 60+ packages need individual verification. This is a separate investigation with a large blast radius.
+- **Recommendation**: Track as a separate task. Test by removing a few low-risk entries (e.g., `bail`, `ccount`, `zwitch`) and checking whether SSR still works with Turbopack externalisation + Node.js 24 `require(esm)`.
+
+## References
+- [Bootstrap issue #39798](https://github.com/twbs/bootstrap/issues/39798) — modal header regression, fixed in v5.3.4
+- [next-themes issue #122](https://github.com/pacocoursey/next-themes/issues/122) — type error, resolved in v0.2.0
+- [next-themes issue #375](https://github.com/pacocoursey/next-themes/issues/375) — Next.js 16 cacheComponents bug
+- [TC39 RegExp.escape() Stage 4](https://socket.dev/blog/tc39-advances-3-proposals-to-stage-4-regexp-escaping-float16array-and-redeclarable-global-eval) — ES2026
+- [Node.js require(esm) stability](https://joyeecheung.github.io/blog/2025/12/30/require-esm-in-node-js-from-experiment-to-stability/) — stable since Node.js 22.12.0
+- [Handsontable license change](https://handsontable.com/docs/javascript-data-grid/software-license/) — proprietary since v7.0.0
+
+## Final Audit Summary (2026-03-23)
+
+| Package | Previous Version | New Version | Action | Rationale |
+|---------|-----------------|-------------|--------|-----------|
+| `@aws-sdk/client-s3` | `3.454.0` | `^3.1014.0` | Upgraded | Pinning comment was misleading; S3 client is independent of mongodb constraint |
+| `@aws-sdk/s3-request-presigner` | `3.454.0` | `^3.1014.0` | Upgraded | Same as above |
+| `bootstrap` | `=5.3.2` | `^5.3.8` | Upgraded | Bug #39798 fixed in v5.3.4; SCSS compilation verified |
+| `escape-string-regexp` | `^4.0.0` | Removed | Replaced | Native `RegExp.escape()` (ES2026, Node.js 24) eliminates the dependency |
+| `string-width` | `=4.2.2` | `^7.0.0` | Upgraded | Used only in @growi/editor (ESM context, Vite-bundled) |
+| `next-themes` | `^0.2.1` | `^0.4.6` | Upgraded | Original issue #122 was misattributed; only change needed: type import path |
+| `@keycloak/keycloak-admin-client` | `^18.0.0` | Unchanged | Deferred | API breaking changes (v18→v26) require separate migration effort |
+| `handsontable` | `=6.2.2` | Unchanged | Kept | v7.0.0+ is proprietary (non-MIT license) |
+| `@handsontable/react` | `=2.1.0` | Unchanged | Kept | Requires handsontable >= 7.0.0 |
+
+### Additional Changes
+
+- Added `RegExp.escape()` TypeScript type declarations in `apps/app/src/@types/`, `packages/core/src/@types/`, and `packages/remark-lsx/src/@types/` (awaiting TypeScript built-in support)
+- Updated `tsconfig.build.client.json` to include `src/@types/**/*.d.ts` for Next.js build compatibility
+- Updated `generate-children-regexp.spec.ts` test expectations for `RegExp.escape()` output (escapes spaces as `\x20`)
+- Removed `escape-string-regexp` from `transpilePackages` in `next.config.ts`
+- Updated `bootstrap` version across 5 packages: apps/app, packages/editor, packages/core-styles, packages/preset-themes, apps/slackbot-proxy
+- Updated `// comments for dependencies` to retain only `@keycloak` entry with updated reason

+ 22 - 0
.kiro/specs/upgrade-fixed-packages/spec.json

@@ -0,0 +1,22 @@
+{
+  "feature_name": "upgrade-fixed-packages",
+  "created_at": "2026-03-23T00:00:00Z",
+  "updated_at": "2026-03-23T00:00:00Z",
+  "language": "en",
+  "phase": "implementation-complete",
+  "approvals": {
+    "requirements": {
+      "generated": true,
+      "approved": true
+    },
+    "design": {
+      "generated": true,
+      "approved": true
+    },
+    "tasks": {
+      "generated": true,
+      "approved": true
+    }
+  },
+  "ready_for_implementation": true
+}

+ 89 - 0
.kiro/specs/upgrade-fixed-packages/tasks.md

@@ -0,0 +1,89 @@
+# Implementation Plan
+
+- [x] 1. Pre-implementation verification
+- [x] 1.1 Verify RegExp.escape() availability and TypeScript support
+  - Confirm `RegExp.escape()` is available at runtime in the project's Node.js 24 target
+  - Check whether TypeScript recognizes `RegExp.escape()` — may need `lib` config update or `@types/node` update
+  - If unavailable, fall back to upgrading `escape-string-regexp` to v5 with `require(esm)` instead
+  - _Requirements: 2.2_
+
+- [x] 1.2 Review next-themes v0.3.0 and v0.4.0 breaking API changes
+  - Read changelogs for v0.3.0 and v0.4.0 releases to identify breaking changes
+  - Map breaking changes to the 12 consuming files in apps/app
+  - Determine migration effort and document required code changes
+  - Confirm GROWI's Pages Router usage is unaffected by the cacheComponents bug (issue #375)
+  - _Requirements: 1.2_
+
+- [x] 2. Low-risk package upgrades
+- [x] 2.1 (P) Relax @aws-sdk version range
+  - Change `@aws-sdk/client-s3` from `3.454.0` to `^3.1014.0` in apps/app/package.json
+  - Change `@aws-sdk/s3-request-presigner` from `3.454.0` to `^3.1014.0`
+  - Update the misleading `"@aws-sdk/*"` comment to reflect the actual reason or remove it
+  - Run `pnpm install` and verify build with `turbo run build --filter @growi/app`
+  - Run `turbo run test --filter @growi/app` to confirm no regressions
+  - _Requirements: 1.3, 4.1, 4.2, 4.4_
+
+- [x] 2.2 (P) Upgrade string-width in @growi/editor
+  - Update `string-width` from `=4.2.2` to `^7.0.0` in packages/editor/package.json
+  - Verify @growi/editor builds successfully (Vite, ESM context)
+  - Run `turbo run build --filter @growi/app` to confirm downstream build passes
+  - Run `turbo run test --filter @growi/app` to confirm no regressions
+  - Remove the `string-width` comment from apps/app/package.json `// comments for dependencies`
+  - _Requirements: 2.1, 2.3, 4.1, 4.2, 4.4_
+
+- [x] 3. Upgrade bootstrap to ^5.3.8
+  - Change `bootstrap` from `=5.3.2` to `^5.3.8` in apps/app/package.json and all other packages
+  - Run `pnpm install` to resolve the new version
+  - Run `pnpm run pre:styles-commons` and `pnpm run pre:styles-components` to verify SCSS compilation
+  - Run `turbo run build --filter @growi/app` to confirm Turbopack build passes
+  - Run `turbo run lint --filter @growi/app` to check for type or lint errors
+  - Run `turbo run test --filter @growi/app` to confirm no regressions
+  - Visually inspect modal headers if a dev server is available (original bug was modal header layout)
+  - Remove the `bootstrap` comment from `// comments for dependencies`
+  - If build or SCSS fails, revert and document the failure reason
+  - _Requirements: 1.1, 4.1, 4.2, 4.3, 4.4, 4.5_
+
+- [x] 4. Replace escape-string-regexp with native RegExp.escape()
+- [x] 4.1 Migrate all source files from escape-string-regexp to RegExp.escape()
+  - Replace `import escapeStringRegexp from 'escape-string-regexp'` and corresponding calls with `RegExp.escape()` in each file
+  - Files in apps/app/src: page.ts, page/index.ts, page-grant.ts, users.js, obsolete-page.js, openai.ts (6 files)
+  - Files in packages: core/src/utils/page-path-utils (2 files), remark-lsx/src/server/routes/list-pages/index.ts (1 file)
+  - Ensure each replacement preserves the exact same escaping behavior
+  - _Requirements: 4.1_
+
+- [x] 4.2 Remove escape-string-regexp dependency and verify
+  - Remove `escape-string-regexp` from apps/app/package.json dependencies
+  - Remove from packages/core and packages/remark-lsx package.json if listed
+  - Remove the `escape-string-regexp` comment from `// comments for dependencies`
+  - Remove `escape-string-regexp` entry from `transpilePackages` in next.config.ts
+  - Run `pnpm install` to update lockfile
+  - Run `turbo run build --filter @growi/app` to verify build
+  - Run `turbo run lint --filter @growi/app` to verify no type errors
+  - Run `turbo run test --filter @growi/app` to verify no regressions
+  - If RegExp.escape() has TypeScript issues, add type declaration or adjust lib config
+  - _Requirements: 2.1, 2.2, 4.1, 4.2, 4.3, 4.4, 4.5_
+
+- [x] 5. Upgrade next-themes to ^0.4.x
+- [x] 5.1 Update next-themes and adapt consuming code
+  - Change `next-themes` from `^0.2.1` to `^0.4.6` in apps/app/package.json
+  - Apply required API migration changes across the 12 consuming files identified in design
+  - Pay attention to any renamed exports, changed hook signatures, or provider prop changes
+  - Ensure `useTheme()` and `ThemeProvider` usage is compatible with v0.4.x API
+  - _Requirements: 1.2, 4.1_
+
+- [x] 5.2 Verify next-themes upgrade
+  - Run `turbo run build --filter @growi/app` to confirm build passes
+  - Run `turbo run lint --filter @growi/app` to check for type errors (original pinning was about types)
+  - Run `turbo run test --filter @growi/app` to confirm no regressions
+  - Remove the `next-themes` comment from `// comments for dependencies`
+  - If build or type errors occur, investigate whether the issue is the same as #122 or a new problem
+  - If upgrade fails, revert and document the reason; keep the pin with an updated comment
+  - _Requirements: 4.2, 4.3, 4.4, 4.5, 4.6_
+
+- [x] 6. Finalize audit documentation and comment blocks
+  - Verify `// comments for dependencies` block contains only packages that remain pinned (@keycloak if unchanged)
+  - Verify `// comments for defDependencies` block is accurate (handsontable entries unchanged)
+  - Update comment text to reflect current reasons where applicable
+  - Produce a final summary table in research.md documenting: package name, previous version, new version or "unchanged", and rationale
+  - Confirm all requirements are satisfied by reviewing the checklist against actual changes made
+  - _Requirements: 3.1, 3.2, 4.6, 5.1, 5.2, 5.3_

+ 14 - 8
.kiro/steering/structure.md

@@ -1,14 +1,20 @@
 # Project Structure
 
-@.claude/skills/monorepo-overview/SKILL.md
+See: `.claude/skills/monorepo-overview/SKILL.md` (auto-loaded by Claude Code)
 
 ## cc-sdd Specific Notes
 
-### Specification Storage
-- All specifications are stored in `.kiro/specs/{feature-name}/`
-- Each spec contains: `spec.json`, `requirements.md`, `design.md`, `tasks.md`
+### Server-Client Boundary Enforcement
 
-### Feature Placement
-When implementing new features via `/kiro:spec-impl`:
-- Create feature modules in `src/features/{feature-name}/`
-- Follow the server-client separation pattern documented in the skill above
+In full-stack packages (e.g., `apps/app`), server-side code (`src/server/`, models with mongoose) must NOT be imported from client components. This causes module leakage — server-only dependencies get pulled into the client bundle.
+
+- **Pattern**: If a client component needs functionality from a server module, extract the client-safe logic into a shared utility (`src/utils/` or `src/client/util/`)
+
+For apps/app-specific examples and build tooling details, see `apps/app/.claude/skills/build-optimization/SKILL.md`.
+
+### The Positioning of @growi/core
+
+See: `.claude/skills/monorepo-overview/SKILL.md` — "@growi/core — Domain & Utilities Hub" section
+
+---
+_Updated: 2026-03-24. @growi/core details moved to monorepo-overview SKILL.md (auto-loaded)._

+ 4 - 18
.kiro/steering/tdd.md

@@ -1,22 +1,8 @@
 # Test-Driven Development
 
-@.claude/commands/tdd.md
-@.claude/skills/testing-patterns-with-vitest/SKILL.md
+See: `.claude/commands/tdd.md`, `.claude/skills/learned/essential-test-patterns/SKILL.md` and `.claude/skills/learned/essential-test-design/SKILL.md`
 
-## cc-sdd Integration
+## cc-sdd Specific Notes
 
-### TDD in spec-impl Workflow
-When executing `/kiro:spec-impl`, the TDD cycle is mandatory:
-
-1. **Each task → TDD cycle**: RED → GREEN → REFACTOR
-2. **Tests trace to requirements**: Test names should reference EARS requirement IDs
-3. **Coverage gates completion**: Task is not complete until coverage targets met
-
-### Validation Before Task Completion
-```bash
-# Verify tests pass
-turbo run test --filter {package}
-
-# Check coverage (80% minimum)
-cd {package_dir} && pnpm vitest run --coverage src/utils/page-path-validator.spec.ts
-```
+Currently, there are no additional instructions specific to Kiro.
+If instructions specific to the cc-sdd workflow are needed in the future, add them to this section.

+ 53 - 9
.kiro/steering/tech.md

@@ -1,15 +1,59 @@
 # Technology Stack
 
-@.claude/skills/tech-stack/SKILL.md
+See: `.claude/skills/tech-stack/SKILL.md` (auto-loaded by Claude Code)
 
 ## cc-sdd Specific Notes
 
-### Specification Language
-All spec files (requirements.md, design.md, tasks.md) should be written in English unless explicitly configured otherwise in spec.json.
+### Bundler Strategy (Project-Wide Decision)
 
-### Build Verification
-Before marking tasks complete in `/kiro:spec-impl`, ensure:
-```bash
-turbo run lint --filter @growi/app
-turbo run test --filter @growi/app
-```
+GROWI uses **Turbopack** (Next.js 16 default) for **both development and production builds** (`next build` without flags). Webpack fallback is available via `USE_WEBPACK=1` environment variable for debugging only. All custom webpack loaders/plugins have been migrated to Turbopack equivalents (`turbopack.rules`, `turbopack.resolveAlias`). See `apps/app/.claude/skills/build-optimization/SKILL.md` for details.
+
+### Import Optimization Principles
+
+To prevent module count regression across the monorepo:
+
+- **Subpath imports over barrel imports** — e.g., `import { format } from 'date-fns/format'` instead of `from 'date-fns'`
+- **Lightweight replacements** — prefer small single-purpose packages over large multi-feature libraries
+- **Server-client boundary** — never import server-only code from client modules; extract client-safe utilities if needed
+
+### Turbopack Externalisation Rule (`apps/app/package.json`)
+
+**Any package that is reachable via a static `import` statement in SSR-executed code must be listed under `dependencies`, not `devDependencies`.**
+
+Turbopack externalises such packages to `.next/node_modules/` (symlinks into the pnpm store). `pnpm deploy --prod` only includes `dependencies`; packages in `devDependencies` are absent from the deploy output, causing `ERR_MODULE_NOT_FOUND` at production server startup.
+
+**SSR-executed code** = any module that Turbopack statically traces from a Pages Router page component, `_app.page.tsx`, or a server-side utility — without crossing a `dynamic(() => import(...), { ssr: false })` boundary.
+
+**Making a package devDep-eligible:**
+1. Wrap the consuming component with `dynamic(() => import('...'), { ssr: false })`, **or**
+2. Replace the runtime dependency with a static asset (e.g., extract data to a committed JSON file), **or**
+3. Change the import to a dynamic `import()` inside a `useEffect` (browser-only execution).
+
+**Packages justified to stay in `dependencies`** (SSR-reachable static imports as of v7.5):
+- `react-toastify` — `toastr.ts` static `{ toast }` import reachable from SSR pages; async refactor would break API surface
+- `bootstrap` — still externalised despite `useEffect`-guarded `import()` in `_app.page.tsx`; Turbopack traces call sites statically
+- `diff2html` — still externalised despite `ssr: false` on `RevisionDiff`; static import analysis reaches it
+- `react-dnd`, `react-dnd-html5-backend` — still externalised despite DnD provider wrapped with `ssr: false`
+- `@handsontable/react` — still externalised despite `useEffect` dynamic import in `HandsontableModal`
+- `i18next-http-backend`, `i18next-localstorage-backend`, `react-dropzone` — no direct `src/` imports but appear via transitive imports
+- `@codemirror/state`, `@headless-tree/*`, `@tanstack/react-virtual`, `downshift`, `fastest-levenshtein`, `pretty-bytes`, `react-copy-to-clipboard`, `react-hook-form`, `react-input-autosize`, `simplebar-react` — statically imported in SSR-rendered components
+
+### Production Assembly Pattern
+
+`assemble-prod.sh` produces the release artifact via **workspace-root staging** (not `apps/app/` staging):
+
+```
+pnpm deploy out --prod --legacy   → self-contained out/node_modules/ (pnpm v10)
+rm -rf node_modules
+mv out/node_modules node_modules  → workspace root is now prod-only
+ln -sfn ../../node_modules apps/app/node_modules  → compatibility symlink
+```
+
+The release image includes `node_modules/` at workspace root alongside `apps/app/`. Turbopack's `.next/node_modules/` symlinks (pointing `../../../../node_modules/.pnpm/`) resolve naturally without any sed-based rewriting. `apps/app/node_modules` is a symlink to `../../node_modules` for migration script and Node.js `require()` compatibility.
+
+**pnpm version sensitivity**: `--legacy` produces self-contained symlinks in pnpm v10+. Downgrading below v10 may break the assembly. After running `assemble-prod.sh` locally, run `pnpm install` to restore the development environment.
+
+For apps/app-specific build optimization details (webpack config, null-loader rules, SuperJSON architecture, module count KPI), see `apps/app/.claude/skills/build-optimization/SKILL.md`.
+
+---
+_Updated: 2026-03-17. Turbopack now used for production builds; expanded justified-deps list; added Production Assembly Pattern._

+ 1 - 21
.mcp.json

@@ -1,23 +1,3 @@
 {
-  "mcpServers": {
-    "context7": {
-      "type": "http",
-      "url": "https://mcp.context7.com/mcp"
-    },
-    "serena": {
-      "type": "stdio",
-      "command": "uvx",
-      "args": [
-        "--from",
-        "git+https://github.com/oraios/serena",
-        "serena-mcp-server",
-        "--context",
-        "claude-code",
-        "--project",
-        ".",
-        "--enable-web-dashboard=false"
-      ],
-      "env": {}
-    }
-  }
+  "mcpServers": {}
 }

+ 0 - 2
.npmrc

@@ -1,2 +0,0 @@
-# see: https://pnpm.io/next/npmrc#force-legacy-deploy
-force-legacy-deploy=true

+ 64 - 8
.serena/project.yml

@@ -1,9 +1,3 @@
-# language of the project (csharp, python, rust, java, typescript, go, cpp, or ruby)
-#  * For C, use cpp
-#  * For JavaScript, use typescript
-# Special requirements:
-#  * csharp: Requires the presence of a .sln file in the project folder.
-language: typescript
 
 # whether to use the project's gitignore file to ignore files
 # Added on 2025-04-07
@@ -59,10 +53,72 @@ read_only: false
 #  * `think_about_task_adherence`: Thinking tool for determining whether the agent is still on track with the current task.
 #  * `think_about_whether_you_are_done`: Thinking tool for determining whether the task is truly completed.
 #  * `write_memory`: Writes a named memory (for future reference) to Serena's project-specific memory store.
-excluded_tools: ["check_onboarding_performed", "execute_shell_command", "initial_instructions", "onboarding", "prepare_for_new_conversation", "read_memory", "write_memory", "list_memories", "delete_memory"]
+excluded_tools:
+- "check_onboarding_performed"
+- "execute_shell_command"
+- "initial_instructions"
+- "onboarding"
+- "prepare_for_new_conversation"
+- "read_memory"
+- "write_memory"
+- "list_memories"
+- "delete_memory"
 
 # initial prompt for the project. It will always be given to the LLM upon activating the project
 # (contrary to the memories, which are loaded on demand).
 initial_prompt: ""
-
+# the name by which the project can be referenced within Serena
 project_name: "growi"
+
+# list of mode names to that are always to be included in the set of active modes
+# The full set of modes to be activated is base_modes + default_modes.
+# If the setting is undefined, the base_modes from the global configuration (serena_config.yml) apply.
+# Otherwise, this setting overrides the global configuration.
+# Set this to [] to disable base modes for this project.
+# Set this to a list of mode names to always include the respective modes for this project.
+base_modes:
+
+# list of mode names that are to be activated by default.
+# The full set of modes to be activated is base_modes + default_modes.
+# If the setting is undefined, the default_modes from the global configuration (serena_config.yml) apply.
+# Otherwise, this overrides the setting from the global configuration (serena_config.yml).
+# This setting can, in turn, be overridden by CLI parameters (--mode).
+default_modes:
+
+# list of tools to include that would otherwise be disabled (particularly optional tools that are disabled by default)
+included_optional_tools: []
+
+# fixed set of tools to use as the base tool set (if non-empty), replacing Serena's default set of tools.
+# This cannot be combined with non-empty excluded_tools or included_optional_tools.
+fixed_tools: []
+
+# the encoding used by text files in the project
+# For a list of possible encodings, see https://docs.python.org/3.11/library/codecs.html#standard-encodings
+encoding: utf-8
+
+
+# list of languages for which language servers are started; choose from:
+#   al                  bash                clojure             cpp                 csharp
+#   csharp_omnisharp    dart                elixir              elm                 erlang
+#   fortran             fsharp              go                  groovy              haskell
+#   java                julia               kotlin              lua                 markdown
+#   matlab              nix                 pascal              perl                php
+#   powershell          python              python_jedi         r                   rego
+#   ruby                ruby_solargraph     rust                scala               swift
+#   terraform           toml                typescript          typescript_vts      vue
+#   yaml                zig
+#   (This list may be outdated. For the current list, see values of Language enum here:
+#   https://github.com/oraios/serena/blob/main/src/solidlsp/ls_config.py
+#   For some languages, there are alternative language servers, e.g. csharp_omnisharp, ruby_solargraph.)
+# Note:
+#   - For C, use cpp
+#   - For JavaScript, use typescript
+#   - For Free Pascal/Lazarus, use pascal
+# Special requirements:
+#   Some languages require additional setup/installations.
+#   See here for details: https://oraios.github.io/serena/01-about/020_programming-languages.html#language-servers
+# When using multiple languages, the first language server that supports a given file will be used for that file.
+# The first language is the default language and the respective language server will be used as a fallback.
+# Note that when using the JetBrains backend, language servers are not used and this list is correspondingly ignored.
+languages:
+- typescript

+ 3 - 6
AGENTS.md

@@ -4,7 +4,7 @@ GROWI is a team collaboration wiki platform built with Next.js, Express, and Mon
 
 ## Language Policy
 
-**Response Language**: If we detect at the beginning of a conversation that the user's primary language is not English, we will always respond in that language. However, technical terms may remain in English when appropriate.
+**Response Language**: If the user writes in a non-English language at any point in the conversation, always respond in that language from that point onward. This rule takes **absolute priority** over any other language instructions, including skill/command prompts or context documents written in English.
 
 **Code Comments**: When generating source code, all comments and explanations within the code must be written in English, regardless of the conversation language.
 
@@ -24,7 +24,6 @@ Technical information is available in **Claude Code Skills** (`.claude/skills/`)
 |-------|-------------|
 | **monorepo-overview** | Monorepo structure, workspace organization, Changeset versioning |
 | **tech-stack** | Technology stack, pnpm/Turborepo, TypeScript, Biome |
-| **testing-patterns-with-vitest** | Vitest, React Testing Library, vitest-mock-extended |
 
 **Rules** (always applied):
 
@@ -117,8 +116,7 @@ Always execute these checks:
 
 ```bash
 # From workspace root (recommended)
-turbo run lint:typecheck --filter @growi/app
-turbo run lint:biome --filter @growi/app
+turbo run lint --filter @growi/app
 turbo run test --filter @growi/app
 turbo run build --filter @growi/app
 ```
@@ -126,8 +124,7 @@ turbo run build --filter @growi/app
 Or from apps/app directory:
 
 ```bash
-pnpm run lint:typecheck
-pnpm run lint:biome
+pnpm run lint
 pnpm run test
 pnpm run build
 ```

+ 48 - 1
CHANGELOG.md

@@ -1,9 +1,56 @@
 # Changelog
 
-## [Unreleased](https://github.com/growilabs/compare/v7.4.4...HEAD)
+## [Unreleased](https://github.com/growilabs/compare/v7.4.7...HEAD)
 
 *Please do not manually update this file. We've automated the process.*
 
+## [v7.4.7](https://github.com/growilabs/compare/v7.4.6...v7.4.7) - 2026-03-23
+
+### 💎 Features
+
+* feat: Prevent inline mime type sniffing vulnerabilities (#10087) @arvid-e
+
+### 🐛 Bug Fixes
+
+* fix(editor): Disable bracketMatching to prevent IME composition rendering corruption (#10900) @miya
+
+## [v7.4.6](https://github.com/growilabs/compare/v7.4.5...v7.4.6) - 2026-03-10
+
+### 🐛 Bug Fixes
+
+* fix: mobile editor page title display (#10712) @satof3
+* fix: Exclude user page data from search response when user pages are disabled (#10740) @arvid-e
+* fix(search): prevent Downshift from intercepting Home/End keys in search input (#10815) @yuki-takei
+* fix: Openai thread IDOR (#10806) @ryotaro-nagahara
+
+### 🧰 Maintenance
+
+* support(claude): Add SessionStart hook for Claude Code on the web (#10816) @yuki-takei
+
+## [v7.4.5](https://github.com/growilabs/compare/v7.4.4...v7.4.5) - 2026-02-19
+
+### 💎 Features
+
+* feat: Realtime Increment View Count Without Refreshing Pages (#10760) @ryotaro-nagahara
+
+### 🚀 Improvement
+
+* imprv: Unchanged revision (#10770) @yuki-takei
+* imprv: Close the Sidebar in drawer mode when the route changes (#10763) @yuki-takei
+
+### 🐛 Bug Fixes
+
+* fix: Use currentPageId for share link page fetching (#10797) @yuki-takei
+* fix: Allow viewing shared pages regardless of page permissions (#10762) @ryotaro-nagahara
+* fix: Bulk export fails due to S3 upload minimal version (#10782) @ryotaro-nagahara
+* fix: Block revisions API from returning info about user pages when user pages are disabled (#10751) @arvid-e
+* fix: OpenAPI spec mismatch for GET /page endpoint response format (#10787) @[copilot-swe-agent[bot]](https://github.com/apps/copilot-swe-agent)
+
+### 🧰 Maintenance
+
+* support: Extract `/page/info` endpoint handler into a dedicated module (#10795) @yuki-takei
+* ci(deps): bump qs from 6.14.1 to 6.14.2 (#10785) @[dependabot[bot]](https://github.com/apps/dependabot)
+
 ## [v7.4.4](https://github.com/growilabs/compare/v7.4.3...v7.4.4) - 2026-01-30
 
 ### 🐛 Bug Fixes

+ 2 - 0
CLAUDE.md

@@ -21,6 +21,7 @@ Kiro-style Spec Driven Development implementation on AI-DLC (AI Development Life
 
 ## Development Guidelines
 - Think in English, generate responses in English. All Markdown content written to project files (e.g., requirements.md, design.md, tasks.md, research.md, validation reports) MUST be written in the target language configured for this specification (see spec.json.language).
+- **Note**: `spec.json.language` controls the language of spec document content only. It does NOT control the conversation response language. The conversation language is governed by the Language Policy in AGENTS.md.
 
 ## Minimal Workflow
 - Phase 0 (optional): `/kiro:steering`, `/kiro:steering-custom`
@@ -33,6 +34,7 @@ Kiro-style Spec Driven Development implementation on AI-DLC (AI Development Life
   - `/kiro:spec-tasks {feature} [-y]`
 - Phase 2 (Implementation): `/kiro:spec-impl {feature} [tasks]`
   - `/kiro:validate-impl {feature}` (optional: after implementation)
+  - `/kiro:spec-cleanup {feature}` (optional: organize specs post-implementation)
 - Progress check: `/kiro:spec-status {feature}` (use anytime)
 
 ## Development Rules

+ 1 - 1
README.md

@@ -81,7 +81,7 @@ See [GROWI Docs: Environment Variables](https://docs.growi.org/en/admin-guide/ad
 
 ## Dependencies
 
-- Node.js v18.x or v20.x
+- Node.js v24.x
 - npm 6.x
 - pnpm 9.x
 - [Turborepo](https://turbo.build/repo)

+ 1 - 1
README_JP.md

@@ -81,7 +81,7 @@ Crowi からの移行は **[こちら](https://docs.growi.org/en/admin-guide/mig
 
 ## 依存関係
 
-- Node.js v18.x or v20.x
+- Node.js v24.x
 - npm 6.x
 - pnpm 9.x
 - [Turborepo](https://turbo.build/repo)

+ 74 - 0
apps/app/.claude/rules/package-dependencies.md

@@ -0,0 +1,74 @@
+# Package Dependency Classification (Turbopack)
+
+## The Rule
+
+> Any package that appears in `apps/app/.next/node_modules/` after a production build MUST be listed under `dependencies`, not `devDependencies`.
+
+Turbopack externalises packages by generating runtime symlinks in `.next/node_modules/`. `pnpm deploy --prod` excludes `devDependencies`, so any externalised package missing from `dependencies` causes `ERR_MODULE_NOT_FOUND` in production.
+
+## How to Classify a New Package
+
+**Step 1 — Build and check:**
+
+```bash
+turbo run build --filter @growi/app
+ls apps/app/.next/node_modules/ | grep <package-name>
+```
+
+- **Found** → `dependencies`
+- **Not found** → `devDependencies` is safe (whether the package is runtime code that gets bundled, or build/test-only tooling)
+
+**Step 2 — If unsure, check the import site:**
+
+| Import pattern | Classification |
+|---|---|
+| `import foo from 'pkg'` at module level in SSR-executed code | `dependencies` |
+| `import type { Foo } from 'pkg'` only | `devDependencies` (type-erased at build) |
+| `await import('pkg')` inside `useEffect` / event handler | Check `.next/node_modules/` — may still be externalised (see `fix-broken-next-symlinks` skill) |
+| Used only in `*.spec.ts`, build scripts, or CI | `devDependencies` |
+
+## Common Misconceptions
+
+**`dynamic({ ssr: false })` does NOT prevent Turbopack externalisation.**
+It skips HTML rendering for that component but Turbopack still externalises packages found via static import analysis inside the dynamically-loaded file.
+
+**`useEffect`-guarded `import()` does NOT guarantee devDependencies.**
+Bootstrap and i18next backends are loaded this way yet still appear in `.next/node_modules/` due to transitive imports.
+
+## Packages Confirmed as devDependencies (Verified)
+
+These were successfully removed from production artifact by eliminating their SSR import path:
+
+| Package | Technique |
+|---|---|
+| `fslightbox-react` | Replaced static import with `import()` inside `useEffect` in `LightBox.tsx` |
+| `socket.io-client` | Replaced static import with `await import()` inside `useEffect` in `admin/states/socket-io.ts` |
+| `@emoji-mart/data` | Replaced runtime import with bundled static JSON (`emoji-native-lookup.json`) |
+
+## Verifying the Production Artifact
+
+### Level 1 — Externalisation check (30–60 s, local, incremental)
+
+Just want to know if a package gets externalised by Turbopack?
+
+```bash
+turbo run build --filter @growi/app
+ls apps/app/.next/node_modules/ | grep <package-name>
+# Found → dependencies required
+# Not found → devDependencies is safe
+```
+
+Turbopack build is incremental via cache, so subsequent runs after the first are fast.
+
+### Level 2 — CI (`reusable-app-prod.yml`, authoritative)
+
+Trigger via `workflow_dispatch` before merging. Runs two jobs:
+
+1. **`build-prod`**: `turbo run build` → `assemble-prod.sh` → **`check-next-symlinks.sh`** → archives production tarball
+2. **`launch-prod`**: extracts the tarball into a clean isolated directory (no workspace-root `node_modules`), runs `pnpm run server:ci`
+
+`check-next-symlinks.sh` scans every symlink in `.next/node_modules/` and fails the build if any are broken (except `fslightbox-react` which is intentionally broken but harmless). This catches classification errors regardless of which code paths are exercised at runtime.
+
+`server:ci` = `node dist/server/app.js --ci`: the server starts fully (loading all modules), then immediately exits with code 0. If any module fails to load (`ERR_MODULE_NOT_FOUND`), the process exits with code 1, failing the CI job.
+
+This exactly matches Docker production (no workspace fallback). A `build-prod` or `launch-prod` failure definitively means a missing `dependencies` entry.

+ 47 - 2
apps/app/.claude/skills/app-commands/SKILL.md

@@ -8,6 +8,37 @@ user-invocable: false
 
 Commands specific to the main GROWI application. For global commands (turbo, pnpm), see the global `tech-stack` skill.
 
+## Quality Check Commands
+
+**IMPORTANT**: Distinguish between Turborepo tasks and package-specific scripts.
+
+### Turbo Tasks vs Package Scripts
+
+| Task | Turborepo (turbo.json) | Package Script (package.json) |
+|------|------------------------|-------------------------------|
+| `lint` | ✅ Yes | ✅ Yes (runs all lint:\*) |
+| `test` | ✅ Yes | ✅ Yes |
+| `build` | ✅ Yes | ✅ Yes |
+| `lint:typecheck` | ❌ No | ✅ Yes |
+| `lint:biome` | ❌ No | ✅ Yes |
+| `lint:styles` | ❌ No | ✅ Yes |
+
+### Recommended Commands
+
+```bash
+# Run ALL quality checks (uses Turborepo caching)
+turbo run lint --filter @growi/app
+turbo run test --filter @growi/app
+turbo run build --filter @growi/app
+
+# Run INDIVIDUAL lint checks (package-specific scripts, from apps/app directory)
+pnpm run lint:typecheck   # TypeScript only
+pnpm run lint:biome       # Biome only
+pnpm run lint:styles      # Stylelint only
+```
+
+> **Running individual test files**: See the `testing` rule (`.claude/rules/testing.md`).
+
 ## Quick Reference
 
 | Task | Command |
@@ -70,10 +101,12 @@ Generated specs output to `tmp/openapi-spec-apiv3.json`.
 
 ```bash
 # Development mode
-pnpm run dev:pre:styles
+pnpm run dev:pre:styles-commons
+pnpm run dev:pre:styles-components
 
 # Production mode
-pnpm run pre:styles
+pnpm run pre:styles-commons
+pnpm run pre:styles-components
 ```
 
 Pre-builds SCSS styles into CSS bundles using Vite.
@@ -109,6 +142,18 @@ pnpm run version:prerelease
 pnpm run version:preminor
 ```
 
+## Build Measurement
+
+```bash
+# Measure module count KPI (cleans .next, starts next dev, triggers compilation)
+./bin/measure-chunk-stats.sh           # default port 3099
+./bin/measure-chunk-stats.sh 3001      # custom port
+```
+
+Output: `[ChunkModuleStats] initial: N, async-only: N, total: N`
+
+For details on module optimization and baselines, see the `build-optimization` skill.
+
 ## Production
 
 ```bash

+ 1 - 1
apps/app/.claude/skills/app-specific-patterns/SKILL.md

@@ -6,7 +6,7 @@ user-invocable: false
 
 # App Specific Patterns (apps/app)
 
-For general testing patterns, see the global `testing-patterns-with-vitest` skill.
+For general testing patterns, see the global `.claude/skills/learned/essential-test-patterns` and `.claude/skills/learned/essential-test-design` skills.
 
 ## Next.js Pages Router
 

+ 127 - 0
apps/app/.claude/skills/build-optimization/SKILL.md

@@ -0,0 +1,127 @@
+---
+name: build-optimization
+description: GROWI apps/app Turbopack configuration, module optimization, and build measurement tooling. Auto-invoked when working in apps/app.
+user-invocable: false
+---
+
+# Build Optimization (apps/app)
+
+## Next.js Version & Bundler
+
+- **Next.js 16** (`^16.0.0`) with **Turbopack** bundler (default)
+- Build: `next build`; Dev: Express server calls `next({ dev })` which uses Turbopack by default
+- React stays at `^18.2.0` — Pages Router has full React 18 support in v16
+- Webpack has been fully removed (no `webpack()` hook, no `--webpack` flag)
+
+## Turbopack Configuration
+
+### Custom Loader Rules (`turbopack.rules`)
+
+| Rule | Pattern | Condition | Purpose |
+|------|---------|-----------|---------|
+| superjson-ssr-loader | `*.page.ts`, `*.page.tsx` | `{ not: 'browser' }` (server-only) | Auto-wraps `getServerSideProps` with SuperJSON serialization |
+
+- Loaders are registered in `next.config.ts` under `turbopack.rules`
+- `condition: { not: 'browser' }` restricts the loader to server-side compilation only
+- `as: '*.ts'` / `as: '*.tsx'` tells Turbopack to continue processing the transformed output as TypeScript
+
+### Resolve Aliases (`turbopack.resolveAlias`)
+
+6 server-only packages + `fs` are aliased to `./src/lib/empty-module.ts` in browser context:
+
+| Package | Reason |
+|---------|--------|
+| `fs` | Node.js built-in, not available in browser |
+| `dtrace-provider` | Native module, server-only |
+| `mongoose` | MongoDB driver, server-only |
+| `i18next-fs-backend` | File-system i18n loader, server-only |
+| `bunyan` | Server-side logger |
+| `bunyan-format` | Server-side logger formatter |
+| `core-js` | Server-side polyfills |
+
+- Uses conditional `{ browser: './src/lib/empty-module.ts' }` syntax so server-side resolution is unaffected
+- `resolveAlias` requires **relative paths** (e.g., `./src/lib/empty-module.ts`), not absolute paths — absolute paths cause "server relative imports are not implemented yet" errors
+- If a new server-only package leaks into the client bundle, add it to `resolveAlias` with the same pattern
+
+## SuperJSON Serialization Architecture
+
+The `next-superjson` SWC plugin was replaced by a custom loader:
+
+- **Build time**: `superjson-ssr-loader.ts` auto-wraps `getServerSideProps` in `.page.{ts,tsx}` files with `withSuperJSONProps()` via Turbopack `rules`
+- **Runtime (server)**: `withSuperJSONProps()` in `src/pages/utils/superjson-ssr.ts` serializes props via superjson
+- **Runtime (client)**: `_app.page.tsx` calls `deserializeSuperJSONProps()` for centralized deserialization
+- **No per-page changes needed** — new pages automatically get superjson serialization
+- Custom serializers registered in `_app.page.tsx` (ObjectId, PageRevisionWithMeta)
+
+## CSS Modules Turbopack Compatibility
+
+### `:global` Syntax
+
+Turbopack only supports the **function form** `:global(...)`. The block form `:global { ... }` is NOT supported:
+
+```scss
+// WRONG — Turbopack rejects this
+.parent :global {
+  .child { color: red; }
+}
+
+// CORRECT — function form
+.parent {
+  :global(.child) { color: red; }
+}
+```
+
+Nested blocks must also use function form:
+
+```scss
+// WRONG
+.parent :global {
+  .child {
+    .grandchild { }
+  }
+}
+
+// CORRECT
+.parent {
+  :global(.child) {
+    :global(.grandchild) { }
+  }
+}
+```
+
+### Other Turbopack CSS Restrictions
+
+- **Standalone `:local` / `&:local`**: Not supported. Inside `:global(...)`, properties are locally scoped by default — remove `&:local` wrappers
+- **`@extend` with `:global()`**: `@extend .class` fails when target is wrapped in `:global(.class)` — Sass doesn't match them as the same selector. Use shared selector groups (comma-separated selectors) instead
+- **IE CSS hacks**: `*zoom:1`, `*display:inline`, `filter:alpha()` cannot be parsed by Turbopack's CSS parser (lightningcss). Avoid CSS files containing these hacks
+
+### Vendor CSS Imports
+
+Global CSS cannot be imported from files other than `_app.page.tsx` under Turbopack Pages Router. See the `vendor-styles-components` skill for the precompilation system that handles per-component vendor CSS.
+
+## Module Optimization Configuration
+
+- `serverExternalPackages: ['handsontable']` — packages excluded from server-side bundling
+- `optimizePackageImports` — 11 `@growi/*` packages configured (expansion to third-party packages was tested and reverted — it increased dev module count)
+
+## Effective Module Reduction Techniques
+
+Techniques that have proven effective for reducing module count, ordered by typical impact:
+
+| Technique | When to Use |
+|-----------|-------------|
+| `next/dynamic({ ssr: true })` | Heavy rendering pipelines (markdown, code highlighting) that can be deferred to async chunks while preserving SSR |
+| `next/dynamic({ ssr: false })` | Client-only heavy components (e.g., Mermaid diagrams, interactive editors) |
+| Subpath imports | Packages with large barrel exports (e.g., `date-fns/format` instead of `date-fns`) |
+| Deep ESM imports | Packages that re-export multiple engines via barrel (e.g., `react-syntax-highlighter/dist/esm/prism-async-light`) |
+| resolveAlias | Server-only packages leaking into client bundle via transitive imports |
+| Lightweight replacements | Replace large libraries used for a single feature (e.g., `tinykeys` instead of `react-hotkeys`, regex instead of `validator`) |
+
+### Techniques That Did NOT Work
+
+- **Expanding `optimizePackageImports` to third-party packages** — In dev mode, this resolves individual sub-module files instead of barrel, resulting in MORE module entries. Reverted.
+- **Refactoring internal barrel exports** — Internal barrels (`states/`, `features/`) are small and well-scoped; refactoring had no measurable impact.
+
+## i18n HMR
+
+`I18NextHMRPlugin` was removed during the Turbopack migration. Translation file changes require a manual browser refresh. The performance gain from Turbopack (faster Fast Refresh overall) outweighs the loss of i18n-specific HMR. Monitor if `i18next-hmr` adds Turbopack support in the future.

+ 90 - 0
apps/app/.claude/skills/learned/fix-broken-next-symlinks/SKILL.md

@@ -0,0 +1,90 @@
+---
+name: fix-broken-next-symlinks
+description: Fix broken symlinks in .next/node_modules/ — diagnose, decide allowlist vs dependencies, and verify
+---
+
+## IMPORTANT
+
+This document is a **mandatory step-by-step procedure**. When fixing broken symlinks, execute every step in order. In particular, verification **always** requires the full 3-command sequence: `build` → `assemble-prod.sh` → `check-next-symlinks.sh`. Never skip `assemble-prod.sh` — the symlink check is only meaningful after production assembly.
+
+## Problem
+
+Turbopack externalizes packages into `.next/node_modules/` as symlinks, even for packages imported only via dynamic `import()` inside `useEffect`. After `assemble-prod.sh` runs `pnpm deploy --prod`, `devDependencies` are excluded, breaking those symlinks. `check-next-symlinks.sh` detects these and fails the build.
+
+## Diagnosis
+
+### Step 1 — Reproduce locally
+
+```bash
+turbo run build --filter @growi/app
+bash apps/app/bin/assemble-prod.sh
+bash apps/app/bin/check-next-symlinks.sh
+```
+
+If the check reports `BROKEN: apps/app/.next/node_modules/<package>-<hash>`, proceed to Step 2.
+
+### Step 2 — Determine the fix
+
+Search all import sites of the broken package:
+
+```bash
+grep -rn "from ['\"]<package-name>['\"]" apps/app/src/
+grep -rn "import(['\"]<package-name>['\"])" apps/app/src/
+```
+
+Apply the decision tree:
+
+```
+Is the package imported ONLY via:
+  - `import type { ... } from 'pkg'`  (erased at compile time)
+  - `await import('pkg')` inside useEffect / event handler  (client-side only, never SSR)
+
+  YES → Add to ALLOWED_BROKEN in check-next-symlinks.sh  (Step 3a)
+  NO  → Move from devDependencies to dependencies          (Step 3b)
+```
+
+### Step 3a — Add to allowlist
+
+Edit `apps/app/bin/check-next-symlinks.sh`:
+
+```bash
+ALLOWED_BROKEN=(
+  fslightbox-react
+  @emoji-mart/data
+  @emoji-mart/react
+  socket.io-client
+  <new-package>          # <-- add here
+)
+```
+
+Use the bare package name (e.g., `socket.io-client`), not the hashed symlink name (`socket.io-client-46e5ba4d4c848156`).
+
+### Step 3b — Move to dependencies
+
+In `apps/app/package.json`, move the package from `devDependencies` to `dependencies`, then run `pnpm install`.
+
+### Step 4 — Verify the fix
+
+Re-run the full sequence:
+
+```bash
+turbo run build --filter @growi/app
+bash apps/app/bin/assemble-prod.sh
+bash apps/app/bin/check-next-symlinks.sh
+```
+
+Expected output: `OK: All apps/app/.next/node_modules symlinks resolve correctly.`
+
+## Example
+
+`socket.io-client` is used in two files:
+- `src/states/socket-io/global-socket.ts` — `import type` + `await import()` inside `useEffect`
+- `src/features/admin/states/socket-io.ts` — `import type` + `import()` inside `useEffect`
+
+Both are client-only dynamic imports → added to `ALLOWED_BROKEN`, stays as `devDependencies`.
+
+## When to Apply
+
+- CI fails at "Check for broken symlinks in .next/node_modules" step
+- `check-next-symlinks.sh` reports `BROKEN: apps/app/.next/node_modules/<package>-<hash>`
+- After adding a new package or changing import patterns in apps/app

+ 302 - 0
apps/app/.claude/skills/learned/page-save-origin-semantics/SKILL.md

@@ -0,0 +1,302 @@
+---
+name: page-save-origin-semantics
+description: Auto-invoked when modifying origin-based conflict detection, revision validation logic, or isUpdatable() method. Explains the two-stage origin check mechanism for conflict detection and its separation from diff detection.
+---
+
+# Page Save Origin Semantics
+
+## Problem
+
+When modifying page save logic, it's easy to accidentally break the carefully designed origin-based conflict detection system. The system uses a two-stage check mechanism (frontend + backend) to determine when revision validation should be enforced vs. bypassed for collaborative editing (Yjs).
+
+**Key Insight**: **Conflict detection (revision check)** and **diff detection (hasDiffToPrev)** serve different purposes and require separate logic.
+
+## Solution
+
+Understanding the two-stage origin check mechanism:
+
+### Stage 1: Frontend Determines revisionId Requirement
+
+```typescript
+// apps/app/src/client/components/PageEditor/PageEditor.tsx:158
+const isRevisionIdRequiredForPageUpdate = currentPage?.revision?.origin === undefined;
+
+// lines 308-310
+const revisionId = isRevisionIdRequiredForPageUpdate
+  ? currentRevisionId
+  : undefined;
+```
+
+**Logic**: Check the **latest revision's origin** on the page:
+- If `origin === undefined` (legacy/API save) → Send `revisionId`
+- If `origin === "editor"` or `"view"` → Do NOT send `revisionId`
+
+### Stage 2: Backend Determines Conflict Check Behavior
+
+```javascript
+// apps/app/src/server/models/obsolete-page.js:167-172
+const ignoreLatestRevision =
+  origin === Origin.Editor &&
+  (latestRevisionOrigin === Origin.Editor || latestRevisionOrigin === Origin.View);
+
+if (ignoreLatestRevision) {
+  return true;  // Bypass revision check
+}
+
+// Otherwise, enforce strict revision matching
+if (revision != previousRevision) {
+  return false;  // Reject save
+}
+return true;
+```
+
+**Logic**: Check **current request's origin** AND **latest revision's origin**:
+- If `origin === "editor"` AND latest is `"editor"` or `"view"` → Bypass revision check
+- Otherwise → Enforce strict revision ID matching
+
+## Origin Values
+
+Three types of page update methods (called "origin"):
+
+- **`Origin.Editor = "editor"`** - Save from editor mode (collaborative editing via Yjs)
+- **`Origin.View = "view"`** - Save from view mode
+  - Examples: HandsontableModal, DrawioModal editing
+- **`undefined`** - API-based saves or legacy pages
+
+## Origin Strength (強弱)
+
+**Basic Rule**: Page updates require the previous revision ID in the request. If the latest revision doesn't match, the server rejects the request.
+
+**Exception - Editor origin is stronger than View origin**:
+- **UX Goal**: Avoid `Posted param "revisionId" is outdated` errors when multiple members are working in the Editor and saves made from View mode would otherwise interrupt them
+- **Special Case**: When the latest revision's origin is View, Editor origin requests can update WITHOUT requiring revision ID
+
+### Origin Strength Matrix
+
+|        | Latest Revision: Editor | Latest Revision: View | Latest Revision: API |
+| ------ | ----------------------- | --------------------- | -------------------- |
+| **Request: Editor** | ⭕️ Bypass revision check | ⭕️ Bypass revision check | ❌ Strict check |
+| **Request: View**   | ❌ Strict check | ❌ Strict check | ❌ Strict check |
+| **Request: API**    | ❌ Strict check | ❌ Strict check | ❌ Strict check |
+
+**Reading the table**:
+- ⭕️ = Revision check bypassed (revisionId not required)
+- ❌ = Strict revision check required (revisionId must match)
+
+## Behavior by Scenario
+
+| Latest Revision Origin | Request Origin | revisionId Sent? | Revision Check | Use Case |
+|------------------------|----------------|------------------|----------------|----------|
+| `editor` or `view` | `editor` | ❌ No | ✅ Bypassed | Normal Editor use (most common) |
+| `undefined` | `editor` | ✅ Yes | ✅ Enforced | Legacy page in Editor |
+| `undefined` | `undefined` (API) | ✅ Yes (required) | ✅ Enforced | API save |
+
+## Example: Server-Side Logic Respecting Origin Semantics
+
+When adding server-side functionality that needs previous revision data:
+
+```typescript
+// ✅ CORRECT: Separate concerns - conflict detection vs. diff detection
+let previousRevision: IRevisionHasId | null = null;
+
+// Priority 1: Use provided revisionId (for conflict detection)
+if (sanitizeRevisionId != null) {
+  previousRevision = await Revision.findById(sanitizeRevisionId);
+}
+
+// Priority 2: Fallback to currentPage.revision (for other purposes like diff detection)
+if (previousRevision == null && currentPage.revision != null) {
+  previousRevision = await Revision.findById(currentPage.revision);
+}
+
+const previousBody = previousRevision?.body ?? null;
+
+// Continue with existing conflict detection logic (unchanged)
+if (currentPage != null && !(await currentPage.isUpdatable(sanitizeRevisionId, origin))) {
+  // ... return conflict error
+}
+
+// Use previousBody for diff detection or other purposes
+updatedPage = await crowi.pageService.updatePage(
+  currentPage,
+  body,
+  previousBody,  // ← Available regardless of conflict detection logic
+  req.user,
+  options,
+);
+```
+
+```typescript
+// ❌ WRONG: Forcing frontend to always send revisionId
+const revisionId = currentRevisionId;  // Always send, regardless of origin
+// This breaks Yjs collaborative editing semantics!
+```
+
+```typescript
+// ❌ WRONG: Changing backend conflict detection logic
+// Don't modify isUpdatable() unless you fully understand the implications
+// for collaborative editing
+```
+
+## When to Apply
+
+**Always consider this pattern when**:
+- Modifying page save/update API handlers
+- Adding functionality that needs previous revision data
+- Working on conflict detection or revision validation logic
+- Implementing features that interact with page history
+- Debugging save operation issues
+
+**Key Principles**:
+1. **Do NOT modify frontend revisionId logic** unless explicitly required for conflict detection
+2. **Do NOT modify isUpdatable() logic** unless fixing conflict detection bugs
+3. **Separate concerns**: Conflict detection ≠ Other revision-based features (diff detection, history, etc.)
+4. **Server-side fallback**: If you need previous revision data when revisionId is not provided, fetch from `currentPage.revision`
+
+## Detailed Scenario Analysis
+
+### Scenario A: Normal Editor Mode (Most Common Case)
+
+**Latest revision has `origin=editor`**:
+
+1. **Frontend Logic**:
+   - `isRevisionIdRequiredForPageUpdate = false` (latest revision origin is not undefined)
+   - Does NOT send `revisionId` in request
+   - Sends `origin: Origin.Editor`
+
+2. **API Layer**:
+   ```typescript
+   previousRevision = await Revision.findById(undefined);  // → null
+   ```
+   Result: No previousRevision fetched via revisionId
+
+3. **Backend Conflict Check** (`isUpdatable`):
+   ```javascript
+   ignoreLatestRevision =
+     (Origin.Editor === Origin.Editor) &&
+     (latestRevisionOrigin === Origin.Editor || latestRevisionOrigin === Origin.View)
+   // → true (latest revision is editor)
+   return true;  // Bypass revision check
+   ```
+   Result: ✅ Save succeeds without revision validation
+
+4. **Impact on Other Features**:
+   - If you need previousRevision data (e.g., for diff detection), it won't be available unless you implement a server-side fallback
+   - This is where `currentPage.revision` fallback becomes necessary
+
+### Scenario B: Legacy Page in Editor Mode
+
+**Latest revision has `origin=undefined`**:
+
+1. **Frontend Logic**:
+   - `isRevisionIdRequiredForPageUpdate = true` (latest revision origin is undefined)
+   - Sends `revisionId` in request
+   - Sends `origin: Origin.Editor`
+
+2. **API Layer**:
+   ```typescript
+   previousRevision = await Revision.findById(sanitizeRevisionId);  // → revision object
+   ```
+   Result: previousRevision fetched successfully
+
+3. **Backend Conflict Check** (`isUpdatable`):
+   ```javascript
+   ignoreLatestRevision =
+     (Origin.Editor === Origin.Editor) &&
+     (latestRevisionOrigin === undefined)
+   // → false (latest revision is undefined, not editor/view)
+
+   // Strict revision check
+   if (revision != sanitizeRevisionId) {
+     return false;  // Reject if mismatch
+   }
+   return true;
+   ```
+   Result: ✅ Save succeeds only if revisionId matches
+
+4. **Impact on Other Features**:
+   - previousRevision data is available
+   - All revision-based features work correctly
+
+### Scenario C: API-Based Save
+
+**Request has `origin=undefined` or omitted**:
+
+1. **Frontend**: Not applicable (API client)
+
+2. **API Layer**:
+   - API client MUST send `revisionId` in request
+   - `previousRevision = await Revision.findById(sanitizeRevisionId)`
+
+3. **Backend Conflict Check** (`isUpdatable`):
+   ```javascript
+   ignoreLatestRevision =
+     (undefined === Origin.Editor) && ...
+   // → false
+
+   // Strict revision check
+   if (revision != sanitizeRevisionId) {
+     return false;
+   }
+   return true;
+   ```
+   Result: Strict validation enforced
+
+## Root Cause: Why This Separation Matters
+
+**Historical Context**: At some point, the frontend stopped sending `previousRevision` (revisionId) for certain scenarios to support Yjs collaborative editing. This broke features that relied on previousRevision data being available.
+
+**The Core Issue**:
+- **Conflict detection** needs to know "Is this save conflicting with another user's changes?" (Answered by revision check)
+- **Diff detection** needs to know "Did the content actually change?" (Answered by comparing body)
+- **Current implementation conflates these**: When conflict detection is bypassed, previousRevision is not fetched, breaking diff detection
+
+**The Solution Pattern**:
+```typescript
+// Separate the two concerns:
+
+// 1. Fetch previousRevision for data purposes (diff detection, history, etc.)
+let previousRevision: IRevisionHasId | null = null;
+if (sanitizeRevisionId != null) {
+  previousRevision = await Revision.findById(sanitizeRevisionId);
+} else if (currentPage.revision != null) {
+  previousRevision = await Revision.findById(currentPage.revision);  // Fallback
+}
+
+// 2. Use previousRevision data for your feature
+const previousBody = previousRevision?.body ?? null;
+
+// 3. Conflict detection happens independently via isUpdatable()
+if (currentPage != null && !(await currentPage.isUpdatable(sanitizeRevisionId, origin))) {
+  // Return conflict error
+}
+```
+
+## Reference
+
+**Official Documentation**:
+- https://dev.growi.org/651a6f4a008fee2f99187431#origin-%E3%81%AE%E5%BC%B7%E5%BC%B1
+
+**Related Files**:
+- Frontend: `apps/app/src/client/components/PageEditor/PageEditor.tsx` (lines 158, 240, 308-310)
+- Backend: `apps/app/src/server/models/obsolete-page.js` (lines 159-182, isUpdatable method)
+- API: `apps/app/src/server/routes/apiv3/page/update-page.ts` (lines 260-282, conflict check)
+- Interface: `packages/core/src/interfaces/revision.ts` (lines 6-11, Origin definition)
+
+## Common Pitfalls
+
+1. **Assuming revisionId is always available**: It's not! Editor mode with recent editor/view saves omits it by design.
+2. **Conflating conflict detection with other features**: They serve different purposes and need separate logic.
+3. **Breaking Yjs collaborative editing**: Forcing revisionId to always be sent breaks the bypass mechanism.
+4. **Ignoring origin values**: The system behavior changes significantly based on origin combinations.
+
+## Lessons Learned
+
+This pattern was identified during the "improve-unchanged-revision" feature implementation, where the initial assumption was that frontend should always send `revisionId` for diff detection. Deep analysis revealed:
+
+- The frontend logic is correct for conflict detection and should NOT be changed
+- Server-side fallback is the correct approach to get previous revision data
+- Two-stage checking is intentional and critical for Yjs collaborative editing
+- Conflict detection and diff detection must be separated
+
+**Key Takeaway**: Always understand the existing architectural patterns before proposing changes. What appears to be a "fix" might actually break carefully designed functionality.

+ 116 - 0
apps/app/.claude/skills/vendor-styles-components/SKILL.md

@@ -0,0 +1,116 @@
+---
+name: vendor-styles-components
+description: Vendor CSS precompilation system for Turbopack compatibility. How to add third-party CSS to components without violating Pages Router global CSS restriction. Auto-invoked when working in apps/app.
+---
+
+# Vendor CSS Precompilation (apps/app)
+
+## Problem
+
+Turbopack (Pages Router) strictly enforces: **global CSS can only be imported from `_app.page.tsx`**. Components cannot `import 'package/style.css'` directly — Turbopack rejects these at compile time.
+
+Centralizing all vendor CSS in `_app` would degrade FCP for pages that don't need those styles.
+
+## Solution: Two-Track Vendor CSS System
+
+### Commons Track (globally shared CSS)
+
+- **File**: `src/styles/vendor.scss`
+- **For**: CSS needed on most pages (e.g., `simplebar-react`)
+- **Mechanism**: Compiled via `vite.vendor-styles-commons.ts` into `src/styles/prebuilt/`
+- **Imported from**: `_app.page.tsx`
+
+### Components Track (component-specific CSS)
+
+- **For**: CSS needed only by specific components
+- **Mechanism**: Vite precompiles `*.vendor-styles.ts` entry points into `*.vendor-styles.prebuilt.ts` using `?inline` CSS import suffix
+- **Output**: Pure JS modules (no CSS imports) — Turbopack sees them as regular JS
+
+## How It Works
+
+1. **Entry point** (`ComponentName.vendor-styles.ts`): imports CSS via Vite `?inline` suffix, which inlines CSS as a string
+2. **Runtime injection**: the entry point creates a `<style>` tag and appends CSS to `document.head`
+3. **Vite prebuild** (`pre:styles-components` Turborepo task): compiles entry points into `*.vendor-styles.prebuilt.ts`
+4. **Component import**: imports the `.prebuilt.ts` file instead of raw CSS
+
+### Entry Point Template
+
+```typescript
+// @ts-nocheck -- Processed by Vite only; ?inline is a Vite-specific import suffix
+import css from 'some-package/dist/style.css?inline';
+
+const s = document.createElement('style');
+s.textContent = css;
+document.head.appendChild(s);
+```
+
+For multiple CSS sources in one component:
+
+```typescript
+// @ts-nocheck
+import css1 from 'package-a/style.css?inline';
+import css2 from 'package-b/style.css?inline';
+
+const s = document.createElement('style');
+s.textContent = css1 + css2;
+document.head.appendChild(s);
+```
+
+## Current Entry Points
+
+| Entry Point | CSS Sources | Consuming Components |
+|---|---|---|
+| `Renderer.vendor-styles.ts` | `@growi/remark-lsx`, `@growi/remark-attachment-refs`, `katex` | renderer.tsx |
+| `GrowiEditor.vendor-styles.ts` | `@growi/editor` | PageEditor, CommentEditor |
+| `HandsontableModal.vendor-styles.ts` | `handsontable` (non-full variant) | HandsontableModal |
+| `DateRangePicker.vendor-styles.ts` | `react-datepicker` | DateRangePicker |
+| `RevisionDiff.vendor-styles.ts` | `diff2html` | RevisionDiff |
+| `DrawioViewerWithEditButton.vendor-styles.ts` | `@growi/remark-drawio` | DrawioViewerWithEditButton |
+| `ImageCropModal.vendor-styles.ts` | `react-image-crop` | ImageCropModal |
+| `Presentation.vendor-styles.ts` | `@growi/presentation` | Presentation, Slides |
+
+## Adding New Vendor CSS
+
+1. Create `{ComponentName}.vendor-styles.ts` next to the consuming component:
+   ```typescript
+   // @ts-nocheck
+   import css from 'new-package/dist/style.css?inline';
+   const s = document.createElement('style');
+   s.textContent = css;
+   document.head.appendChild(s);
+   ```
+2. In the component, replace `import 'new-package/dist/style.css'` with:
+   ```typescript
+   import './ComponentName.vendor-styles.prebuilt';
+   ```
+3. Run `pnpm run pre:styles-components` (or let Turborepo handle it during `dev`/`build`)
+4. The `.prebuilt.js` file is git-ignored and auto-generated
+
+**Decision guide**: If the CSS is needed on nearly every page, add it to the commons track (`vendor.scss`) instead.
+
+## Font/Asset Handling
+
+When vendor CSS references external assets (e.g., KaTeX `@font-face` with `url(fonts/KaTeX_*.woff2)`):
+
+- Vite emits asset files to `src/assets/` during build
+- The `moveAssetsToPublic` plugin (in `vite.vendor-styles-components.ts`) relocates them to `public/static/fonts/`
+- URL references in prebuilt JS are rewritten from `/assets/` to `/static/fonts/`
+- Fonts are served by the existing `express.static(crowi.publicDir)` middleware
+- Both `public/static/fonts/` and `src/**/*.vendor-styles.prebuilt.ts` are git-ignored
+
+## Build Pipeline Integration
+
+```
+turbo.json tasks:
+  pre:styles-components  →  build (dependency)
+  dev:pre:styles-components  →  dev (dependency)
+
+Inputs:  vite.vendor-styles-components.ts, src/**/*.vendor-styles.ts, package.json
+Outputs: src/**/*.vendor-styles.prebuilt.ts, public/static/fonts/**
+```
+
+## Important Caveats
+
+- **SSR**: CSS is injected via `<style>` tags at runtime — not available during SSR. Most consuming components use `next/dynamic({ ssr: false })`, so FOUC is not a practical concern
+- **`@ts-nocheck`**: Required because `?inline` is a Vite-specific import suffix not understood by TypeScript
+- **handsontable**: Must use `handsontable/dist/handsontable.css` (non-full, non-minified). The "full" variant (`handsontable.full.min.css`) contains IE CSS hacks (`*zoom:1`, `filter:alpha()`) that Turbopack's CSS parser (lightningcss) cannot parse. The "full" variant also includes Pikaday, which is unused.

+ 2 - 0
apps/app/.env.development

@@ -4,6 +4,8 @@
 ##
 MIGRATIONS_DIR=src/migrations/
 
+NEXT_TELEMETRY_DISABLED=1
+
 APP_SITE_URL=http://localhost:3000
 FILE_UPLOAD=mongodb
 # MONGO_GRIDFS_TOTAL_LIMIT=10485760

+ 4 - 0
apps/app/.gitignore

@@ -1,6 +1,8 @@
 # next.js
 /.next/
 /out/
+next-env.d.ts
+next.config.js
 
 # test
 .reg
@@ -9,10 +11,12 @@
 /build/
 /dist/
 /transpiled/
+/public/static/fonts
 /public/static/js
 /public/static/styles
 /public/uploads
 /src/styles/prebuilt
+/src/**/*.vendor-styles.prebuilt.*
 /tmp/
 
 # cache

+ 13 - 1
apps/app/AGENTS.md

@@ -29,6 +29,10 @@ pnpm run test                   # Run tests
 
 # Build
 pnpm run build                  # Build for production
+
+# Run Specific Tests
+pnpm vitest run yjs.integ       # Use partial file name
+pnpm vitest run helper.spec     # Vitest auto-finds matching files
 ```
 
 ### Key Directories
@@ -150,8 +154,16 @@ When working in this directory, these skills are automatically loaded:
 - **app-commands** - apps/app specific commands (migrations, OpenAPI, etc.)
 - **app-specific-patterns** - Jotai/SWR/Next.js patterns, testing
 
-Plus all global skills (monorepo-overview, tech-stack, testing-patterns-with-vitest).
+Plus all global skills (monorepo-overview, tech-stack).
 
 ---
 
 For detailed patterns and examples, refer to the Skills in `.claude/skills/`.
+
+## Rules (Always Applied)
+
+The following rules in `.claude/rules/` are always applied when working in this directory:
+
+| Rule | Description |
+|------|-------------|
+| **package-dependencies** | Turbopack dependency classification — when to use `dependencies` vs `devDependencies`, verification procedure |

+ 18 - 0
apps/app/CLAUDE.md

@@ -1 +1,19 @@
 @AGENTS.md
+
+# apps/app Specific Knowledge
+
+## Critical Architectural Patterns
+
+### Page Save Origin Semantics
+
+**IMPORTANT**: When working on page save, update, or revision operations, always consult the **page-save-origin-semantics** skill for understanding the two-stage origin check mechanism.
+
+**Key Concept**: Origin-based conflict detection uses a two-stage check (frontend + backend) to determine when revision validation should be enforced vs. bypassed for Yjs collaborative editing.
+
+**Critical Rule**: **Conflict detection (revision check)** and **other revision-based features (diff detection, history, etc.)** serve different purposes and require separate logic. Do NOT conflate them.
+
+**Documentation**:
+- Skill (auto-invoked): `.claude/skills/learned/page-save-origin-semantics/SKILL.md`
+
+**Common Pitfall**: Assuming `revisionId` is always available or forcing frontend to always send it will break Yjs collaborative editing.
+

+ 30 - 0
apps/app/bin/assemble-prod.sh

@@ -0,0 +1,30 @@
+#!/bin/bash
+# Assemble production artifacts for GROWI app.
+# Run from the workspace root.
+set -euo pipefail
+
+echo "[1/4] Collecting production dependencies..."
+rm -rf out
+pnpm deploy out --prod --legacy --filter @growi/app
+echo "[1/4] Done."
+
+echo "[2/4] Reorganizing node_modules..."
+rm -rf node_modules
+mv out/node_modules node_modules
+rm -rf apps/app/node_modules
+ln -sfn ../../node_modules apps/app/node_modules
+rm -rf out
+echo "[2/4] Done."
+
+echo "[3/4] Removing build cache..."
+rm -rf apps/app/.next/cache
+echo "[3/4] Done."
+
+# Provide a CJS runtime config so the production server can load it without TypeScript.
+# next.config.js takes precedence over next.config.ts in Next.js, so the .ts file
+# is left in place but effectively ignored at runtime.
+echo "[4/4] Installing runtime next.config.js..."
+cp apps/app/next.config.prod.cjs apps/app/next.config.js
+echo "[4/4] Done."
+
+echo "Assembly complete."

+ 44 - 0
apps/app/bin/check-next-symlinks.sh

@@ -0,0 +1,44 @@
+#!/bin/bash
+# Check that all .next/node_modules/ symlinks resolve correctly after assemble-prod.sh.
+# Usage: bash apps/app/bin/check-next-symlinks.sh (from monorepo root)
+set -euo pipefail
+
+NEXT_MODULES="apps/app/.next/node_modules"
+
+# Packages that are intentionally broken symlinks.
+# These are only imported via useEffect + dynamic import() and never accessed during SSR.
+ALLOWED_BROKEN=(
+  fslightbox-react
+  @emoji-mart/data
+  @emoji-mart/react
+  socket.io-client
+)
+
+# Build a grep -v pattern from the allowlist
+grep_args=()
+for pkg in "${ALLOWED_BROKEN[@]}"; do
+  grep_args+=(-e "$pkg")
+done
+
+broken=$(find "$NEXT_MODULES" -maxdepth 2 -type l | while read -r link; do
+  linkdir=$(dirname "$link")
+  target=$(readlink "$link")
+  resolved=$(cd "$linkdir" 2>/dev/null && realpath -m "$target" 2>/dev/null || echo "UNRESOLVABLE")
+  { [ "$resolved" = "UNRESOLVABLE" ] || [ ! -e "$resolved" ]; } && echo "BROKEN: $link"
+done | grep -v "${grep_args[@]}" || true)
+
+if [ -n "$broken" ]; then
+  echo "ERROR: Broken symlinks found in $NEXT_MODULES:"
+  echo "$broken"
+  echo ""
+  echo "Each broken package must be either:"
+  echo "  1. Moved from devDependencies to dependencies in apps/app/package.json"
+  echo "  2. Added to ALLOWED_BROKEN in this script (if only used via useEffect + dynamic import)"
+  echo ""
+  echo "Fix: Follow the step-by-step procedure in apps/app/.claude/skills/learned/fix-broken-next-symlinks/SKILL.md"
+  echo "     You MUST execute every step in order — do NOT skip assemble-prod.sh when verifying."
+  echo "Ref: apps/app/.claude/rules/package-dependencies.md"
+  exit 1
+fi
+
+echo "OK: All $NEXT_MODULES symlinks resolve correctly."

+ 60 - 0
apps/app/bin/measure-chunk-stats.sh

@@ -0,0 +1,60 @@
+#!/usr/bin/env bash
+# Measure ChunkModuleStats (initial / async-only / total) for [[...path]] page.
+# Usage: ./bin/measure-chunk-stats.sh [port]
+set -euo pipefail
+
+PORT="${1:-3099}"
+LOG=$(mktemp /tmp/chunk-stats-XXXXXX.log)
+
+cleanup() {
+  local pids
+  pids=$(lsof -ti :"$PORT" 2>/dev/null || true)
+  if [ -n "$pids" ]; then
+    kill -9 $pids 2>/dev/null || true
+  fi
+  rm -f "$LOG"
+}
+trap cleanup EXIT
+
+# 1. Ensure port is free
+cleanup_pids=$(lsof -ti :"$PORT" 2>/dev/null || true)
+if [ -n "$cleanup_pids" ]; then
+  kill -9 $cleanup_pids 2>/dev/null || true
+  sleep 1
+fi
+
+# 2. Clean .next dev cache (v16 uses .next/dev for isolated dev builds)
+rm -rf "$(dirname "$0")/../.next/dev"
+
+# 3. Start Next.js dev server (--webpack to opt out of Turbopack default in v16)
+cd "$(dirname "$0")/.."
+npx next dev --webpack -p "$PORT" > "$LOG" 2>&1 &
+NEXT_PID=$!
+
+# 4. Wait for server ready
+echo "Waiting for Next.js to start on port $PORT ..."
+for i in $(seq 1 30); do
+  if grep -q "Local:" "$LOG" 2>/dev/null; then
+    break
+  fi
+  sleep 1
+done
+
+# 5. Trigger compilation
+echo "Triggering compilation ..."
+curl -s -o /dev/null http://localhost:"$PORT"/
+
+# 6. Wait for ChunkModuleStats output (non-zero initial)
+echo "Waiting for compilation ..."
+for i in $(seq 1 120); do
+  if grep -qP 'ChunkModuleStats\] initial: [1-9]' "$LOG" 2>/dev/null; then
+    break
+  fi
+  sleep 2
+done
+
+# 7. Print results
+echo ""
+echo "=== Results ==="
+grep -E 'ChunkModuleStats|Compiled.*modules' "$LOG" | grep -v 'initial: 0,' | head -5
+echo ""

+ 13 - 11
apps/app/bin/openapi/definition-apiv3.js

@@ -51,6 +51,18 @@ module.exports = {
         name: 'x-growi-transfer-key',
       },
     },
+    parameters: {
+      MimeTypePathParam: {
+        name: 'mimeType',
+        in: 'path',
+        required: true,
+        description: 'The MIME type to configure.',
+        schema: {
+          type: 'string',
+          example: 'image/png',
+        },
+      },
+    },
   },
   'x-tagGroups': [
     {
@@ -66,21 +78,11 @@ module.exports = {
         'ShareLinks',
         'Users',
         'UserUISettings',
-        '',
       ],
     },
     {
       name: 'User Personal Settings API',
-      tags: [
-        'GeneralSetting',
-        'EditorSetting',
-        'InAppNotificationSettings',
-        '',
-        '',
-        '',
-        '',
-        '',
-      ],
+      tags: ['GeneralSetting', 'EditorSetting', 'InAppNotificationSettings'],
     },
     {
       name: 'System Management API',

+ 2 - 8
apps/app/config/next-i18next.config.js

@@ -1,5 +1,4 @@
 const isDev = process.env.NODE_ENV === 'development';
-
 // biome-ignore lint/style/useNodejsImportProtocol: ignore
 const path = require('path');
 
@@ -8,8 +7,6 @@ const { isServer } = require('@growi/core/dist/utils');
 
 const { defaultLang } = require('./i18next.config');
 
-const HMRPlugin = isDev ? require('i18next-hmr/plugin').HMRPlugin : undefined;
-
 /** @type {import('next-i18next').UserConfig} */
 module.exports = {
   ...require('./i18next.config').initOptions,
@@ -24,11 +21,8 @@ module.exports = {
 
   use: isDev
     ? isServer()
-      ? [new HMRPlugin({ webpack: { server: true } })]
-      : [
-          require('i18next-chained-backend').default,
-          new HMRPlugin({ webpack: { client: true } }),
-        ]
+      ? []
+      : [require('i18next-chained-backend').default]
     : [],
   backend: {
     backends: isServer()

+ 79 - 51
apps/app/docker/Dockerfile

@@ -1,110 +1,138 @@
-# syntax = docker/dockerfile:1.4
+# syntax=docker/dockerfile:1
 
+ARG NODE_VERSION=24
 ARG OPT_DIR="/opt"
 ARG PNPM_HOME="/root/.local/share/pnpm"
 
 ##
-## base
+## base — DHI dev image with pnpm + turbo
 ##
-FROM node:20-slim AS base
+FROM dhi.io/node:24-debian13-dev AS base
 
 ARG OPT_DIR
 ARG PNPM_HOME
 
 WORKDIR $OPT_DIR
 
-# install tools
+# Install build dependencies
 RUN --mount=type=cache,target=/var/lib/apt,sharing=locked \
     --mount=type=cache,target=/var/cache/apt,sharing=locked \
-  apt-get update && apt-get install -y ca-certificates wget curl --no-install-recommends
+  apt-get update && apt-get install -y --no-install-recommends ca-certificates wget
 
-# install pnpm
-RUN wget -qO- https://get.pnpm.io/install.sh | ENV="$HOME/.shrc" SHELL="$(which sh)" PNPM_VERSION="10.4.1" sh -
+# Install pnpm (standalone script, no version hardcoding)
+RUN wget -qO- https://get.pnpm.io/install.sh | ENV="$HOME/.shrc" SHELL=/bin/sh sh -
 ENV PNPM_HOME=$PNPM_HOME
 ENV PATH="$PNPM_HOME:$PATH"
 
-# install turbo
+# Install turbo globally
 RUN --mount=type=cache,target=$PNPM_HOME/store,sharing=locked \
   pnpm add turbo --global
 
 
+##
+## pruner — turbo prune for Docker-optimized monorepo subset
+##
+FROM base AS pruner
+
+ARG OPT_DIR
+
+WORKDIR $OPT_DIR
+
+COPY . .
+
+# Include @growi/pdf-converter because @growi/pdf-converter-client has a turbo
+# task dependency on @growi/pdf-converter#gen:swagger-spec (generates the OpenAPI
+# spec that orval uses to build the client). Without it, turbo cannot resolve
+# the cross-package task dependency in the pruned workspace.
+RUN turbo prune @growi/app @growi/pdf-converter --docker
+
 
 ##
-## builder
+## deps — dependency installation (layer cached when only source changes)
 ##
-FROM base AS builder
+FROM base AS deps
+
+ARG OPT_DIR
+ARG PNPM_HOME
 
 ENV PNPM_HOME=$PNPM_HOME
 ENV PATH="$PNPM_HOME:$PATH"
 
 WORKDIR $OPT_DIR
 
-COPY . .
+# Copy only package manifests and lockfile for dependency caching
+COPY --from=pruner $OPT_DIR/out/json/ .
 
+# Install build tools and dependencies
 RUN --mount=type=cache,target=$PNPM_HOME/store,sharing=locked \
   pnpm add node-gyp --global
 RUN --mount=type=cache,target=$PNPM_HOME/store,sharing=locked \
-  pnpm install ---frozen-lockfile
+  pnpm install --frozen-lockfile
 
-# build
+
+##
+## builder — build + produce artifacts
+##
+FROM deps AS builder
+
+ARG OPT_DIR
+
+WORKDIR $OPT_DIR
+
+# Copy full source on top of installed dependencies
+COPY --from=pruner $OPT_DIR/out/full/ .
+
+# turbo prune does not include root-level config files in its output.
+# tsconfig.base.json is referenced by most packages via "extends": "../../tsconfig.base.json"
+COPY tsconfig.base.json .
+
+# Build
 RUN turbo run clean
 RUN turbo run build --filter @growi/app
 
-# make artifacts
-RUN pnpm deploy out --prod --filter @growi/app
-RUN rm -rf apps/app/node_modules && mv out/node_modules apps/app/node_modules
-RUN rm -rf apps/app/.next/cache
-RUN tar -zcf /tmp/packages.tar.gz \
-  package.json \
-  apps/app/.next \
-  apps/app/config \
-  apps/app/dist \
-  apps/app/public \
-  apps/app/resource \
-  apps/app/tmp \
-  apps/app/.env.production* \
-  apps/app/next.config.js \
-  apps/app/package.json \
-  apps/app/node_modules
+# Produce artifacts
+RUN bash apps/app/bin/assemble-prod.sh
 
+# Stage artifacts into a clean directory for COPY --from
+RUN mkdir -p /tmp/release/apps/app && \
+  cp package.json /tmp/release/ && \
+  cp -a node_modules /tmp/release/ && \
+  cp -a apps/app/.next apps/app/config apps/app/dist apps/app/public \
+       apps/app/resource apps/app/tmp \
+       apps/app/package.json apps/app/node_modules \
+       apps/app/next.config.js \
+       /tmp/release/apps/app/ && \
+  (cp apps/app/.env.production* /tmp/release/apps/app/ 2>/dev/null || true)
 
 
 ##
-## release
+## release — DHI runtime (no shell, no additional binaries)
 ##
-FROM node:20-slim
-LABEL maintainer="Yuki Takei <yuki@weseek.co.jp>"
+FROM dhi.io/node:24-debian13 AS release
 
 ARG OPT_DIR
 
 ENV NODE_ENV="production"
-
 ENV appDir="$OPT_DIR/growi"
 
-# Add gosu
-# see: https://github.com/tianon/gosu/blob/1.13/INSTALL.md
-RUN --mount=type=cache,target=/var/lib/apt,sharing=locked \
-    --mount=type=cache,target=/var/cache/apt,sharing=locked \
-  set -eux; \
-	apt-get update; \
-	apt-get install -y gosu; \
-	rm -rf /var/lib/apt/lists/*; \
-# verify that the binary works
-	gosu nobody true
-
-# extract artifacts as 'node' user
-USER node
+# Copy artifacts from builder (no shell required)
 WORKDIR ${appDir}
-RUN --mount=type=bind,from=builder,source=/tmp/packages.tar.gz,target=/tmp/packages.tar.gz \
-  tar -zxf /tmp/packages.tar.gz -C ${appDir}/
+COPY --from=builder --chown=node:node /tmp/release/ ${appDir}/
 
-COPY --chown=node:node --chmod=700 apps/app/docker/docker-entrypoint.sh /
+# Copy TypeScript entrypoint
+COPY --chown=node:node apps/app/docker/docker-entrypoint.ts /docker-entrypoint.ts
 
+# Switch back to root for entrypoint (it handles privilege drop)
 USER root
 WORKDIR ${appDir}/apps/app
 
+# OCI standard labels
+LABEL org.opencontainers.image.source="https://github.com/weseek/growi"
+LABEL org.opencontainers.image.title="GROWI"
+LABEL org.opencontainers.image.description="Team collaboration wiki using Markdown"
+LABEL org.opencontainers.image.vendor="WESEEK, Inc."
+
 VOLUME /data
 EXPOSE 3000
 
-ENTRYPOINT ["/docker-entrypoint.sh"]
-CMD ["npm run migrate && node -r dotenv-flow/config --expose_gc dist/server/app.js"]
+ENTRYPOINT ["node", "/docker-entrypoint.ts"]

+ 74 - 4
apps/app/docker/Dockerfile.dockerignore

@@ -1,9 +1,79 @@
+# ============================================================
+# Build artifacts and caches
+# ============================================================
 **/node_modules
-**/coverage
-**/Dockerfile
-**/*.dockerignore
-**/.pnpm-store
 **/.next
 **/.turbo
+**/.pnpm-store
+**/coverage
 out
+
+# ============================================================
+# Version control
+# ============================================================
+.git
+
+# ============================================================
+# Docker files (prevent recursive inclusion)
+# ============================================================
+**/Dockerfile
+**/*.dockerignore
+
+# ============================================================
+# Unrelated apps
+# ============================================================
 apps/slackbot-proxy
+
+# ============================================================
+# Test files
+# ============================================================
+**/*.spec.*
+**/*.test.*
+**/test/
+**/__tests__/
+**/playwright/
+
+# ============================================================
+# Documentation (no .md files are needed for build)
+# ============================================================
+**/*.md
+
+# ============================================================
+# Local environment overrides
+# ============================================================
+.env.local
+.env.*.local
+
+# ============================================================
+# IDE and editor settings
+# ============================================================
+.vscode
+.idea
+**/.DS_Store
+
+# ============================================================
+# CI/CD, DevOps, and project management
+# ============================================================
+.changeset
+.devcontainer
+.github
+aws
+bin
+
+# ============================================================
+# Linter, formatter, and tool configs (not needed for build)
+# ============================================================
+**/.editorconfig
+**/.markdownlint.yml
+**/.prettier*
+**/.stylelintrc*
+**/biome.json
+**/lefthook.yml
+
+# ============================================================
+# AI agent configuration
+# ============================================================
+**/.claude
+**/.kiro
+**/.mcp.json
+**/.serena

+ 11 - 1
apps/app/docker/README.md

@@ -10,7 +10,7 @@ GROWI Official docker image
 Supported tags and respective Dockerfile links
 ------------------------------------------------
 
-* [`7.4.4`, `7.4`, `7`, `latest` (Dockerfile)](https://github.com/growilabs/growi/blob/v7.4.4/apps/app/docker/Dockerfile)
+* [`7.4.7`, `7.4`, `7`, `latest` (Dockerfile)](https://github.com/growilabs/growi/blob/v7.4.7/apps/app/docker/Dockerfile)
 * [`7.3.0`, `7.3` (Dockerfile)](https://github.com/growilabs/growi/blob/v7.3.0/apps/app/docker/Dockerfile)
 * [`7.2.0`, `7.2` (Dockerfile)](https://github.com/growilabs/growi/blob/v7.2.0/apps/app/docker/Dockerfile)
 
@@ -72,6 +72,16 @@ See [GROWI Docs: Admin Guide](https://docs.growi.org/en/admin-guide/) ([en](http
 
 - [GROWI Docs: Environment Variables](https://docs.growi.org/en/admin-guide/admin-cookbook/env-vars.html)
 
+#### V8 Memory Management
+
+| Variable | Type | Default | Description |
+|----------|------|---------|-------------|
+| `V8_MAX_HEAP_SIZE` | int (MB) | (unset) | Explicitly specify the `--max-heap-size` value for Node.js |
+| `V8_OPTIMIZE_FOR_SIZE` | `"true"` / (unset) | (unset) | Enable the `--optimize-for-size` V8 flag to reduce memory usage |
+| `V8_LITE_MODE` | `"true"` / (unset) | (unset) | Enable the `--lite-mode` V8 flag to reduce memory usage at the cost of performance |
+
+**Heap size fallback behavior**: When `V8_MAX_HEAP_SIZE` is not set, the entrypoint automatically detects the container's memory limit via cgroup (v2/v1) and sets the heap size to 60% of the limit. If no cgroup limit is detected, V8's default heap behavior is used.
+
 
 Issues
 ------

+ 2 - 0
apps/app/docker/codebuild/buildspec.yml

@@ -12,6 +12,8 @@ phases:
     commands:
       # login to docker.io
       - echo ${DOCKER_REGISTRY_PASSWORD} | docker login --username growimoogle --password-stdin
+      # login to dhi.io (DHI uses Docker Hub credentials)
+      - echo ${DOCKER_REGISTRY_PASSWORD} | docker login dhi.io --username growimoogle --password-stdin
   build:
     commands:
       - docker build -t ${IMAGE_TAG} -f ./apps/app/docker/Dockerfile .

+ 0 - 18
apps/app/docker/docker-entrypoint.sh

@@ -1,18 +0,0 @@
-#!/bin/sh
-
-set -e
-
-# Support `FILE_UPLOAD=local`
-mkdir -p /data/uploads
-if [ ! -e "./public/uploads" ]; then
-  ln -s /data/uploads ./public/uploads
-fi
-chown -R node:node /data/uploads
-chown -h node:node ./public/uploads
-
-# Set permissions for shared directory for bulk export
-mkdir -p /tmp/page-bulk-export
-chown -R node:node /tmp/page-bulk-export
-chmod 700 /tmp/page-bulk-export
-
-exec gosu node /bin/bash -c "$@"

+ 358 - 0
apps/app/docker/docker-entrypoint.spec.ts

@@ -0,0 +1,358 @@
+import fs from 'node:fs';
+import os from 'node:os';
+import path from 'node:path';
+import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
+
+import {
+  buildNodeFlags,
+  chownRecursive,
+  detectHeapSize,
+  readCgroupLimit,
+  setupDirectories,
+} from './docker-entrypoint';
+
+describe('chownRecursive', () => {
+  let tmpDir: string;
+
+  beforeEach(() => {
+    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'entrypoint-test-'));
+  });
+
+  afterEach(() => {
+    fs.rmSync(tmpDir, { recursive: true, force: true });
+  });
+
+  it('should chown a flat directory', () => {
+    const chownSyncSpy = vi.spyOn(fs, 'chownSync').mockImplementation(() => {});
+    chownRecursive(tmpDir, 1000, 1000);
+    // Should chown the directory itself
+    expect(chownSyncSpy).toHaveBeenCalledWith(tmpDir, 1000, 1000);
+    chownSyncSpy.mockRestore();
+  });
+
+  it('should chown nested directories and files recursively', () => {
+    // Create nested structure
+    const subDir = path.join(tmpDir, 'sub');
+    fs.mkdirSync(subDir);
+    fs.writeFileSync(path.join(tmpDir, 'file1.txt'), 'hello');
+    fs.writeFileSync(path.join(subDir, 'file2.txt'), 'world');
+
+    const chownedPaths: string[] = [];
+    const chownSyncSpy = vi.spyOn(fs, 'chownSync').mockImplementation((p) => {
+      chownedPaths.push(p as string);
+    });
+
+    chownRecursive(tmpDir, 1000, 1000);
+
+    expect(chownedPaths).toContain(tmpDir);
+    expect(chownedPaths).toContain(subDir);
+    expect(chownedPaths).toContain(path.join(tmpDir, 'file1.txt'));
+    expect(chownedPaths).toContain(path.join(subDir, 'file2.txt'));
+    expect(chownedPaths).toHaveLength(4);
+
+    chownSyncSpy.mockRestore();
+  });
+
+  it('should handle empty directory', () => {
+    const chownSyncSpy = vi.spyOn(fs, 'chownSync').mockImplementation(() => {});
+    chownRecursive(tmpDir, 1000, 1000);
+    // Should only chown the directory itself
+    expect(chownSyncSpy).toHaveBeenCalledTimes(1);
+    expect(chownSyncSpy).toHaveBeenCalledWith(tmpDir, 1000, 1000);
+    chownSyncSpy.mockRestore();
+  });
+});
+
+describe('readCgroupLimit', () => {
+  it('should read cgroup v2 numeric limit', () => {
+    const readSpy = vi
+      .spyOn(fs, 'readFileSync')
+      .mockReturnValue('1073741824\n');
+    const result = readCgroupLimit('/sys/fs/cgroup/memory.max');
+    expect(result).toBe(1073741824);
+    readSpy.mockRestore();
+  });
+
+  it('should return undefined for cgroup v2 "max" (unlimited)', () => {
+    const readSpy = vi.spyOn(fs, 'readFileSync').mockReturnValue('max\n');
+    const result = readCgroupLimit('/sys/fs/cgroup/memory.max');
+    expect(result).toBeUndefined();
+    readSpy.mockRestore();
+  });
+
+  it('should return undefined when file does not exist', () => {
+    const readSpy = vi.spyOn(fs, 'readFileSync').mockImplementation(() => {
+      throw new Error('ENOENT');
+    });
+    const result = readCgroupLimit('/sys/fs/cgroup/memory.max');
+    expect(result).toBeUndefined();
+    readSpy.mockRestore();
+  });
+
+  it('should return undefined for NaN content', () => {
+    const readSpy = vi.spyOn(fs, 'readFileSync').mockReturnValue('invalid\n');
+    const result = readCgroupLimit('/sys/fs/cgroup/memory.max');
+    expect(result).toBeUndefined();
+    readSpy.mockRestore();
+  });
+});
+
+describe('detectHeapSize', () => {
+  const originalEnv = process.env;
+
+  beforeEach(() => {
+    process.env = { ...originalEnv };
+  });
+
+  afterEach(() => {
+    process.env = originalEnv;
+  });
+
+  it('should use V8_MAX_HEAP_SIZE when set', () => {
+    process.env.V8_MAX_HEAP_SIZE = '512';
+    const readSpy = vi.spyOn(fs, 'readFileSync');
+    const result = detectHeapSize();
+    expect(result).toBe(512);
+    // Should not attempt to read cgroup files
+    expect(readSpy).not.toHaveBeenCalled();
+    readSpy.mockRestore();
+  });
+
+  it('should return undefined for invalid V8_MAX_HEAP_SIZE', () => {
+    process.env.V8_MAX_HEAP_SIZE = 'abc';
+    const readSpy = vi.spyOn(fs, 'readFileSync').mockImplementation(() => {
+      throw new Error('ENOENT');
+    });
+    const result = detectHeapSize();
+    expect(result).toBeUndefined();
+    readSpy.mockRestore();
+  });
+
+  it('should return undefined for empty V8_MAX_HEAP_SIZE', () => {
+    process.env.V8_MAX_HEAP_SIZE = '';
+    const readSpy = vi.spyOn(fs, 'readFileSync').mockImplementation(() => {
+      throw new Error('ENOENT');
+    });
+    const result = detectHeapSize();
+    expect(result).toBeUndefined();
+    readSpy.mockRestore();
+  });
+
+  it('should auto-calculate from cgroup v2 at 60%', () => {
+    delete process.env.V8_MAX_HEAP_SIZE;
+    // 1GB = 1073741824 bytes → 60% ≈ 614 MB
+    const readSpy = vi
+      .spyOn(fs, 'readFileSync')
+      .mockImplementation((filePath) => {
+        if (filePath === '/sys/fs/cgroup/memory.max') return '1073741824\n';
+        throw new Error('ENOENT');
+      });
+    const result = detectHeapSize();
+    expect(result).toBe(Math.floor((1073741824 / 1024 / 1024) * 0.6));
+    readSpy.mockRestore();
+  });
+
+  it('should fallback to cgroup v1 when v2 is unlimited', () => {
+    delete process.env.V8_MAX_HEAP_SIZE;
+    // v2 = max (unlimited), v1 = 2GB
+    const readSpy = vi
+      .spyOn(fs, 'readFileSync')
+      .mockImplementation((filePath) => {
+        if (filePath === '/sys/fs/cgroup/memory.max') return 'max\n';
+        if (filePath === '/sys/fs/cgroup/memory/memory.limit_in_bytes')
+          return '2147483648\n';
+        throw new Error('ENOENT');
+      });
+    const result = detectHeapSize();
+    expect(result).toBe(Math.floor((2147483648 / 1024 / 1024) * 0.6));
+    readSpy.mockRestore();
+  });
+
+  it('should treat cgroup v1 > 64GB as unlimited', () => {
+    delete process.env.V8_MAX_HEAP_SIZE;
+    const hugeValue = 128 * 1024 * 1024 * 1024; // 128GB
+    const readSpy = vi
+      .spyOn(fs, 'readFileSync')
+      .mockImplementation((filePath) => {
+        if (filePath === '/sys/fs/cgroup/memory.max') return 'max\n';
+        if (filePath === '/sys/fs/cgroup/memory/memory.limit_in_bytes')
+          return `${hugeValue}\n`;
+        throw new Error('ENOENT');
+      });
+    const result = detectHeapSize();
+    expect(result).toBeUndefined();
+    readSpy.mockRestore();
+  });
+
+  it('should return undefined when no cgroup limits detected', () => {
+    delete process.env.V8_MAX_HEAP_SIZE;
+    const readSpy = vi.spyOn(fs, 'readFileSync').mockImplementation(() => {
+      throw new Error('ENOENT');
+    });
+    const result = detectHeapSize();
+    expect(result).toBeUndefined();
+    readSpy.mockRestore();
+  });
+
+  it('should prioritize V8_MAX_HEAP_SIZE over cgroup', () => {
+    process.env.V8_MAX_HEAP_SIZE = '256';
+    const readSpy = vi
+      .spyOn(fs, 'readFileSync')
+      .mockReturnValue('1073741824\n');
+    const result = detectHeapSize();
+    expect(result).toBe(256);
+    // Should not have read cgroup files
+    expect(readSpy).not.toHaveBeenCalled();
+    readSpy.mockRestore();
+  });
+});
+
+describe('buildNodeFlags', () => {
+  const originalEnv = process.env;
+
+  beforeEach(() => {
+    process.env = { ...originalEnv };
+  });
+
+  afterEach(() => {
+    process.env = originalEnv;
+  });
+
+  it('should always include --expose_gc', () => {
+    const flags = buildNodeFlags(undefined);
+    expect(flags).toContain('--expose_gc');
+  });
+
+  it('should include --max-heap-size when heapSize is provided', () => {
+    const flags = buildNodeFlags(512);
+    expect(flags).toContain('--max-heap-size=512');
+  });
+
+  it('should not include --max-heap-size when heapSize is undefined', () => {
+    const flags = buildNodeFlags(undefined);
+    expect(flags.some((f) => f.startsWith('--max-heap-size'))).toBe(false);
+  });
+
+  it('should include --optimize-for-size when V8_OPTIMIZE_FOR_SIZE=true', () => {
+    process.env.V8_OPTIMIZE_FOR_SIZE = 'true';
+    const flags = buildNodeFlags(undefined);
+    expect(flags).toContain('--optimize-for-size');
+  });
+
+  it('should not include --optimize-for-size when V8_OPTIMIZE_FOR_SIZE is not true', () => {
+    process.env.V8_OPTIMIZE_FOR_SIZE = 'false';
+    const flags = buildNodeFlags(undefined);
+    expect(flags).not.toContain('--optimize-for-size');
+  });
+
+  it('should include --lite-mode when V8_LITE_MODE=true', () => {
+    process.env.V8_LITE_MODE = 'true';
+    const flags = buildNodeFlags(undefined);
+    expect(flags).toContain('--lite-mode');
+  });
+
+  it('should not include --lite-mode when V8_LITE_MODE is not true', () => {
+    delete process.env.V8_LITE_MODE;
+    const flags = buildNodeFlags(undefined);
+    expect(flags).not.toContain('--lite-mode');
+  });
+
+  it('should combine all flags when all options enabled', () => {
+    process.env.V8_OPTIMIZE_FOR_SIZE = 'true';
+    process.env.V8_LITE_MODE = 'true';
+    const flags = buildNodeFlags(256);
+    expect(flags).toContain('--expose_gc');
+    expect(flags).toContain('--max-heap-size=256');
+    expect(flags).toContain('--optimize-for-size');
+    expect(flags).toContain('--lite-mode');
+  });
+
+  it('should not use --max_old_space_size', () => {
+    const flags = buildNodeFlags(512);
+    expect(flags.some((f) => f.includes('max_old_space_size'))).toBe(false);
+  });
+});
+
+describe('setupDirectories', () => {
+  let tmpDir: string;
+
+  beforeEach(() => {
+    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'entrypoint-setup-'));
+  });
+
+  afterEach(() => {
+    fs.rmSync(tmpDir, { recursive: true, force: true });
+  });
+
+  it('should create uploads directory and symlink', () => {
+    const uploadsDir = path.join(tmpDir, 'data', 'uploads');
+    const publicUploads = path.join(tmpDir, 'public', 'uploads');
+    fs.mkdirSync(path.join(tmpDir, 'public'), { recursive: true });
+
+    const chownSyncSpy = vi.spyOn(fs, 'chownSync').mockImplementation(() => {});
+    const lchownSyncSpy = vi
+      .spyOn(fs, 'lchownSync')
+      .mockImplementation(() => {});
+
+    setupDirectories(
+      uploadsDir,
+      publicUploads,
+      path.join(tmpDir, 'bulk-export'),
+    );
+
+    expect(fs.existsSync(uploadsDir)).toBe(true);
+    expect(fs.lstatSync(publicUploads).isSymbolicLink()).toBe(true);
+    expect(fs.readlinkSync(publicUploads)).toBe(uploadsDir);
+
+    chownSyncSpy.mockRestore();
+    lchownSyncSpy.mockRestore();
+  });
+
+  it('should not recreate symlink if it already exists', () => {
+    const uploadsDir = path.join(tmpDir, 'data', 'uploads');
+    const publicUploads = path.join(tmpDir, 'public', 'uploads');
+    fs.mkdirSync(path.join(tmpDir, 'public'), { recursive: true });
+    fs.mkdirSync(uploadsDir, { recursive: true });
+    fs.symlinkSync(uploadsDir, publicUploads);
+
+    const symlinkSpy = vi.spyOn(fs, 'symlinkSync');
+    const chownSyncSpy = vi.spyOn(fs, 'chownSync').mockImplementation(() => {});
+    const lchownSyncSpy = vi
+      .spyOn(fs, 'lchownSync')
+      .mockImplementation(() => {});
+
+    setupDirectories(
+      uploadsDir,
+      publicUploads,
+      path.join(tmpDir, 'bulk-export'),
+    );
+
+    expect(symlinkSpy).not.toHaveBeenCalled();
+
+    symlinkSpy.mockRestore();
+    chownSyncSpy.mockRestore();
+    lchownSyncSpy.mockRestore();
+  });
+
+  it('should create bulk export directory with permissions', () => {
+    const bulkExportDir = path.join(tmpDir, 'bulk-export');
+    fs.mkdirSync(path.join(tmpDir, 'public'), { recursive: true });
+    const chownSyncSpy = vi.spyOn(fs, 'chownSync').mockImplementation(() => {});
+    const lchownSyncSpy = vi
+      .spyOn(fs, 'lchownSync')
+      .mockImplementation(() => {});
+
+    setupDirectories(
+      path.join(tmpDir, 'data', 'uploads'),
+      path.join(tmpDir, 'public', 'uploads'),
+      bulkExportDir,
+    );
+
+    expect(fs.existsSync(bulkExportDir)).toBe(true);
+    const stat = fs.statSync(bulkExportDir);
+    expect(stat.mode & 0o777).toBe(0o700);
+
+    chownSyncSpy.mockRestore();
+    lchownSyncSpy.mockRestore();
+  });
+});

+ 265 - 0
apps/app/docker/docker-entrypoint.ts

@@ -0,0 +1,265 @@
+/**
+ * Docker entrypoint for GROWI (TypeScript)
+ *
+ * Runs directly with Node.js 24 native type stripping.
+ * Uses only erasable TypeScript syntax (no enums, no namespaces).
+ *
+ * Responsibilities:
+ * - Directory setup (as root): /data/uploads, symlinks, /tmp/page-bulk-export
+ * - Heap size detection: V8_MAX_HEAP_SIZE → cgroup auto-calc → V8 default
+ * - Privilege drop: process.setgid + process.setuid (root → node)
+ * - Migration execution: execFileSync (no shell)
+ * - App process spawn: spawn with signal forwarding
+ */
+
+/** biome-ignore-all lint/suspicious/noConsole: Allow printing to console */
+
+import { execFileSync, spawn } from 'node:child_process';
+import fs from 'node:fs';
+
+// -- Constants --
+
+const NODE_UID = 1000;
+const NODE_GID = 1000;
+const CGROUP_V2_PATH = '/sys/fs/cgroup/memory.max';
+const CGROUP_V1_PATH = '/sys/fs/cgroup/memory/memory.limit_in_bytes';
+const CGROUP_V1_UNLIMITED_THRESHOLD = 64 * 1024 * 1024 * 1024; // 64GB
+const HEAP_RATIO = 0.6;
+
+// -- Exported utility functions --
+
+/**
+ * Recursively chown a directory and all its contents.
+ */
+export function chownRecursive(
+  dirPath: string,
+  uid: number,
+  gid: number,
+): void {
+  const entries = fs.readdirSync(dirPath, { withFileTypes: true });
+  for (const entry of entries) {
+    const fullPath = `${dirPath}/${entry.name}`;
+    if (entry.isDirectory()) {
+      chownRecursive(fullPath, uid, gid);
+    } else {
+      fs.chownSync(fullPath, uid, gid);
+    }
+  }
+  fs.chownSync(dirPath, uid, gid);
+}
+
+/**
+ * Read a cgroup memory limit file and return the numeric value in bytes.
+ * Returns undefined if the file cannot be read or the value is "max" / NaN.
+ */
+export function readCgroupLimit(filePath: string): number | undefined {
+  try {
+    const content = fs.readFileSync(filePath, 'utf-8').trim();
+    if (content === 'max') return undefined;
+    const value = parseInt(content, 10);
+    if (Number.isNaN(value)) return undefined;
+    return value;
+  } catch {
+    return undefined;
+  }
+}
+
+/**
+ * Detect heap size (MB) using 3-level fallback:
+ * 1. V8_MAX_HEAP_SIZE env var
+ * 2. cgroup v2/v1 auto-calculation (60% of limit)
+ * 3. undefined (V8 default)
+ */
+export function detectHeapSize(): number | undefined {
+  // Priority 1: V8_MAX_HEAP_SIZE env
+  const envValue = process.env.V8_MAX_HEAP_SIZE;
+  if (envValue != null && envValue !== '') {
+    const parsed = parseInt(envValue, 10);
+    if (Number.isNaN(parsed)) {
+      console.error(
+        `[entrypoint] V8_MAX_HEAP_SIZE="${envValue}" is not a valid number, ignoring`,
+      );
+      return undefined;
+    }
+    return parsed;
+  }
+
+  // Priority 2: cgroup v2
+  const cgroupV2 = readCgroupLimit(CGROUP_V2_PATH);
+  if (cgroupV2 != null) {
+    return Math.floor((cgroupV2 / 1024 / 1024) * HEAP_RATIO);
+  }
+
+  // Priority 3: cgroup v1 (treat > 64GB as unlimited)
+  const cgroupV1 = readCgroupLimit(CGROUP_V1_PATH);
+  if (cgroupV1 != null && cgroupV1 < CGROUP_V1_UNLIMITED_THRESHOLD) {
+    return Math.floor((cgroupV1 / 1024 / 1024) * HEAP_RATIO);
+  }
+
+  // Priority 4: V8 default
+  return undefined;
+}
+
+/**
+ * Build Node.js flags array based on heap size and environment variables.
+ */
+export function buildNodeFlags(heapSize: number | undefined): string[] {
+  const flags: string[] = ['--expose_gc'];
+
+  if (heapSize != null) {
+    flags.push(`--max-heap-size=${heapSize}`);
+  }
+
+  if (process.env.V8_OPTIMIZE_FOR_SIZE === 'true') {
+    flags.push('--optimize-for-size');
+  }
+
+  if (process.env.V8_LITE_MODE === 'true') {
+    flags.push('--lite-mode');
+  }
+
+  return flags;
+}
+
+/**
+ * Setup required directories (as root).
+ * - /data/uploads with symlink to ./public/uploads
+ * - /tmp/page-bulk-export with mode 700
+ */
+export function setupDirectories(
+  uploadsDir: string,
+  publicUploadsLink: string,
+  bulkExportDir: string,
+): void {
+  // /data/uploads
+  fs.mkdirSync(uploadsDir, { recursive: true });
+  if (!fs.existsSync(publicUploadsLink)) {
+    fs.symlinkSync(uploadsDir, publicUploadsLink);
+  }
+  chownRecursive(uploadsDir, NODE_UID, NODE_GID);
+  fs.lchownSync(publicUploadsLink, NODE_UID, NODE_GID);
+
+  // /tmp/page-bulk-export
+  fs.mkdirSync(bulkExportDir, { recursive: true });
+  chownRecursive(bulkExportDir, NODE_UID, NODE_GID);
+  fs.chmodSync(bulkExportDir, 0o700);
+}
+
+/**
+ * Drop privileges from root to node user.
+ * These APIs are POSIX-only and guaranteed to exist in the Docker container (Linux).
+ */
+export function dropPrivileges(): void {
+  if (process.setgid == null || process.setuid == null) {
+    throw new Error('Privilege drop APIs not available (non-POSIX platform)');
+  }
+  process.setgid(NODE_GID);
+  process.setuid(NODE_UID);
+}
+
+/**
+ * Log applied Node.js flags to stdout.
+ */
+function logFlags(heapSize: number | undefined, flags: string[]): void {
+  const source = (() => {
+    if (
+      process.env.V8_MAX_HEAP_SIZE != null &&
+      process.env.V8_MAX_HEAP_SIZE !== ''
+    ) {
+      return 'V8_MAX_HEAP_SIZE env';
+    }
+    if (heapSize != null) return 'cgroup auto-detection';
+    return 'V8 default (no heap limit)';
+  })();
+
+  console.log(`[entrypoint] Heap size source: ${source}`);
+  console.log(`[entrypoint] Node.js flags: ${flags.join(' ')}`);
+}
+
+/**
+ * Run database migration via execFileSync (no shell needed).
+ * Equivalent to: node -r dotenv-flow/config node_modules/migrate-mongo/bin/migrate-mongo up -f config/migrate-mongo-config.js
+ */
+function runMigration(): void {
+  console.log('[entrypoint] Running migration...');
+  execFileSync(
+    process.execPath,
+    [
+      '-r',
+      'dotenv-flow/config',
+      'node_modules/migrate-mongo/bin/migrate-mongo',
+      'up',
+      '-f',
+      'config/migrate-mongo-config.js',
+    ],
+    {
+      stdio: 'inherit',
+      env: { ...process.env, NODE_ENV: 'production' },
+    },
+  );
+  console.log('[entrypoint] Migration completed');
+}
+
+/**
+ * Spawn the application process and forward signals.
+ */
+function spawnApp(nodeFlags: string[]): void {
+  const child = spawn(
+    process.execPath,
+    [...nodeFlags, '-r', 'dotenv-flow/config', 'dist/server/app.js'],
+    {
+      stdio: 'inherit',
+      env: { ...process.env, NODE_ENV: 'production' },
+    },
+  );
+
+  // PID 1 signal forwarding
+  const signals: NodeJS.Signals[] = ['SIGTERM', 'SIGINT', 'SIGHUP'];
+  for (const sig of signals) {
+    process.on(sig, () => child.kill(sig));
+  }
+
+  child.on('exit', (code: number | null, signal: NodeJS.Signals | null) => {
+    process.exit(code ?? (signal === 'SIGTERM' ? 0 : 1));
+  });
+}
+
+// -- Main entrypoint --
+
+function main(): void {
+  try {
+    // Step 1: Directory setup (as root)
+    setupDirectories(
+      '/data/uploads',
+      './public/uploads',
+      '/tmp/page-bulk-export',
+    );
+
+    // Step 2: Detect heap size and build flags
+    const heapSize = detectHeapSize();
+    const nodeFlags = buildNodeFlags(heapSize);
+    logFlags(heapSize, nodeFlags);
+
+    // Step 3: Drop privileges (root → node)
+    dropPrivileges();
+
+    // Step 4: Run migration
+    runMigration();
+
+    // Step 5: Start application
+    spawnApp(nodeFlags);
+  } catch (err) {
+    console.error('[entrypoint] Fatal error:', err);
+    process.exit(1);
+  }
+}
+
+// Run main only when executed directly (not when imported for testing)
+const isMainModule =
+  process.argv[1] != null &&
+  (process.argv[1].endsWith('docker-entrypoint.ts') ||
+    process.argv[1].endsWith('docker-entrypoint.js'));
+
+if (isMainModule) {
+  main();
+}

+ 0 - 5
apps/app/next-env.d.ts

@@ -1,5 +0,0 @@
-/// <reference types="next" />
-/// <reference types="next/image-types/global" />
-
-// NOTE: This file should not be edited
-// see https://nextjs.org/docs/pages/building-your-application/configuring/typescript for more information.

+ 0 - 173
apps/app/next.config.js

@@ -1,173 +0,0 @@
-/**
- * == Notes for production build==
- * The modules required from this file must be transpiled before running `next build`.
- *
- * See: https://github.com/vercel/next.js/discussions/35969#discussioncomment-2522954
- */
-
-const path = require('node:path');
-
-const { withSuperjson } = require('next-superjson');
-const {
-  PHASE_PRODUCTION_BUILD,
-  PHASE_PRODUCTION_SERVER,
-} = require('next/constants');
-
-const getTranspilePackages = () => {
-  const { listPrefixedPackages } = require('./src/utils/next.config.utils');
-
-  const packages = [
-    // listing ESM packages until experimental.esmExternals works correctly to avoid ERR_REQUIRE_ESM
-    'react-markdown',
-    'unified',
-    'markdown-table',
-    'bail',
-    'ccount',
-    'character-entities',
-    'character-entities-html4',
-    'character-entities-legacy',
-    'comma-separated-tokens',
-    'decode-named-character-reference',
-    'devlop',
-    'fault',
-    'escape-string-regexp',
-    'hastscript',
-    'html-void-elements',
-    'is-absolute-url',
-    'is-plain-obj',
-    'longest-streak',
-    'micromark',
-    'property-information',
-    'space-separated-tokens',
-    'stringify-entities',
-    'trim-lines',
-    'trough',
-    'web-namespaces',
-    'vfile',
-    'vfile-location',
-    'vfile-message',
-    'zwitch',
-    'emoticon',
-    'direction', // for hast-util-select
-    'bcp-47-match', // for hast-util-select
-    'parse-entities',
-    'character-reference-invalid',
-    'is-hexadecimal',
-    'is-alphabetical',
-    'is-alphanumerical',
-    'github-slugger',
-    'html-url-attributes',
-    'estree-util-is-identifier-name',
-    'superjson',
-    ...listPrefixedPackages([
-      'remark-',
-      'rehype-',
-      'hast-',
-      'mdast-',
-      'micromark-',
-      'unist-',
-    ]),
-  ];
-
-  // const eazyLogger = require('eazy-logger');
-  // const logger = eazyLogger.Logger({
-  //   prefix: '[{green:next.config.js}] ',
-  //   useLevelPrefixes: false,
-  // });
-  // logger.info('{bold:Listing scoped packages for transpiling:}');
-  // logger.unprefixed('info', `{grey:${JSON.stringify(packages, null, 2)}}`);
-
-  return packages;
-};
-
-const optimizePackageImports = [
-  '@growi/core',
-  '@growi/editor',
-  '@growi/pluginkit',
-  '@growi/presentation',
-  '@growi/preset-themes',
-  '@growi/remark-attachment-refs',
-  '@growi/remark-drawio',
-  '@growi/remark-growi-directive',
-  '@growi/remark-lsx',
-  '@growi/slack',
-  '@growi/ui',
-];
-
-module.exports = async (phase) => {
-  const { i18n, localePath } = require('./config/next-i18next.config');
-
-  /** @type {import('next').NextConfig} */
-  const nextConfig = {
-    reactStrictMode: true,
-    poweredByHeader: false,
-    pageExtensions: ['page.tsx', 'page.ts', 'page.jsx', 'page.js'],
-    i18n,
-
-    // for build
-    typescript: {
-      tsconfigPath: 'tsconfig.build.client.json',
-    },
-    transpilePackages:
-      phase !== PHASE_PRODUCTION_SERVER ? getTranspilePackages() : undefined,
-    experimental: {
-      optimizePackageImports,
-    },
-
-    /** @param config {import('next').NextConfig} */
-    webpack(config, options) {
-      if (!options.isServer) {
-        // Avoid "Module not found: Can't resolve 'fs'"
-        // See: https://stackoverflow.com/a/68511591
-        config.resolve.fallback.fs = false;
-
-        // exclude packages from the output bundles
-        config.module.rules.push(
-          ...[
-            /dtrace-provider/,
-            /mongoose/,
-            /mathjax-full/, // required from marp
-          ].map((packageRegExp) => {
-            return {
-              test: packageRegExp,
-              use: 'null-loader',
-            };
-          }),
-        );
-      }
-
-      // extract sourcemap
-      if (options.dev) {
-        config.module.rules.push({
-          test: /.(c|m)?js$/,
-          exclude: [/node_modules/, path.resolve(__dirname)],
-          enforce: 'pre',
-          use: ['source-map-loader'],
-        });
-      }
-
-      // setup i18next-hmr
-      if (!options.isServer && options.dev) {
-        const { I18NextHMRPlugin } = require('i18next-hmr/webpack');
-        config.plugins.push(new I18NextHMRPlugin({ localesDir: localePath }));
-      }
-
-      return config;
-    },
-  };
-
-  // production server
-  // Skip withSuperjson() in production server phase because the pages directory
-  // doesn't exist in the production build and withSuperjson() tries to find it
-  if (phase === PHASE_PRODUCTION_SERVER) {
-    return nextConfig;
-  }
-
-  const withBundleAnalyzer = require('@next/bundle-analyzer')({
-    enabled:
-      phase === PHASE_PRODUCTION_BUILD &&
-      (process.env.ANALYZE === 'true' || process.env.ANALYZE === '1'),
-  });
-
-  return withBundleAnalyzer(withSuperjson()(nextConfig));
-};

+ 26 - 0
apps/app/next.config.prod.cjs

@@ -0,0 +1,26 @@
+/**
+ * Minimal Next.js config for production runtime.
+ *
+ * next.config.ts is the authoritative config used at build time (Turbopack rules,
+ * transpilePackages, sassOptions, etc.).  However, Next.js 16 tries to transpile
+ * .ts configs at server startup, which fails in production where TypeScript is not
+ * installed.  assemble-prod.sh therefore deletes next.config.ts and renames this
+ * file to next.config.js so the production server can load the runtime-critical
+ * settings (i18n routing, pageExtensions, …) without a TypeScript toolchain.
+ *
+ * Keep the runtime-relevant values in sync with next.config.ts.
+ */
+
+const nextI18nConfig = require('./config/next-i18next.config');
+
+const { i18n } = nextI18nConfig;
+
+/** @type {import('next').NextConfig} */
+module.exports = {
+  reactStrictMode: true,
+  poweredByHeader: false,
+  pageExtensions: ['page.tsx', 'page.ts', 'page.jsx', 'page.js'],
+  i18n,
+
+  serverExternalPackages: ['handsontable'],
+};

+ 146 - 0
apps/app/next.config.ts

@@ -0,0 +1,146 @@
+/**
+ * == Notes for production build==
+ * The modules required from this file must be transpiled before running `next build`.
+ *
+ * See: https://github.com/vercel/next.js/discussions/35969#discussioncomment-2522954
+ */
+
+import type { NextConfig } from 'next';
+import path from 'node:path';
+
+import nextI18nConfig from './config/next-i18next.config';
+import { listPrefixedPackages } from './src/utils/next.config.utils';
+
+const { i18n } = nextI18nConfig;
+
+const getTranspilePackages = (): string[] => {
+  const packages = [
+    // listing ESM packages until experimental.esmExternals works correctly to avoid ERR_REQUIRE_ESM
+    'react-markdown',
+    'unified',
+    'markdown-table',
+    'bail',
+    'ccount',
+    'character-entities',
+    'character-entities-html4',
+    'character-entities-legacy',
+    'comma-separated-tokens',
+    'decode-named-character-reference',
+    'devlop',
+    'fault',
+    'hastscript',
+    'html-void-elements',
+    'is-absolute-url',
+    'is-plain-obj',
+    'longest-streak',
+    'micromark',
+    'property-information',
+    'space-separated-tokens',
+    'stringify-entities',
+    'trim-lines',
+    'trough',
+    'web-namespaces',
+    'vfile',
+    'vfile-location',
+    'vfile-message',
+    'zwitch',
+    'emoticon',
+    'direction', // for hast-util-select
+    'bcp-47-match', // for hast-util-select
+    'parse-entities',
+    'character-reference-invalid',
+    'is-hexadecimal',
+    'is-alphabetical',
+    'is-alphanumerical',
+    'github-slugger',
+    'html-url-attributes',
+    'estree-util-is-identifier-name',
+    'superjson',
+    ...listPrefixedPackages([
+      'remark-',
+      'rehype-',
+      'hast-',
+      'mdast-',
+      'micromark-',
+      'unist-',
+    ]),
+  ];
+
+  return packages;
+};
+
+const optimizePackageImports: string[] = [
+  '@growi/core',
+  '@growi/editor',
+  '@growi/pluginkit',
+  '@growi/presentation',
+  '@growi/preset-themes',
+  '@growi/remark-attachment-refs',
+  '@growi/remark-drawio',
+  '@growi/remark-growi-directive',
+  '@growi/remark-lsx',
+  '@growi/slack',
+  '@growi/ui',
+];
+
+// This config is used at build time only (next build / next dev).
+// Production runtime uses next.config.prod.cjs (installed as next.config.js by assemble-prod.sh).
+const nextConfig: NextConfig = {
+  reactStrictMode: true,
+  poweredByHeader: false,
+  pageExtensions: ['page.tsx', 'page.ts', 'page.jsx', 'page.js'],
+  i18n,
+
+  serverExternalPackages: [
+    'handsontable', // Legacy v6.2.2 requires @babel/polyfill which is unavailable; client-only via dynamic import
+  ],
+
+  // for build
+  typescript: {
+    tsconfigPath: 'tsconfig.build.client.json',
+  },
+  transpilePackages: getTranspilePackages(),
+  sassOptions: {
+    loadPaths: [path.resolve(__dirname, 'src')],
+  },
+  experimental: {
+    optimizePackageImports,
+  },
+
+  turbopack: {
+    rules: {
+      // Server-only: auto-wrap getServerSideProps with SuperJSON serialization
+      '*.page.ts': [
+        {
+          condition: { not: 'browser' },
+          loaders: [
+            path.resolve(__dirname, 'src/utils/superjson-ssr-loader.ts'),
+          ],
+          as: '*.ts',
+        },
+      ],
+      '*.page.tsx': [
+        {
+          condition: { not: 'browser' },
+          loaders: [
+            path.resolve(__dirname, 'src/utils/superjson-ssr-loader.ts'),
+          ],
+          as: '*.tsx',
+        },
+      ],
+    },
+    resolveAlias: {
+      // Exclude fs from client bundle
+      fs: { browser: './src/lib/empty-module.ts' },
+      // Exclude server-only packages from client bundle
+      'dtrace-provider': { browser: './src/lib/empty-module.ts' },
+      mongoose: { browser: './src/lib/empty-module.ts' },
+      'i18next-fs-backend': { browser: './src/lib/empty-module.ts' },
+      bunyan: { browser: './src/lib/empty-module.ts' },
+      'bunyan-format': { browser: './src/lib/empty-module.ts' },
+      'core-js': { browser: './src/lib/empty-module.ts' },
+    },
+  },
+};
+
+export default nextConfig;

+ 61 - 53
apps/app/package.json

@@ -1,8 +1,8 @@
 {
   "name": "@growi/app",
-  "version": "7.4.5-RC.0",
+  "version": "7.5.0-RC.0",
   "license": "MIT",
-  "private": "true",
+  "private": true,
   "scripts": {
     "//// for production": "",
     "build": "run-p build:*",
@@ -10,15 +10,17 @@
     "build:client": "next build",
     "build:server": "cross-env NODE_ENV=production tspc -p tsconfig.build.server.json",
     "postbuild:server": "shx echo \"Listing files under transpiled\" && shx ls transpiled && shx rm -rf dist && shx mv transpiled/src dist && shx rm -rf transpiled",
-    "clean": "shx rm -rf dist transpiled",
+    "clean": "shx rm -rf dist transpiled .next next.config.js",
     "server": "cross-env NODE_ENV=production node -r dotenv-flow/config dist/server/app.js",
     "server:ci": "pnpm run server --ci",
     "preserver": "cross-env NODE_ENV=production pnpm run migrate",
-    "pre:styles": "vite build -c vite.styles-prebuilt.config.ts",
+    "pre:styles-commons": "vite build -c vite.vendor-styles-commons.ts",
+    "pre:styles-components": "vite build --config vite.vendor-styles-components.ts",
     "migrate": "node -r dotenv-flow/config node_modules/migrate-mongo/bin/migrate-mongo up -f config/migrate-mongo-config.js",
     "//// for development": "",
     "dev": "cross-env NODE_ENV=development nodemon --exec pnpm run ts-node --inspect src/server/app.ts",
-    "dev:pre:styles": "pnpm run pre:styles --mode dev",
+    "dev:pre:styles-commons": "pnpm run pre:styles-commons --mode dev",
+    "dev:pre:styles-components": "pnpm run pre:styles-components",
     "dev:migrate-mongo": "cross-env NODE_ENV=development pnpm run ts-node node_modules/migrate-mongo/bin/migrate-mongo",
     "dev:migrate": "pnpm run dev:migrate:status > tmp/cache/migration-status.out && pnpm run dev:migrate:up",
     "dev:migrate:status": "pnpm run dev:migrate-mongo status -f config/migrate-mongo-config.js",
@@ -32,6 +34,7 @@
     "lint:openapi:apiv3": "node node_modules/swagger2openapi/oas-validate tmp/openapi-spec-apiv3.json",
     "lint:openapi:apiv1": "node node_modules/swagger2openapi/oas-validate tmp/openapi-spec-apiv1.json",
     "lint": "run-p lint:**",
+    "prelint:typecheck": "next typegen",
     "prelint:openapi:apiv3": "pnpm run openapi:generate-spec:apiv3",
     "prelint:openapi:apiv1": "pnpm run openapi:generate-spec:apiv1",
     "test": "vitest run",
@@ -53,21 +56,25 @@
     "version:premajor": "pnpm version premajor --preid=RC"
   },
   "// comments for dependencies": {
-    "@aws-skd/*": "fix version above 3.186.0 that is required by mongodb@4.16.0",
-    "@keycloak/keycloak-admin-client": "19.0.0 or above exports only ESM.",
-    "escape-string-regexp": "5.0.0 or above exports only ESM",
-    "next-themes": "0.3.0 causes a type error: https://github.com/pacocoursey/next-themes/issues/122",
-    "string-width": "5.0.0 or above exports only ESM."
+    "@keycloak/keycloak-admin-client": "19.0.0 or above exports only ESM. API breaking changes require separate migration effort."
   },
   "dependencies": {
     "@akebifiky/remark-simple-plantuml": "^1.0.2",
-    "@aws-sdk/client-s3": "3.454.0",
-    "@aws-sdk/lib-storage": "3.454.0",
-    "@aws-sdk/s3-request-presigner": "3.454.0",
+    "@aws-sdk/client-s3": "^3.1014.0",
+    "@aws-sdk/lib-storage": "^3.1014.0",
+    "@aws-sdk/s3-request-presigner": "^3.1014.0",
     "@azure/identity": "^4.4.1",
     "@azure/openai": "^2.0.0",
     "@azure/storage-blob": "^12.16.0",
     "@browser-bunyan/console-formatted-stream": "^1.8.0",
+    "@codemirror/autocomplete": "^6.18.4",
+    "@codemirror/commands": "^6.8.0",
+    "@codemirror/lang-markdown": "^6.3.2",
+    "@codemirror/language": "^6.12.1",
+    "@codemirror/language-data": "^6.5.1",
+    "@codemirror/merge": "^6.8.0",
+    "@codemirror/state": "^6.5.2",
+    "@codemirror/view": "^6.39.14",
     "@cspell/dynamic-import": "^8.15.4",
     "@elastic/elasticsearch7": "npm:@elastic/elasticsearch@^7.17.4",
     "@elastic/elasticsearch8": "npm:@elastic/elasticsearch@^8.18.2",
@@ -75,6 +82,7 @@
     "@godaddy/terminus": "^4.9.0",
     "@google-cloud/storage": "^5.8.5",
     "@growi/core": "workspace:^",
+    "@growi/emoji-mart-data": "workspace:^",
     "@growi/pdf-converter-client": "workspace:^",
     "@growi/pluginkit": "workspace:^",
     "@growi/presentation": "workspace:^",
@@ -85,7 +93,13 @@
     "@growi/remark-growi-directive": "workspace:^",
     "@growi/remark-lsx": "workspace:^",
     "@growi/slack": "workspace:^",
+    "@handsontable/react": "=2.1.0",
+    "@headless-tree/core": "^1.5.3",
+    "@headless-tree/react": "^1.5.3",
     "@keycloak/keycloak-admin-client": "^18.0.0",
+    "@lezer/highlight": "^1.2.3",
+    "@marp-team/marp-core": "^3.9.1",
+    "@marp-team/marpit": "^2.6.1",
     "@opentelemetry/api": "^1.9.0",
     "@opentelemetry/auto-instrumentations-node": "^0.60.1",
     "@opentelemetry/exporter-metrics-otlp-grpc": "^0.202.0",
@@ -95,8 +109,12 @@
     "@opentelemetry/sdk-node": "^0.202.0",
     "@opentelemetry/sdk-trace-node": "^2.0.1",
     "@opentelemetry/semantic-conventions": "^1.34.0",
+    "@replit/codemirror-emacs": "^6.1.0",
+    "@replit/codemirror-vim": "^6.2.1",
+    "@replit/codemirror-vscode-keymap": "^6.0.2",
     "@slack/web-api": "^6.2.4",
     "@slack/webhook": "^6.0.0",
+    "@tanstack/react-virtual": "^3.13.12",
     "@types/async": "^3.2.24",
     "@types/multer": "^1.4.12",
     "JSONStream": "^1.3.5",
@@ -107,10 +125,12 @@
     "axios-retry": "^3.2.4",
     "babel-plugin-superjson-next": "^0.4.2",
     "body-parser": "^1.20.3",
+    "bootstrap": "^5.3.8",
     "browser-bunyan": "^1.8.0",
     "bson-objectid": "^2.0.4",
     "bunyan": "^1.8.15",
-    "check-node-version": "^4.2.1",
+    "cm6-theme-basic-light": "^0.2.0",
+    "codemirror": "^6.0.1",
     "compression": "^1.7.4",
     "connect-flash": "~0.1.1",
     "connect-mongo": "^4.6.0",
@@ -123,10 +143,11 @@
     "dayjs": "^1.11.7",
     "detect-indent": "^7.0.0",
     "diff": "^5.0.0",
+    "diff2html": "^3.4.47",
     "diff_match_patch": "^0.1.1",
     "dotenv-flow": "^3.2.0",
+    "downshift": "^8.2.3",
     "ejs": "^3.1.10",
-    "escape-string-regexp": "^4.0.0",
     "expose-gc": "^1.0.0",
     "express": "^4.20.0",
     "express-bunyan-logger": "^1.3.3",
@@ -134,6 +155,7 @@
     "express-session": "^1.16.1",
     "express-validator": "^6.14.0",
     "extensible-custom-error": "^0.0.7",
+    "fastest-levenshtein": "^1.0.16",
     "form-data": "^4.0.4",
     "graceful-fs": "^4.1.11",
     "hast-util-sanitize": "^5.0.1",
@@ -142,6 +164,8 @@
     "helmet": "^4.6.0",
     "http-errors": "^2.0.0",
     "i18next": "^23.16.5",
+    "i18next-http-backend": "^2.6.2",
+    "i18next-localstorage-backend": "^4.2.0",
     "i18next-resources-to-backend": "^1.2.1",
     "is-absolute-url": "^4.0.1",
     "is-iso-date": "^0.0.1",
@@ -169,14 +193,14 @@
     "mongoose-gridfs": "^1.3.0",
     "mongoose-paginate-v2": "^1.3.9",
     "mongoose-unique-validator": "^2.0.3",
+    "motion": "^12.35.0",
     "multer": "~1.4.0",
     "multer-autoreap": "^1.0.3",
     "mustache": "^4.2.0",
-    "next": "^14.2.35",
+    "next": "^16.2.1",
     "next-dynamic-loading-props": "^0.1.1",
     "next-i18next": "^15.3.1",
-    "next-superjson": "^1.0.7",
-    "next-themes": "^0.2.1",
+    "next-themes": "^0.4.6",
     "nocache": "^4.0.0",
     "node-cron": "^3.0.2",
     "nodemailer": "^6.9.15",
@@ -184,6 +208,7 @@
     "openai": "^4.96.2",
     "openid-client": "^5.4.0",
     "p-retry": "^4.0.0",
+    "pako": "^2.1.0",
     "passport": "^0.6.0",
     "passport-github": "^1.1.0",
     "passport-google-oauth20": "^2.0.0",
@@ -191,23 +216,30 @@
     "passport-local": "^1.0.0",
     "passport-saml": "^3.2.0",
     "pathe": "^2.0.3",
+    "pretty-bytes": "^6.1.1",
     "prop-types": "^15.8.1",
-    "qs": "^6.14.1",
+    "qs": "^6.14.2",
     "rate-limiter-flexible": "^2.3.7",
     "react": "^18.2.0",
     "react-bootstrap-typeahead": "^6.3.2",
     "react-card-flip": "^1.0.10",
+    "react-copy-to-clipboard": "^5.0.1",
     "react-datepicker": "^4.7.0",
     "react-disable": "^0.1.1",
+    "react-dnd": "^14.0.5",
+    "react-dnd-html5-backend": "^14.1.0",
     "react-dom": "^18.2.0",
+    "react-dropzone": "^14.2.3",
     "react-error-boundary": "^3.1.4",
+    "react-hook-form": "^7.45.4",
     "react-i18next": "^15.1.1",
     "react-image-crop": "^8.3.0",
+    "react-input-autosize": "^3.0.0",
     "react-markdown": "^9.0.1",
     "react-multiline-clamp": "^2.0.0",
-    "react-scroll": "^1.8.7",
     "react-stickynode": "^4.1.1",
-    "react-syntax-highlighter": "^15.5.0",
+    "react-syntax-highlighter": "^16.1.0",
+    "react-toastify": "^9.1.3",
     "react-use-ripple": "^1.5.2",
     "reactstrap": "^9.2.2",
     "reconnecting-websocket": "^4.4.0",
@@ -228,9 +260,11 @@
     "remark-parse": "^11.0.0",
     "remark-rehype": "^11.1.1",
     "remark-stringify": "^11.0.0",
+    "reveal.js": "^4.4.8",
     "sanitize-filename": "^1.6.3",
+    "simplebar-react": "^2.3.6",
     "socket.io": "^4.7.5",
-    "string-width": "=4.2.2",
+    "string-width": "^7.0.0",
     "superjson": "^2.2.2",
     "swagger-jsdoc": "^6.2.8",
     "swr": "^2.3.2",
@@ -250,31 +284,23 @@
     "validator": "^13.15.22",
     "ws": "^8.17.1",
     "xss": "^1.0.15",
+    "y-codemirror.next": "^0.3.5",
     "y-mongodb-provider": "^0.2.0",
-    "y-socket.io": "^1.1.3",
+    "y-websocket": "^2.0.4",
     "yjs": "^13.6.18",
     "zod": "^3.24.2"
   },
   "// comments for defDependencies": {
-    "bootstrap": "v5.3.3 has a bug. refs: https://github.com/twbs/bootstrap/issues/39798",
     "@handsontable/react": "v3 requires handsontable >= 7.0.0.",
-    "handsontable": "v7.0.0 or above is no loger MIT lisence.",
-    "mongodb": "mongoose which is used requires mongo@4.16.0."
+    "handsontable": "v7.0.0 or above is no loger MIT lisence."
   },
   "devDependencies": {
     "@apidevtools/swagger-parser": "^10.1.1",
-    "@codemirror/state": "^6.5.2",
-    "@emoji-mart/data": "^1.2.1",
     "@growi/core-styles": "workspace:^",
     "@growi/custom-icons": "workspace:^",
     "@growi/editor": "workspace:^",
     "@growi/ui": "workspace:^",
-    "@handsontable/react": "=2.1.0",
-    "@headless-tree/core": "^1.5.3",
-    "@headless-tree/react": "^1.5.3",
-    "@next/bundle-analyzer": "^14.1.3",
     "@popperjs/core": "^2.11.8",
-    "@tanstack/react-virtual": "^3.13.12",
     "@testing-library/jest-dom": "^6.5.0",
     "@testing-library/user-event": "^14.5.2",
     "@types/archiver": "^6.0.2",
@@ -285,10 +311,10 @@
     "@types/ldapjs": "^2.2.5",
     "@types/mdast": "^4.0.4",
     "@types/node-cron": "^3.0.11",
+    "@types/nodemailer": "6.4.22",
     "@types/react": "^18.2.14",
     "@types/react-dom": "^18.2.6",
     "@types/react-input-autosize": "^2.2.4",
-    "@types/react-scroll": "^1.8.4",
     "@types/react-stickynode": "^4.0.3",
     "@types/supertest": "^6.0.3",
     "@types/testing-library__dom": "^7.5.0",
@@ -299,20 +325,13 @@
     "@types/uuid": "^10.0.0",
     "@types/ws": "^8.18.1",
     "babel-loader": "^8.2.5",
-    "bootstrap": "=5.3.2",
     "commander": "^14.0.0",
     "connect-browser-sync": "^2.1.0",
-    "diff2html": "^3.4.47",
-    "downshift": "^8.2.3",
     "eazy-logger": "^3.1.0",
-    "fastest-levenshtein": "^1.0.16",
     "fslightbox-react": "^1.7.6",
     "handsontable": "=6.2.2",
     "happy-dom": "^15.7.4",
     "i18next-chained-backend": "^4.6.2",
-    "i18next-hmr": "^3.1.3",
-    "i18next-http-backend": "^2.6.2",
-    "i18next-localstorage-backend": "^4.2.0",
     "jotai-devtools": "^0.11.0",
     "load-css-file": "^1.0.0",
     "material-icons": "^1.11.3",
@@ -321,25 +340,14 @@
     "mongodb-connection-string-url": "^7.0.0",
     "mongodb-memory-server-core": "^9.1.1",
     "morgan": "^1.10.0",
-    "null-loader": "^4.0.1",
     "openapi-typescript": "^7.8.0",
-    "pretty-bytes": "^6.1.1",
-    "react-copy-to-clipboard": "^5.0.1",
-    "react-dnd": "^14.0.5",
-    "react-dnd-html5-backend": "^14.1.0",
-    "react-dropzone": "^14.2.3",
-    "react-hook-form": "^7.45.4",
-    "react-hotkeys": "^2.0.0",
-    "react-input-autosize": "^3.0.0",
-    "react-toastify": "^9.1.3",
     "rehype-rewrite": "^4.0.2",
     "remark-github-admonitions-to-directives": "^2.0.0",
     "sass": "^1.53.0",
-    "simplebar-react": "^2.3.6",
     "socket.io-client": "^4.7.5",
-    "source-map-loader": "^4.0.1",
     "supertest": "^7.1.4",
     "swagger2openapi": "^7.0.8",
+    "tinykeys": "^3.0.0",
     "unist-util-is": "^6.0.0",
     "unist-util-visit-parents": "^6.0.0"
   }

+ 1 - 1
apps/app/playwright.config.ts

@@ -52,7 +52,7 @@ export default defineConfig({
   reporter: process.env.CI ? [['github'], ['blob']] : 'list',
 
   webServer: {
-    command: 'pnpm run server',
+    command: process.env.GROWI_WEBSERVER_COMMAND ?? 'pnpm run server',
     url: 'http://localhost:3000',
     reuseExistingServer: !process.env.CI,
     stdout: 'ignore',

+ 21 - 0
apps/app/public/static/locales/en_US/admin.json

@@ -376,6 +376,15 @@
     "transmission_method": "Transmission Method",
     "smtp_label": "SMTP",
     "ses_label": "SES(AWS)",
+    "oauth2_label": "OAuth 2.0 (Google Workspace)",
+    "oauth2_description": "Configure OAuth 2.0 authentication for sending emails using Google Workspace. You need to create OAuth 2.0 credentials in Google Cloud Console and obtain a refresh token.",
+    "oauth2_user": "Email Address",
+    "oauth2_user_help": "The email address of the authorized Google account",
+    "oauth2_client_id": "Client ID",
+    "oauth2_client_secret": "Client Secret",
+    "oauth2_refresh_token": "Refresh Token",
+    "oauth2_refresh_token_help": "The refresh token obtained from OAuth 2.0 authorization flow",
+    "placeholder_leave_blank": "Leave blank to keep existing value",
     "send_test_email": "Send a test-email",
     "success_to_send_test_email": "Success to send a test-email",
     "smtp_settings": "SMTP settings",
@@ -447,6 +456,18 @@
       "tag_names": "Tag names",
       "tag_attributes": "Tag attributes",
       "import_recommended": "Import recommended {{target}}"
+    },
+    "content-disposition_header": "Content-Disposition Mime Type Settings",
+    "content-disposition_options": {
+      "add_header": "Add Mime Types",
+      "note": "Note: Adding a mime type will automatically remove it from the other list.",
+      "inline_header": "Inline Mime Types",
+      "attachment_header": "Attachment Mime Types",
+      "inline_button": "Inline",
+      "attachment_button": "Attachment",
+      "no_inline": "No inline types set.",
+      "no_attachment": "No attachment types set.",
+      "remove_button": "Remove"
     }
   },
   "customize_settings": {

+ 48 - 27
apps/app/public/static/locales/fr_FR/admin.json

@@ -15,8 +15,8 @@
     "security_settings": "Sécurité",
     "scope_of_page_disclosure": "Confidentialité de la page",
     "set_point": "Valeur",
-    "Guest Users Access": "Accès invité",
-    "readonly_users_access": "Accès des utilisateurs lecture seule",
+    "Guest Users Access": "Accès public",
+    "readonly_users_access": "Utilisateurs en lecture seule",
     "always_hidden": "Toujours caché",
     "always_displayed": "Toujours affiché",
     "Fixed by env var": "Configuré par la variable d'environnement <code>{{key}}={{value}}</code>.",
@@ -36,25 +36,25 @@
     "page_listing_2_desc": "Voir les pages restreintes au groupe utilisateur lors de la recherche",
     "page_access_rights": "Lecture",
     "page_delete_rights": "Suppression",
-    "page_delete": "Suppression de page",
-    "page_delete_completely": "Suppression complète de page",
-    "comment_manage_rights": "Droits de gestion des commentaires",
-    "other_options": "Paramètres supplémentaires",
-    "deletion_explanation": "Restreindre les utilisateurs pouvant supprimer une page.",
-    "complete_deletion_explanation": "Restreindre les utilisateurs pouvant supprimer complètement une page.",
+    "page_delete": "Corbeille",
+    "page_delete_completely": "Suppression définitive",
+    "comment_manage_rights": "Gestion des commentaires",
+    "other_options": "Autres options",
+    "deletion_explanation": "Restreindre les utilisateurs pouvant mettre à la corbeille une page.",
+    "complete_deletion_explanation": "Restreindre les utilisateurs pouvant supprimer définitivement une page.",
     "recursive_deletion_explain": "Restreindre les utilisateurs pouvant récursivement supprimer une page.",
     "recursive_complete_deletion_explain": "Restreindre les utilisateurs pouvant récursivement supprimer complètement une page.",
     "is_all_group_membership_required_for_page_complete_deletion": "L'utilisateur doit faire partie de tout les groupes ayant l'accès à la page",
     "is_all_group_membership_required_for_page_complete_deletion_explanation": "Effectif lorsque les paramètres de page sont \"Seulement groupes spécifiés\".",
-    "inherit": "Hériter(Utilise le même paramètre que pour une page)",
-    "admin_only": "Administrateur seulement",
+    "inherit": "Hériter",
+    "admin_only": "Administrateur",
     "admin_and_author": "Administrateur et auteur",
     "anyone": "Tout le monde",
     "user_homepage_deletion": {
-      "user_homepage_deletion": "Suppression de page d'accueil utilisateur",
-      "enable_user_homepage_deletion": "Suppression de page d'accueil utilisateur",
+      "user_homepage_deletion": "Page d'utilisateur",
+      "enable_user_homepage_deletion": "Autoriser la suppression",
       "enable_force_delete_user_homepage_on_user_deletion": "Supprimer la page d'accueil et ses pages enfants",
-      "desc": "Les pages d'accueil utilisateurs pourront être supprimées."
+      "desc": "La page d'accueuil d'un utilisateur supprimé sera supprimée."
     },
     "disable_user_pages": {
       "disable_user_pages": "Désactiver les pages utilisateur",
@@ -63,13 +63,13 @@
     },
     "session": "Session",
     "max_age": "Âge maximal (ms)",
-    "max_age_desc": "Spécifie (en milliseconde) l'âge maximal d'une session <br>Par défaut: 2592000000 (30 jours)",
-    "max_age_caution": "Un rédemarrage du serveur est nécessaire lorsque cette valeur est modifiée",
+    "max_age_desc": "L'âge maximal (en millisecondes) d'une session <br>Par défaut: 2592000000 (30 jours)",
+    "max_age_caution": "La modification de cette valeur nécessite un rédemarrage du serveur.",
     "forced_update_desc": "Ce paramètre à été modifié. Valeur précedente: ",
     "page_delete_rights_caution": "Lorsque \"Supprimer / Supprimer récursivement\" est activé, le paramètre is \"Supprimer / Supprimer complètement\" est écrasé. <br> <br> Administrateur seulement > Administrateur et auteur > Tout le monde",
     "Authentication mechanism settings": "Mécanisme d'authentification",
     "setup_is_not_yet_complete": "Configuration incomplète",
-    "xss_prevent_setting": "Prévenir les attaques XSS(Cross Site Scripting)",
+    "xss_prevent_setting": "Prévention des attaques XSS",
     "xss_prevent_setting_link": "Paramètres Markdown",
     "callback_URL": "URL de Callback",
     "providerName": "Nom du fournisseur",
@@ -89,8 +89,8 @@
     "updated_general_security_setting": "Paramètres mis à jour",
     "setup_not_completed_yet": "Configuration incomplète",
     "guest_mode": {
-      "deny": "Refuser (Utilisateurs inscrits seulement)",
-      "readonly": "Autoriser (Lecture seule)"
+      "deny": "Interdit",
+      "readonly": "Lecture seule"
     },
     "read_only_users_comment": {
       "deny": "Ne peut pas commenter",
@@ -298,7 +298,7 @@
     "normalize_description": "Réparer les indices cassés.",
     "rebuild": "Reconstruire",
     "rebuild_button": "Reconstruire",
-    "rebuild_description_1": "Reconstruire l'index est les données de pages",
+    "rebuild_description_1": "Reconstruire l'index et les données de pages",
     "rebuild_description_2": "Cela peut prendre un certain temps."
   },
   "mailer_setup_required": "La <a href='/admin/app'>configuration du SMTP</a> est requise.",
@@ -360,9 +360,9 @@
     "confidential_name": "Nom interne",
     "confidential_example": "ex): usage interne seulement",
     "default_language": "Langue par défaut",
-    "default_mail_visibility": "Mode d'affichage de l'adresse courriel",
-    "default_read_only_for_new_user": "Restriction d'édition pour les nouveaux utilisateurs",
-    "set_read_only_for_new_user": "Rendre les nouveaux utilisateurs en lecture seule",
+    "default_mail_visibility": "Confidentialité de l'adresse courriel",
+    "default_read_only_for_new_user": "Permissions des nouveaux utilisateurs",
+    "set_read_only_for_new_user": "Lecture seule",
     "file_uploading": "Téléversement de fichiers",
     "page_bulk_export_settings": "Paramètres d'exportation de pages par lots",
     "enable_page_bulk_export": "Activer l'exportation groupée",
@@ -376,6 +376,15 @@
     "transmission_method": "Mode",
     "smtp_label": "SMTP",
     "ses_label": "SES(AWS)",
+    "oauth2_label": "OAuth 2.0 (Google Workspace)",
+    "oauth2_description": "Configurez l'authentification OAuth 2.0 pour envoyer des courriels en utilisant Google Workspace. Vous devez créer des identifiants OAuth 2.0 dans la console Google Cloud et obtenir un jeton de rafraîchissement.",
+    "oauth2_user": "Adresse courriel",
+    "oauth2_user_help": "L'adresse courriel du compte Google autorisé",
+    "oauth2_client_id": "ID client",
+    "oauth2_client_secret": "Secret client",
+    "oauth2_refresh_token": "Jeton de rafraîchissement",
+    "oauth2_refresh_token_help": "Le jeton de rafraîchissement obtenu à partir du flux d'autorisation OAuth 2.0",
+    "placeholder_leave_blank": "Laisser vide pour conserver la valeur existante",
     "send_test_email": "Courriel d'essai",
     "success_to_send_test_email": "Courriel d'essai envoyé",
     "smtp_settings": "Configuration SMTP",
@@ -424,9 +433,9 @@
     "lineBreak_desc": "Conversion du saut de ligne automatique.",
     "lineBreak_options": {
       "enable_lineBreak": "Saut de ligne",
-      "enable_lineBreak_desc": "Convertir le saut de ligne<code>&lt;br&gt;</code>en HTML",
+      "enable_lineBreak_desc": "Convertir le saut de ligne vers un tag HTML <code>&lt;br&gt;</code>.",
       "enable_lineBreak_for_comment": "Saut de ligne dans les commentaires",
-      "enable_lineBreak_for_comment_desc": "Convertir le saut de ligne dans les commentaires<code>&lt;br&gt;</code>en HTML"
+      "enable_lineBreak_for_comment_desc": "Convertir le saut de ligne dans les commentaires vers un tag HTML <code>&lt;br&gt;</code>."
     },
     "indent_header": "Indentation",
     "indent_desc": "Taille d'indentation dans une page.",
@@ -447,6 +456,18 @@
       "tag_names": "Nom de tags",
       "tag_attributes": "Attributs de tags",
       "import_recommended": "Importer {{target}}"
+    },
+    "content-disposition_header": "Paramètres des types MIME Content-Disposition",
+    "content-disposition_options": {
+      "add_header": "Ajouter des types MIME",
+      "note": "Note : L'ajout d'un type MIME le supprimera automatiquement de l'autre liste.",
+      "inline_header": "Types MIME Inline",
+      "attachment_header": "Types MIME en pièce jointe",
+      "inline_button": "Inline",
+      "attachment_button": "Pièce jointe",
+      "no_inline": "Aucun type inline défini.",
+      "no_attachment": "Aucun type de pièce jointe défini.",
+      "remove_button": "Supprimer"
     }
   },
   "customize_settings": {
@@ -728,14 +749,14 @@
   },
   "user_management": {
     "user_management": "Utilisateurs",
-    "invite_users": "Nouvel utilisateur temporaire",
+    "invite_users": "Nouvel utilisateur",
     "click_twice_same_checkbox": "Il est nécessaire de sélectionner une option.",
     "status": "Statut",
     "invite_modal": {
       "emails": "Adresse(s) courriel(s) (Supporte l'usage de plusieurs lignes)",
-      "description1": "Créer des utilisateurs temporaires avec une adresse courriel.",
+      "description1": "Créer des utilisateurs avec une adresse courriel.",
       "description2": "Un mot de passe temporaire est généré automatiquement.",
-      "invite_thru_email": "Courriel d'invitation",
+      "invite_thru_email": "Envoyer une invitation",
       "mail_setting_link": "<span class='material-symbols-outlined me-2'>settings</span><a href='/admin/app'>Paramètres courriel</a>",
       "valid_email": "Adresse courriel valide requise",
      "temporary_password": "Cet utilisateur a un mot de passe temporaire",

+ 3 - 1
apps/app/public/static/locales/fr_FR/commons.json

@@ -1,10 +1,12 @@
 {
   "Show": "Afficher",
+  "View": "Lecture",
+  "Edit": "Écriture",
   "Hide": "Cacher",
   "Add": "Ajouter",
   "Insert": "Insérer",
   "Reset": "Réinitialiser",
-  "Sign out": "Se déconnecter",
+  "Sign out": "Déconnexion",
   "New": "Nouveau",
   "Delete": "Supprimer",
   "meta": {

+ 53 - 52
apps/app/public/static/locales/fr_FR/translation.json

@@ -16,7 +16,7 @@
   "tablet": "Tablette",
   "Click to copy": "Cliquer pour copier",
   "Rename": "Renommer",
-  "Move/Rename": "Déplacer/renommer",
+  "Move/Rename": "Déplacer",
   "Redirected": "Redirigé",
   "Unlinked": "Délié",
   "unlink_redirection": "Délier la redirection",
@@ -39,11 +39,11 @@
   "Category": "Catégorie",
   "User": "Utilisateur",
   "account_id": "Identifiant de compte",
-  "Update": "Mettre à jour",
+  "Update": "Enregistrer",
   "Update Page": "Mettre à jour la page",
   "Error": "Erreur",
   "Warning": "Avertissement",
-  "Sign in": "Se connecter",
+  "Sign in": "Connexion",
   "Sign in with External auth": "Se connecter avec {{signin}}",
   "Sign up is here": "Inscription",
   "Sign in is here": "Connexion",
@@ -61,15 +61,14 @@
   "History": "Historique",
   "attachment_data": "Pièces jointes",
   "No_attachments_yet": "Aucune pièce jointe.",
-  "Presentation Mode": "Mode présentation",
+  "Presentation Mode": "Présentation",
   "Not available for guest": "Indisponible pour les invités",
   "Not available in this version": "Indisponible dans cette version",
   "Not available when \"anyone with the link\" is selected": "Si \"Tous les utilisateurs disposant du lien\" est sélectionné, la portée ne peut pas être modifiée",
-  "No users have liked this yet": "Aucun utilisateur n'a aimé cette page",
   "No users have liked this yet.": "Aucun utilisateur n'a aimé cette page.",
   "No users have bookmarked yet": "Aucun utilisateur n'a mis en favoris cette page",
   "Create Archive Page": "Créer page d'archive",
-  "Create Sidebar Page": "Créer page <strong>/Sidebar</strong>",
+  "Create Sidebar Page": "Créer la page <strong>/Sidebar</strong>",
   "File type": "Type de fichier",
   "Target page": "Page cible",
   "Include Attachment File": "Inclure le fichier de pièces jointes",
@@ -87,7 +86,7 @@
   "Create/Edit Template": "Créer/Modifier page modèle",
   "Go to this version": "Voir cette version",
   "View diff": "Voir le diff",
-  "No diff": "Aucune différences",
+  "No diff": "Aucune différence",
   "Latest": "Dernière version",
   "User ID": "Identifiant utilisateur",
   "User Settings": "Paramètres utilisateur",
@@ -123,6 +122,8 @@
   "UserGroup": "Groupe utilisateur",
   "Basic Settings": "Paramètres de base",
   "The contents entered here will be shown in the header etc": "Le contenu entré ici sera visible dans l'en-tête",
+  "Browsing of this page is restricted": "Le contenu de cette page est restreint.",
+  "Forbidden": "Accès interdit",
   "Public": "Tout le monde",
   "Anyone with the link": "Tous les utilisateurs disposant du lien",
   "Specified users only": "Utilisateurs spécifiés",
@@ -230,7 +231,7 @@
     "notice": {
       "apitoken_issued": "Aucun jeton d'API existant.",
       "update_token1": "Un nouveau jeton peut être généré.",
-      "update_token2": "Modifiez le jeton aux endroits où celui-ci est utilisé."
+      "update_token2": "Modifiez le jeton aux endroits où celui-ci est utilisé, car l'ancien sera invalide."
     },
     "form_help": {}
   },
@@ -295,17 +296,17 @@
   "Other Settings": "Autres paramètres",
   "in_app_notification_settings": {
     "in_app_notification_settings": "Notifications",
-    "subscribe_settings": "Paramètres d'abonnement automatique aux notifications de pages",
+    "subscribe_settings": "Notifications d'application",
     "default_subscribe_rules": {
-      "page_create": "S'abonner aux modifications d'une page lors de sa création."
+      "page_create": "Abonne systématiquement l'utilisateur aux notifications de modification d'une page lors de sa création."
     }
   },
   "ui_settings": {
     "ui_settings": "Interface",
     "side_bar_mode": {
       "settings": "Paramètres navigation latérale",
-      "side_bar_mode_setting": "Épingler la navigation latérale",
-      "description": "Activer pour toujours afficher la barre de navigation latérale lorsque l'écran est large. Si la largeur d'écran est faible, le cas inverse est applicable."
+      "side_bar_mode_setting": "Épingler la barre latérale",
+      "description": "Épingle sur l'écran la barre de navigation latérale. Si la résolution de l'écran est trop faible, la barre latérale ne sera plus épinglée."
     }
   },
   "color_mode_settings": {
@@ -368,7 +369,7 @@
     "keymap": "Raccourcis",
     "indent": "Indentation",
     "paste": {
-      "title": "Comportement du collage",
+      "title": "Collage",
       "both": "Texte et fichier",
       "text": "Texte seulement",
       "file": "Fichier seulement"
@@ -377,7 +378,7 @@
     "editor_assistant": "Assistant d'édition",
     "Show active line": "Surligner la ligne active",
     "auto_format_table": "Formatter les tableaux",
-    "overwrite_scopes": "{{operation}} et écraser les scopes des pages enfants",
+    "overwrite_scopes": "{{operation}} et écraser les permissions des pages enfants",
     "notice": {
       "conflict": "Sauvegarde impossible, la page est en cours de modification par un autre utilisateur. Recharger la page."
     },
@@ -385,10 +386,10 @@
   },
   "page_comment": {
     "comments": "Commentaires",
-    "comment": "Commmenter",
-    "preview": "Prévisualiser",
-    "write": "Écrire",
-    "add_a_comment": "Ajouter un commentaire",
+    "comment": "Commenter",
+    "preview": "Visualiser",
+    "write": "Rédaction",
+    "add_a_comment": "Nouveau commentaire",
     "display_the_page_when_posting_this_comment": "Afficher la page en postant le commentaire",
     "no_user_found": "Aucun utilisateur trouvé",
     "reply": "Répondre",
@@ -408,22 +409,22 @@
     "revision": "Révision",
     "comparing_source": "Source",
     "comparing_target": "Cible",
-    "comparing_revisions": "Comparer les modifications",
+    "comparing_revisions": "Historique des modifications",
     "compare_latest": "Comparer avec la version la plus récente",
     "compare_previous": "Comparer avec la version précédente"
   },
   "modal_rename": {
     "label": {
-      "Move/Rename page": "Déplacer/renommer page",
-      "New page name": "Nouveau chemin",
+      "Move/Rename page": "Déplacer ou renommer la page",
+      "New page name": "Nouvel emplacement",
      "Failed to get subordinated pages": "échec de récupération des pages subordonnées",
       "Failed to get exist path": "échec de la récupération du chemin",
-      "Current page name": "Chemin actuel",
+      "Current page name": "Emplacement actuel",
       "Rename this page only": "Renommer cette page",
       "Force rename all child pages": "Forcer le renommage des pages",
       "Other options": "Autres options",
-      "Do not update metadata": "Ne pas modifier les métadonnées",
-      "Redirect": "Redirection automatique"
+      "Do not update metadata": "Conserver les métadonnées",
+      "Redirect": "Redirection"
     },
     "help": {
       "redirect": "Redirige automatiquement vers le nouveau chemin de la page.",
@@ -456,18 +457,18 @@
   },
   "modal_duplicate": {
     "label": {
-      "Duplicate page": "Dupliquer",
-      "New page name": "Nom de la page",
+      "Duplicate page": "Dupliquer la page",
+      "New page name": "Emplacement de la nouvelle page",
      "Failed to get subordinated pages": "échec de récupération des pages subordonnées",
-      "Current page name": "Nom de la page courante",
-      "Recursively": "Récursivement",
+      "Current page name": "Emplacement actuel",
+      "Recursively": "Récursif",
       "Duplicate without exist path": "Dupliquer sans le chemin existant",
       "Same page already exists": "Une page avec ce chemin existe déjà",
-      "Only duplicate user related pages": "Seul les pages dupliquées auquelles vous avez accès"
+      "Only duplicate user related pages": "Hériter les permissions"
     },
     "help": {
-      "recursive": "Dupliquer les pages enfants récursivement",
-      "only_inherit_user_related_groups": "Si la page est configuré en \"Seulement dans le groupe\", les groupes auxquels vous n'appartenez pas perdront l'accès aux pages dupliquées"
+      "recursive": "Duplique également les pages enfants.",
+      "only_inherit_user_related_groups": "Les pages restreintes à un groupe hériteront des permissions assignées."
     }
   },
   "duplicated_pages": "{{fromPath}} dupliquée",
@@ -704,20 +705,20 @@
   "template": {
     "modal_label": {
       "Select template": "Sélectionner modèle",
-      "Create/Edit Template Page": "Créer/modifier page modèle",
+      "Create/Edit Template Page": "Créer un modèle",
       "Create template under": "Créer une page modèle enfant"
     },
     "option_label": {
-      "create/edit": "Créer/modifier page modèle",
+      "create/edit": "Modèle",
       "select": "Sélectionner type de page modèle"
     },
     "children": {
       "label": "Modèle pour page enfant",
-      "desc": "Applicable aux pages de même niveau que la page modèle"
+      "desc": "Le modèle est appliqué aux pages du même niveau dans l'arbre."
     },
-    "decendants": {
+    "descendants": {
       "label": "Modèle pour descendants",
-      "desc": "Applicable aux page descendantes"
+      "desc": "Le modèle est appliqué à toutes les pages enfants."
     }
   },
   "sandbox": {
@@ -836,14 +837,14 @@
   "page_export": {
     "failed_to_export": "Échec de l'export",
     "failed_to_count_pages": "Échec du compte des pages",
-    "export_page_markdown": "Exporter la page en Markdown",
+    "export_page_markdown": "Exporter",
     "export_page_pdf": "Exporter la page en PDF",
-    "bulk_export": "Exporter la page et toutes les pages enfants",
+    "bulk_export": "Exporter les pages enfants",
     "bulk_export_download_explanation": "Une notification sera envoyée lorsque l’exportation sera terminée. Pour télécharger le fichier exporté, cliquez sur la notification.",
     "bulk_export_exec_time_warning": "Si le nombre de pages est important, l'exportation peut prendre un certain temps.",
-    "large_bulk_export_warning": "Pour préserver les ressources du système, veuillez éviter d'exporter un grand nombre de pages consécutivement",
+    "large_bulk_export_warning": "Pour préserver les ressources du système, veuillez éviter d'exporter un grand nombre de pages consécutivement.",
     "markdown": "Markdown",
-    "choose_export_format": "Sélectionnez le format d'exportation",
+    "choose_export_format": "Format de l'export",
    "bulk_export_started": "Patientez s'il vous plaît...",
     "bulk_export_download_expired": "La période de téléchargement a expiré",
     "bulk_export_job_expired": "Le traitement a été interrompu car le temps d'exportation était trop long",
@@ -978,7 +979,7 @@
     }
   },
   "tooltip": {
-    "like": "Like!",
+    "like": "Aimer",
     "cancel_like": "Annuler",
     "bookmark": "Ajouter aux favoris",
     "cancel_bookmark": "Retirer des favoris",
@@ -998,8 +999,8 @@
   },
   "user_home_page": {
     "bookmarks": "Favoris",
-    "recently_created": "Page récentes",
-    "recent_activity": "Activité récente",
+    "recently_created": "Pages récentes",
+    "recent_activity": "Modifications récentes",
     "unknown_action": "a effectué une modification non spécifiée",
     "page_create": "a créé une page",
     "page_update": "a mis à jour une page",
@@ -1034,7 +1035,7 @@
   },
   "tag_edit_modal": {
     "edit_tags": "Étiquettes",
-    "done": "Mettre à jour",
+    "done": "Assigner",
     "tags_input": {
       "tag_name": "Choisir ou créer une étiquette"
     }
@@ -1049,24 +1050,24 @@
     "select_page_location": "Sélectionner emplacement de la page"
   },
   "wip_page": {
-    "save_as_wip": "Sauvegarder comme brouillon",
+    "save_as_wip": "Enregistrer comme brouillon",
     "success_save_as_wip": "Sauvegardée en tant que brouillon",
     "fail_save_as_wip": "Échec de la sauvegarde du brouillon",
-    "alert": "Page en cours d'écriture",
-    "publish_page": "Publier page",
+    "alert": "Page en cours d'écriture.",
+    "publish_page": "Publier",
     "success_publish_page": "Page publiée",
     "fail_publish_page": "Échec de publication de la page"
   },
   "sidebar_header": {
-    "show_wip_page": "Voir brouillon",
+    "show_wip_page": "Inclure les brouillons",
     "compact_view": "Vue compacte"
   },
   "sync-latest-revision-body": {
     "menuitem": "Synchroniser avec la dernière révision",
-    "confirm": "Supprime les données en brouillon et synchronise avec la dernière révision. Synchroniser?",
-    "alert": "Il se peut que le texte le plus récent n'ait pas été synchronisé. Veuillez recharger et vérifier à nouveau.",
-    "success-toaster": "Dernier texte synchronisé",
-    "skipped-toaster": "L'éditeur n'est pas actif. Synchronisation annulée.",
+    "confirm": "Le contenu non enregistré sera écrasé. Synchroniser?",
+    "alert": "Certaines modifications n'ont pas été enregistrées. Veuillez rafraîchir votre onglet de navigateur.",
+    "success-toaster": "Dernière révision synchronisée",
+    "skipped-toaster": "Le mode édition doit être activé pour déclencher la synchronisation. Synchronisation annulée.",
     "error-toaster": "Synchronisation échouée"
   }
 }

+ 9 - 0
apps/app/public/static/locales/ja_JP/admin.json

@@ -385,6 +385,15 @@
     "transmission_method": "送信方法",
     "smtp_label": "SMTP",
     "ses_label": "SES(AWS)",
+    "oauth2_label": "OAuth 2.0 (Google Workspace)",
+    "oauth2_description": "Google Workspaceを使用してメールを送信するためのOAuth 2.0認証を設定します。Google Cloud ConsoleでOAuth 2.0認証情報を作成し、リフレッシュトークンを取得する必要があります。",
+    "oauth2_user": "メールアドレス",
+    "oauth2_user_help": "認証されたGoogleアカウントのメールアドレス",
+    "oauth2_client_id": "クライアントID",
+    "oauth2_client_secret": "クライアントシークレット",
+    "oauth2_refresh_token": "リフレッシュトークン",
+    "oauth2_refresh_token_help": "OAuth 2.0認証フローから取得したリフレッシュトークン",
+    "placeholder_leave_blank": "既存の値を保持する場合は空白のままにしてください",
     "send_test_email": "テストメールを送信",
     "success_to_send_test_email": "テストメールを送信しました。",
     "smtp_settings": "SMTP設定",

+ 21 - 0
apps/app/public/static/locales/ko_KR/admin.json

@@ -376,6 +376,15 @@
     "transmission_method": "전송 방식",
     "smtp_label": "SMTP",
     "ses_label": "SES(AWS)",
+    "oauth2_label": "OAuth 2.0 (Google Workspace)",
+    "oauth2_description": "Google Workspace를 사용하여 이메일을 보내기 위한 OAuth 2.0 인증을 구성합니다. Google Cloud Console에서 OAuth 2.0 자격 증명을 생성하고 갱신 토큰을 얻어야 합니다.",
+    "oauth2_user": "이메일 주소",
+    "oauth2_user_help": "인증된 Google 계정의 이메일 주소",
+    "oauth2_client_id": "클라이언트 ID",
+    "oauth2_client_secret": "클라이언트 시크릿",
+    "oauth2_refresh_token": "갱신 토큰",
+    "oauth2_refresh_token_help": "OAuth 2.0 인증 흐름에서 얻은 갱신 토큰",
+    "placeholder_leave_blank": "기존 값을 유지하려면 비워두세요",
     "send_test_email": "테스트 이메일 전송",
     "success_to_send_test_email": "테스트 이메일 전송 성공",
     "smtp_settings": "SMTP 설정",
@@ -447,6 +456,18 @@
       "tag_names": "태그 이름",
       "tag_attributes": "태그 속성",
       "import_recommended": "권장 {{target}} 가져오기"
+    },
+    "content-disposition_header": "Content-Disposition 마임 타입(MIME Type) 설정",
+    "content-disposition_options": {
+      "add_header": "마임 타입 추가",
+      "note": "참고: 마임 타입을 추가하면 다른 리스트에서 자동으로 제거됩니다.",
+      "inline_header": "인라인(Inline) 마임 타입",
+      "attachment_header": "파일 첨부(Attachment) 마임 타입",
+      "inline_button": "인라인",
+      "attachment_button": "파일 첨부",
+      "no_inline": "설정된 인라인 타입이 없습니다.",
+      "no_attachment": "설정된 파일 첨부 타입이 없습니다.",
+      "remove_button": "삭제"
     }
   },
   "customize_settings": {

+ 21 - 0
apps/app/public/static/locales/zh_CN/admin.json

@@ -384,6 +384,15 @@
     "transmission_method": "传送方法",
     "smtp_label": "SMTP",
     "ses_label": "SES(AWS)",
+    "oauth2_label": "OAuth 2.0 (Google Workspace)",
+    "oauth2_description": "配置 OAuth 2.0 身份验证以使用 Google Workspace 发送电子邮件。您需要在 Google Cloud Console 中创建 OAuth 2.0 凭据并获取刷新令牌。",
+    "oauth2_user": "电子邮件地址",
+    "oauth2_user_help": "已授权的 Google 帐户的电子邮件地址",
+    "oauth2_client_id": "客户端 ID",
+    "oauth2_client_secret": "客户端密钥",
+    "oauth2_refresh_token": "刷新令牌",
+    "oauth2_refresh_token_help": "从 OAuth 2.0 授权流程获得的刷新令牌",
+    "placeholder_leave_blank": "留空以保留现有值",
     "from_e-mail_address": "邮件发出地址",
     "send_test_email": "发送测试邮件",
     "success_to_send_test_email": "成功发送了一封测试邮件",
@@ -456,6 +465,18 @@
       "tag_names": "标记名",
       "tag_attributes": "标记属性",
       "import_recommended": "导入建议 {{target}}"
+    },
+    "content-disposition_header": "Content-Disposition MIME 类型设置",
+    "content-disposition_options": {
+      "add_header": "添加 MIME 类型",
+      "note": "注意:添加 MIME 类型将自动从另一个列表中将其删除。",
+      "inline_header": "内联 (Inline) MIME 类型",
+      "attachment_header": "附件 (Attachment) MIME 类型",
+      "inline_button": "内联",
+      "attachment_button": "附件",
+      "no_inline": "未设置内联类型。",
+      "no_attachment": "未设置附件类型。",
+      "remove_button": "移除"
     }
   },
   "customize_settings": {

برخی فایل ها در این مقایسه diff نمایش داده نمی شوند زیرا تعداد فایل ها بسیار زیاد است