Ver Fonte

Merge pull request #10958 from growilabs/support/migrate-to-pino

support: Migrate logger to pino
mergify[bot] há 9 horas
pai
commit
27ea8fe5cc
100 ficheiros alterados com 3055 adições e 349 exclusões
  1. 670 0
      .kiro/specs/migrate-logger-to-pino/design.md
  2. 156 0
      .kiro/specs/migrate-logger-to-pino/requirements.md
  3. 224 0
      .kiro/specs/migrate-logger-to-pino/research.md
  4. 23 0
      .kiro/specs/migrate-logger-to-pino/spec.json
  5. 263 0
      .kiro/specs/migrate-logger-to-pino/tasks.md
  6. 1 4
      apps/app/.claude/skills/build-optimization/SKILL.md
  7. 2 0
      apps/app/.gitignore
  8. 14 3
      apps/app/bin/postbuild-server.ts
  9. 5 1
      apps/app/config/logger/config.dev.ts
  10. 5 1
      apps/app/config/logger/config.prod.ts
  11. 0 3
      apps/app/next.config.ts
  12. 1 7
      apps/app/package.json
  13. 6 8
      apps/app/src/client/components/PageEditor/PageEditor.tsx
  14. 1 1
      apps/app/src/client/components/RecentActivity/RecentActivity.tsx
  15. 2 2
      apps/app/src/client/components/StickyStretchableScroller.tsx
  16. 7 3
      apps/app/src/features/admin/states/socket-io.ts
  17. 2 2
      apps/app/src/features/audit-log-bulk-export/server/service/audit-log-bulk-export-job-cron/index.ts
  18. 2 2
      apps/app/src/features/comment/server/models/comment.ts
  19. 24 15
      apps/app/src/features/growi-plugin/server/services/growi-plugin/growi-plugin.ts
  20. 2 2
      apps/app/src/features/openai/client/components/AiAssistant/AiAssistantSidebar/AiAssistantSidebar.tsx
  21. 2 2
      apps/app/src/features/openai/server/routes/edit/index.ts
  22. 22 13
      apps/app/src/features/openai/server/services/editor-assistant/llm-response-stream-processor.ts
  23. 49 31
      apps/app/src/features/openai/server/services/openai.ts
  24. 5 4
      apps/app/src/features/opentelemetry/server/custom-resource-attributes/application-resource-attributes.ts
  25. 1 1
      apps/app/src/features/opentelemetry/server/custom-resource-attributes/os-resource-attributes.ts
  26. 12 7
      apps/app/src/features/opentelemetry/server/logger.ts
  27. 1 1
      apps/app/src/features/opentelemetry/server/node-sdk-configuration.ts
  28. 1 1
      apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-cron/steps/compress-and-upload.ts
  29. 3 3
      apps/app/src/pages/common-props/commons.ts
  30. 3 3
      apps/app/src/pages/general-page/type-guards.ts
  31. 3 5
      apps/app/src/server/app.ts
  32. 11 21
      apps/app/src/server/crowi/index.ts
  33. 2 2
      apps/app/src/server/events/user.ts
  34. 4 1
      apps/app/src/server/middlewares/access-token-parser/api-token.ts
  35. 3 3
      apps/app/src/server/middlewares/apiv1-form-validator.ts
  36. 3 3
      apps/app/src/server/middlewares/apiv3-form-validator.ts
  37. 16 10
      apps/app/src/server/middlewares/certify-shared-page-attachment/validate-referer/validate-referer.ts
  38. 1 1
      apps/app/src/server/middlewares/login-required.ts
  39. 6 3
      apps/app/src/server/middlewares/safe-redirect.ts
  40. 1 1
      apps/app/src/server/models/activity.ts
  41. 1 1
      apps/app/src/server/models/external-account.ts
  42. 2 2
      apps/app/src/server/models/user-group-relation.ts
  43. 2 2
      apps/app/src/server/routes/apiv3/bookmark-folder.ts
  44. 10 10
      apps/app/src/server/routes/apiv3/g2g-transfer.ts
  45. 18 12
      apps/app/src/server/routes/apiv3/page/update-page.ts
  46. 2 2
      apps/app/src/server/service/config-manager/config-loader.ts
  47. 1 1
      apps/app/src/server/service/external-account.ts
  48. 1 1
      apps/app/src/server/service/file-uploader/gridfs.ts
  49. 38 26
      apps/app/src/server/service/mail/mail.ts
  50. 1 1
      apps/app/src/server/service/mail/oauth2.ts
  51. 1 1
      apps/app/src/server/service/mail/ses.ts
  52. 2 2
      apps/app/src/server/service/mail/smtp.ts
  53. 3 3
      apps/app/src/server/service/page/events/seen.ts
  54. 6 9
      apps/app/src/server/service/page/index.ts
  55. 7 8
      apps/app/src/server/service/s2s-messaging/nchan.ts
  56. 4 4
      apps/app/src/server/service/search-delegator/elasticsearch.ts
  57. 4 4
      apps/app/src/server/service/slack-integration.ts
  58. 3 3
      apps/app/src/server/service/socket-io/socket-io.ts
  59. 2 2
      apps/app/src/server/service/yjs/create-mongodb-persistence.ts
  60. 9 6
      apps/app/src/server/service/yjs/upgrade-handler.ts
  61. 2 2
      apps/app/src/server/service/yjs/yjs.ts
  62. 4 4
      apps/app/src/server/util/slack-legacy.ts
  63. 3 3
      apps/app/src/states/socket-io/global-socket.ts
  64. 5 10
      apps/app/src/utils/logger/index.ts
  65. 3 1
      apps/app/tsconfig.build.server.json
  66. 3 9
      apps/slackbot-proxy/package.json
  67. 6 20
      apps/slackbot-proxy/src/Server.ts
  68. 2 2
      apps/slackbot-proxy/src/config/logger/config.dev.ts
  69. 2 2
      apps/slackbot-proxy/src/config/logger/config.prod.ts
  70. 5 5
      apps/slackbot-proxy/src/controllers/growi-to-slack.ts
  71. 5 2
      apps/slackbot-proxy/src/controllers/slack.ts
  72. 1 1
      apps/slackbot-proxy/src/middlewares/slack-to-growi/authorizer.ts
  73. 1 1
      apps/slackbot-proxy/src/middlewares/slack-to-growi/join-to-conversation.ts
  74. 1 1
      apps/slackbot-proxy/src/services/RegisterService.ts
  75. 4 7
      apps/slackbot-proxy/src/utils/logger/index.ts
  76. 0 1
      package.json
  77. 1 0
      packages/logger/.gitignore
  78. 43 0
      packages/logger/package.json
  79. 130 0
      packages/logger/src/dev/bunyan-format.spec.ts
  80. 73 0
      packages/logger/src/dev/bunyan-format.ts
  81. 87 0
      packages/logger/src/dev/morgan-like-format-options.spec.ts
  82. 65 0
      packages/logger/src/dev/morgan-like-format-options.ts
  83. 92 0
      packages/logger/src/env-var-parser.spec.ts
  84. 33 0
      packages/logger/src/env-var-parser.ts
  85. 98 0
      packages/logger/src/http-logger.spec.ts
  86. 47 0
      packages/logger/src/http-logger.ts
  87. 9 0
      packages/logger/src/index.ts
  88. 103 0
      packages/logger/src/level-resolver.spec.ts
  89. 38 0
      packages/logger/src/level-resolver.ts
  90. 189 0
      packages/logger/src/logger-factory.spec.ts
  91. 90 0
      packages/logger/src/logger-factory.ts
  92. 73 0
      packages/logger/src/transport-factory.spec.ts
  93. 76 0
      packages/logger/src/transport-factory.ts
  94. 22 0
      packages/logger/src/types.ts
  95. 11 0
      packages/logger/tsconfig.json
  96. 37 0
      packages/logger/vite.config.ts
  97. 11 0
      packages/logger/vitest.config.ts
  98. 1 3
      packages/remark-attachment-refs/package.json
  99. 2 3
      packages/remark-attachment-refs/src/client/services/renderer/refs.ts
  100. 1 2
      packages/remark-attachment-refs/src/server/routes/refs.ts

+ 670 - 0
.kiro/specs/migrate-logger-to-pino/design.md

@@ -0,0 +1,670 @@
+# Design Document: migrate-logger-to-pino
+
+## Overview
+
+**Purpose**: This feature migrates GROWI's logging infrastructure from bunyan (with the custom `universal-bunyan` wrapper) to pino, delivering faster structured logging with a smaller dependency footprint.
+
+**Users**: All GROWI developers (logger consumers), operators (log level configuration), and the CI/CD pipeline (dependency management).
+
+**Impact**: Replaces 7 logging-related packages (`bunyan`, `universal-bunyan`, `bunyan-format`, `express-bunyan-logger`, `morgan`, `browser-bunyan`, `@browser-bunyan/console-formatted-stream`) with 3 (`pino`, `pino-pretty`, `pino-http`) plus a new shared package `@growi/logger`. Consumer applications import only `@growi/logger`; `pino-http` is encapsulated within the package.
+
+### Goals
+- Replace bunyan with pino across all apps and packages without functional degradation
+- Preserve namespace-based log level control (config files + env var overrides)
+- Eliminate morgan by consolidating HTTP logging into pino-http
+- Maintain OpenTelemetry diagnostic logger integration
+- Provide a shared `@growi/logger` package as the single logging entry point
+
+### Non-Goals
+- Changing log output semantics (field names, message format) beyond what pino naturally produces
+- Adding new logging capabilities (structured context propagation, remote log shipping)
+- Migrating to pino v10 (deferred until OTel instrumentation supports it)
+- Changing the namespace naming convention (e.g., `growi:service:page`)
+
+## Architecture
+
+### Existing Architecture Analysis
+
+The current logging stack has these layers:
+
+1. **universal-bunyan** — custom wrapper providing: namespace-based level control via config + env vars, platform detection (Node.js/browser), stream selection (bunyan-format for Node.js, ConsoleFormattedStream for browser), logger caching
+2. **Per-app loggerFactory** — thin wrapper that loads dev/prod config and delegates to universal-bunyan
+3. **bunyan / browser-bunyan** — underlying logger implementations
+4. **express-bunyan-logger / morgan** — HTTP request logging middleware
+
+Key patterns to preserve:
+- `loggerFactory(name: string): Logger` as the sole logger creation API
+- Hierarchical colon-delimited namespaces with glob pattern matching
+- Environment variables (`DEBUG`, `TRACE`, etc.) overriding config file levels
+- Dev: human-readable output; Prod: JSON output (toggleable via `FORMAT_NODE_LOG`)
+- Browser: console output with error-level default in production
+
+### Architecture Pattern & Boundary Map
+
+```mermaid
+graph TB
+    subgraph ConsumerApps[Consumer Applications]
+        App[apps/app]
+        Slackbot[apps/slackbot-proxy]
+    end
+
+    subgraph ConsumerPkgs[Consumer Packages]
+        Slack[packages/slack]
+        Remark[packages/remark-attachment-refs]
+    end
+
+    subgraph GrowiLogger[@growi/logger]
+        Factory[LoggerFactory]
+        LevelResolver[LevelResolver]
+        EnvParser[EnvVarParser]
+        TransportSetup[TransportFactory]
+        HttpLogger[HttpLoggerFactory]
+    end
+
+    subgraph External[External Packages]
+        Pino[pino v9.x]
+        PinoPretty[pino-pretty]
+        PinoHttp[pino-http]
+        Minimatch[minimatch]
+    end
+
+    App --> Factory
+    App --> HttpLogger
+    Slackbot --> Factory
+    Slackbot --> HttpLogger
+    Slack --> Factory
+    Remark --> Factory
+
+    Factory --> LevelResolver
+    Factory --> TransportSetup
+    LevelResolver --> EnvParser
+    LevelResolver --> Minimatch
+
+    Factory --> Pino
+    TransportSetup --> PinoPretty
+
+    HttpLogger --> Factory
+    HttpLogger --> PinoHttp
+```
+
+**Architecture Integration**:
+- Selected pattern: Wrapper package (`@growi/logger`) encapsulating pino configuration — mirrors universal-bunyan's role
+- Domain boundary: `@growi/logger` owns all logger creation, level resolution, and transport setup; consumer apps only call `loggerFactory(name)`
+- Existing patterns preserved: factory function signature, namespace conventions, config file structure
+- New components: `LevelResolver` (namespace-to-level matching), `TransportFactory` (dev/prod stream setup), `EnvVarParser` (env variable parsing)
+- Steering compliance: shared package in `packages/` follows monorepo conventions
+- **Dev-only isolation**: modules that are only used in development (`bunyan-format`, `morgan-like-format-options`) reside under `src/dev/` to make the boundary explicit; all are loaded via dynamic import, never statically bundled in production
+
+### Technology Stack
+
+| Layer | Choice / Version | Role in Feature | Notes |
+|-------|------------------|-----------------|-------|
+| Logging Core | pino v9.x | Structured JSON logger for Node.js and browser | Pinned to v9.x for OTel compatibility; see research.md |
+| Dev Formatting | pino-pretty v13.x | Human-readable log output in development | Used as transport (worker thread) |
+| HTTP Logging | pino-http v11.x | Express middleware for request/response logging | Dependency of @growi/logger; not directly imported by consumer apps |
+| Glob Matching | minimatch (existing) | Namespace pattern matching for level config | Already a transitive dependency via universal-bunyan |
+| Shared Package | @growi/logger | Logger factory with namespace/config/env support and HTTP middleware | New package in packages/logger/ |
+
+## System Flows
+
+### Logger Creation Flow
+
+```mermaid
+sequenceDiagram
+    participant App as Application Startup
+    participant Factory as LoggerFactory
+    participant Transport as pino.transport (Worker)
+    participant Root as Root pino Logger
+
+    App->>Factory: initializeLoggerFactory(options)
+    Factory->>Transport: pino.transport(config) — spawns ONE Worker thread
+    Transport-->>Factory: transport stream
+    Factory->>Root: pino({ level: 'trace' }, transport)
+    Root-->>Factory: rootLogger stored in module scope
+```
+
+```mermaid
+sequenceDiagram
+    participant Consumer as Consumer Module
+    participant Factory as LoggerFactory
+    participant Cache as Logger Cache
+    participant Resolver as LevelResolver
+    participant Root as Root pino Logger
+
+    Consumer->>Factory: loggerFactory(namespace)
+    Factory->>Cache: lookup(namespace)
+    alt Cache hit
+        Cache-->>Factory: cached child logger
+    else Cache miss
+        Factory->>Resolver: resolveLevel(namespace, config, envOverrides)
+        Resolver-->>Factory: resolved level
+        Factory->>Root: rootLogger.child({ name: namespace })
+        Root-->>Factory: child logger (shares Worker thread)
+        Factory->>Factory: childLogger.level = resolved level
+        Factory->>Cache: store(namespace, childLogger)
+    end
+    Factory-->>Consumer: Logger
+```
+
+### Level Resolution Flow
+
+```mermaid
+flowchart TD
+    Start[resolveLevel namespace] --> EnvCheck{Env var match?}
+    EnvCheck -->|Yes| EnvLevel[Use env var level]
+    EnvCheck -->|No| ConfigCheck{Config pattern match?}
+    ConfigCheck -->|Yes| ConfigLevel[Use config level]
+    ConfigCheck -->|No| DefaultLevel[Use config default level]
+
+    EnvLevel --> Done[Return level]
+    ConfigLevel --> Done
+    DefaultLevel --> Done
+```
+
+## Requirements Traceability
+
+| Requirement | Summary | Components | Interfaces | Flows |
+|-------------|---------|------------|------------|-------|
+| 1.1–1.4 | Logger factory with namespace support | LoggerFactory, LoggerCache | `loggerFactory()` | Logger Creation |
+| 2.1–2.4 | Config-file level control | LevelResolver, ConfigLoader | `LoggerConfig` type | Level Resolution |
+| 3.1–3.5 | Env var level override | EnvVarParser, LevelResolver | `parseEnvLevels()` | Level Resolution |
+| 4.1–4.4 | Platform-aware logger | LoggerFactory, TransportFactory | `createTransport()` | Logger Creation |
+| 5.1–5.4 | Dev/prod output formatting | TransportFactory | `TransportOptions` | Logger Creation |
+| 6.1–6.4 | HTTP request logging | HttpLoggerMiddleware | `createHttpLogger()` | — |
+| 7.1–7.3 | OpenTelemetry integration | DiagLoggerPinoAdapter | `DiagLogger` interface | — |
+| 8.1–8.5 | Multi-app consistency | @growi/logger package | Package exports | — |
+| 9.1–9.3 | Dependency cleanup | — (removal task) | — | — |
+| 10.1–10.3 | Backward-compatible API | LoggerFactory | `Logger` type export | — |
+| 11.1–11.4 | Pino performance preservation | LoggerFactory | `initializeLoggerFactory`, shared root logger | Logger Creation |
+| 12.1–12.8 | Bunyan-like output format | BunyanFormatTransport, TransportFactory | Custom transport target | Logger Creation |
+| 13.1–13.6 | HTTP logger encapsulation | HttpLoggerFactory | `createHttpLoggerMiddleware()` | — |
+
+## Components and Interfaces
+
+| Component | Domain/Layer | Intent | Req Coverage | Key Dependencies | Contracts |
+|-----------|-------------|--------|--------------|-----------------|-----------|
+| LoggerFactory | @growi/logger / Core | Create and cache namespace-bound pino loggers | 1, 4, 8, 10, 11 | pino (P0), LevelResolver (P0), TransportFactory (P0) | Service |
+| LevelResolver | @growi/logger / Core | Resolve log level for a namespace from config + env | 2, 3 | minimatch (P0), EnvVarParser (P0) | Service |
+| EnvVarParser | @growi/logger / Core | Parse env vars into namespace-level map | 3 | — | Service |
+| TransportFactory | @growi/logger / Core | Create pino transport/options for Node.js and browser | 4, 5, 12 | pino-pretty (P1) | Service |
+| BunyanFormatTransport | @growi/logger / Transport | Custom pino transport producing bunyan-format "short" output | 12 | pino-pretty (P1) | Transport |
+| HttpLoggerFactory | @growi/logger / Core | Factory for pino-http Express middleware | 6, 13 | pino-http (P0), LoggerFactory (P0) | Service |
+| DiagLoggerPinoAdapter | apps/app / OpenTelemetry | Wrap pino logger as OTel DiagLogger | 7 | pino (P0) | Service |
+| ConfigLoader | Per-app | Load dev/prod config files | 2 | — | — |
+
+### @growi/logger Package
+
+#### LoggerFactory
+
+| Field | Detail |
+|-------|--------|
+| Intent | Central entry point for creating namespace-bound pino loggers with level resolution and caching |
+| Requirements | 1.1, 1.2, 1.3, 1.4, 4.1, 8.5, 10.1, 10.3 |
+
+**Responsibilities & Constraints**
+- Create pino logger instances with resolved level and transport configuration
+- Cache logger instances per namespace to ensure singleton behavior
+- Detect platform (Node.js vs browser) and apply appropriate configuration
+- Expose `loggerFactory(name: string): pino.Logger` as the public API
+
+**Dependencies**
+- Outbound: LevelResolver — resolve level for namespace (P0)
+- Outbound: TransportFactory — create transport options (P0)
+- External: pino v9.x — logger creation (P0)
+
+**Contracts**: Service [x]
+
+##### Service Interface
+
+```typescript
+import type { Logger } from 'pino';
+
+interface LoggerConfig {
+  [namespacePattern: string]: string; // pattern → level ('info', 'debug', etc.)
+}
+
+interface LoggerFactoryOptions {
+  config: LoggerConfig;
+}
+
+/**
+ * Initialize the logger factory module with configuration.
+ * Must be called once at application startup before any loggerFactory() calls.
+ */
+function initializeLoggerFactory(options: LoggerFactoryOptions): void;
+
+/**
+ * Create or retrieve a cached pino logger for the given namespace.
+ */
+function loggerFactory(name: string): Logger;
+```
+
+- Preconditions: `initializeLoggerFactory()` called before first `loggerFactory()` call
+- Postconditions: Returns a pino.Logger bound to the namespace with resolved level
+- Invariants: Same namespace always returns the same logger instance
+
+**Implementation Notes**
+- `initializeLoggerFactory` is called once per app at startup, receiving the merged dev/prod config
+- Browser detection: `typeof window !== 'undefined' && typeof window.document !== 'undefined'`
+- In browser mode, skip transport setup and use pino's built-in `browser` option
+- The factory is a module-level singleton (module scope cache + config)
+- **Performance critical**: `pino.transport()` spawns a Worker thread. It MUST be called **once** inside `initializeLoggerFactory`, not inside `loggerFactory`. Each `loggerFactory(name)` call creates a child logger via `rootLogger.child({ name })` which shares the single Worker thread. Calling `pino.transport()` per namespace would spawn N Worker threads for N namespaces, negating pino's core performance advantage.
+
+#### LevelResolver
+
+| Field | Detail |
+|-------|--------|
+| Intent | Determine the effective log level for a given namespace by matching against config patterns and env var overrides |
+| Requirements | 2.1, 2.3, 2.4, 3.1, 3.2, 3.3, 3.4, 3.5 |
+
+**Responsibilities & Constraints**
+- Match namespace against glob patterns in config (using minimatch)
+- Match namespace against env var-derived patterns (env vars take precedence)
+- Return the most specific matching level, or the `default` level as fallback
+- Parse is done once at module initialization; resolution is per-namespace at logger creation time
+
+**Dependencies**
+- Outbound: EnvVarParser — get env-derived level map (P0)
+- External: minimatch — glob pattern matching (P0)
+
+**Contracts**: Service [x]
+
+##### Service Interface
+
+```typescript
+interface LevelResolver {
+  /**
+   * Resolve the log level for a namespace.
+   * Priority: env var match > config pattern match > config default.
+   */
+  resolveLevel(
+    namespace: string,
+    config: LoggerConfig,
+    envOverrides: LoggerConfig,
+  ): string;
+}
+```
+
+- Preconditions: `config` contains a `default` key
+- Postconditions: Returns a valid pino log level string
+- Invariants: Env overrides always take precedence over config
+
+#### EnvVarParser
+
+| Field | Detail |
+|-------|--------|
+| Intent | Parse environment variables (DEBUG, TRACE, INFO, WARN, ERROR, FATAL) into a namespace-to-level map |
+| Requirements | 3.1, 3.4, 3.5 |
+
+**Responsibilities & Constraints**
+- Read `process.env.DEBUG`, `process.env.TRACE`, etc.
+- Split comma-separated values into individual namespace patterns
+- Return a flat `LoggerConfig` map: `{ 'growi:*': 'debug', 'growi:service:page': 'trace' }`
+- Parsed once at module load time (not per-logger)
+
+**Contracts**: Service [x]
+
+##### Service Interface
+
+```typescript
+/**
+ * Parse log-level environment variables into a namespace-to-level map.
+ * Reads: DEBUG, TRACE, INFO, WARN, ERROR, FATAL from process.env.
+ */
+function parseEnvLevels(): LoggerConfig;
+```
+
+- Preconditions: Environment is available (`process.env`)
+- Postconditions: Returns a map where each key is a namespace pattern and value is a level string
+- Invariants: Only the six known env vars are read; unknown vars are ignored
+
+#### TransportFactory
+
+| Field | Detail |
+|-------|--------|
+| Intent | Create pino transport configuration appropriate for the current environment |
+| Requirements | 4.1, 4.2, 4.3, 4.4, 5.1, 5.2, 5.3, 5.4, 12.1, 12.6, 12.7, 12.8 |
+
+**Responsibilities & Constraints**
+- Node.js development: return BunyanFormatTransport config (`singleLine: false`) — **dev only, not imported in production**
+- Node.js production + `FORMAT_NODE_LOG`: return standard `pino-pretty` transport with `singleLine: true` (not bunyan-format)
+- Node.js production default: return raw JSON (stdout) — no transport
+- Browser: return pino `browser` option config (console output, production error-level default)
+- Include `name` field in all output via pino's `name` option
+
+**Contracts**: Service [x]
+
+##### Service Interface
+
+```typescript
+import type { LoggerOptions } from 'pino';
+
+interface TransportConfig {
+  /** Pino options for Node.js environment */
+  nodeOptions: Partial<LoggerOptions>;
+  /** Pino options for browser environment */
+  browserOptions: Partial<LoggerOptions>;
+}
+
+/**
+ * Create transport configuration based on environment.
+ * @param isProduction - Whether NODE_ENV is 'production'
+ */
+function createTransportConfig(isProduction: boolean): TransportConfig;
+```
+
+- Preconditions: Called during logger factory initialization
+- Postconditions: Returns valid pino options for the detected environment
+- Invariants: Browser options never include Node.js transports
+
+**Implementation Notes**
+- Dev transport: `{ target: '<resolved-path>/dev/bunyan-format.js' }` — target path resolved via `path.join(path.dirname(fileURLToPath(import.meta.url)), 'dev', 'bunyan-format.js')`; no `options` passed (singleLine defaults to false inside the module)
+- Prod with FORMAT_NODE_LOG: `{ target: 'pino-pretty', options: { translateTime: 'SYS:standard', ignore: 'pid,hostname', singleLine: true } }` — standard pino-pretty, no custom prettifiers
+- Prod without FORMAT_NODE_LOG (or false): raw JSON to stdout (no transport)
+- Browser production: `{ browser: { asObject: false }, level: 'error' }`
+- Browser development: `{ browser: { asObject: false } }` (inherits resolved level)
+- **Important**: The bunyan-format transport path is only resolved/referenced in the dev branch, ensuring the module is never imported in production
+
+#### BunyanFormatTransport
+
+| Field | Detail |
+|-------|--------|
+| Intent | Custom pino transport that produces bunyan-format "short" mode output (development only) |
+| Requirements | 12.1, 12.2, 12.3, 12.4, 12.5, 12.6, 12.7 |
+
+**Responsibilities & Constraints**
+- Loaded by `pino.transport()` in a Worker thread — must be a module file, not inline functions
+- Uses pino-pretty internally with `customPrettifiers` to match bunyan-format "short" layout
+- **Development only**: This module is only referenced by TransportFactory in the dev branch; never imported in production
+
+**Dependencies**
+- External: pino-pretty v13.x (P1) — used internally for colorization and base formatting
+
+**Contracts**: Transport [x]
+
+##### Transport Module
+
+```typescript
+// packages/logger/src/dev/bunyan-format.ts
+// Default export: function(opts) → Writable stream (pino transport protocol)
+
+interface BunyanFormatOptions {
+  singleLine?: boolean;
+  colorize?: boolean;
+  destination?: NodeJS.WritableStream;
+}
+```
+
+**Implementation Notes**
+- Uses `messageFormat` in pino-pretty to produce the full line: timestamp + level + name + message
+- `ignore: 'pid,hostname,name,req,res,responseTime'` — suppresses pino-http's verbose req/res objects in dev; the morgan-like `customSuccessMessage` already provides method/URL/status/time on the same line
+- `customPrettifiers: { time: () => '', level: () => '' }` — suppresses pino-pretty's default time/level rendering (handled inside `messageFormat`)
+- Level right-alignment and colorization are implemented inside `messageFormat` using ANSI codes
+- `singleLine` defaults to `false` inside the module; no `options` need to be passed from TransportFactory
+- Since the transport is a separate module loaded by the Worker thread, function options work (no serialization issue)
+- Vite's `preserveModules` ensures `src/dev/bunyan-format.ts` → `dist/dev/bunyan-format.js`
+- `NO_COLOR` environment variable is respected to disable colorization
+
+##### Output Examples
+
+**Dev** (bunyan-format, singleLine: false):
+```
+10:06:30.419Z DEBUG growi:service:PassportService: LdapStrategy: serverUrl is invalid
+10:06:30.420Z  WARN growi:service:PassportService: SamlStrategy: cert is not set.
+    extra: {"field":"value"}
+```
+
+**Dev HTTP log** (bunyan-format + morgan-like format, req/res suppressed):
+```
+10:06:30.730Z  INFO express: GET /applicable-grant?pageId=abc 304 - 16ms
+```
+
+**Prod + FORMAT_NODE_LOG** (standard pino-pretty, singleLine: true):
+```
+[2026-03-30 12:00:00.000] INFO (growi:service:search): Elasticsearch is enabled
+```
+
+**Prod default**: raw JSON (no transport, unchanged)
+
+### HTTP Logging Layer
+
+#### HttpLoggerFactory
+
+| Field | Detail |
+|-------|--------|
+| Intent | Encapsulate pino-http middleware creation within @growi/logger so consumers don't depend on pino-http |
+| Requirements | 6.1, 6.2, 6.3, 6.4, 13.1, 13.2, 13.3, 13.4, 13.5, 13.6 |
+
+**Responsibilities & Constraints**
+- Create pino-http middleware using a logger from LoggerFactory
+- In development mode: dynamically import and apply `morganLikeFormatOptions` (customSuccessMessage, customErrorMessage, customLogLevel)
+- In production mode: use pino-http's default message format (no morgan-like module imported)
+- Accept optional `autoLogging` configuration for route filtering
+- Return Express-compatible middleware
+- Encapsulate `pino-http` as an internal dependency of `@growi/logger`
+
+**Dependencies**
+- External: pino-http v11.x (P0)
+- Inbound: LoggerFactory — provides base logger (P0)
+
+**Contracts**: Service [x]
+
+##### Service Interface
+
+```typescript
+import type { RequestHandler } from 'express';
+
+interface HttpLoggerOptions {
+  /** Logger namespace, defaults to 'express' */
+  namespace?: string;
+  /** Auto-logging configuration (e.g., route ignore patterns) */
+  autoLogging?: {
+    ignore: (req: { url?: string }) => boolean;
+  };
+}
+
+/**
+ * Create Express middleware for HTTP request logging.
+ * In dev: uses pino-http with morgan-like formatting (dynamically imported).
+ * In prod: uses pino-http with default formatting.
+ */
+async function createHttpLoggerMiddleware(options?: HttpLoggerOptions): Promise<RequestHandler>;
+```
+
+- Preconditions: LoggerFactory initialized
+- Postconditions: Returns Express middleware that logs HTTP requests
+- Invariants: morganLikeFormatOptions applied only in dev; static file paths skipped when autoLogging.ignore provided
+
+**Implementation Notes**
+- The type assertion for Logger<string> → pino-http's Logger is handled internally, hidden from consumers
+- `pino-http` moves from apps' dependencies to `@growi/logger`'s dependencies
+- **Browser compatibility**: `pino-http` is imported lazily inside the function body (`const { default: pinoHttp } = await import('pino-http')`) rather than at the module top-level. This prevents bundlers (Turbopack/webpack) from pulling the Node.js-only `pino-http` into browser bundles when `@growi/logger` is imported by shared code
+- `morganLikeFormatOptions` is dynamically imported (`await import('./dev/morgan-like-format-options')`) only when `NODE_ENV !== 'production'`, ensuring the module is not loaded in production
+- The function is `async` to support the dynamic imports; consumers call: `express.use(await createHttpLoggerMiddleware({ autoLogging: { ignore: ... } }))`
+
+### OpenTelemetry Layer
+
+#### DiagLoggerPinoAdapter
+
+| Field | Detail |
+|-------|--------|
+| Intent | Adapt a pino logger to the OpenTelemetry DiagLogger interface |
+| Requirements | 7.1, 7.2, 7.3 |
+
+**Responsibilities & Constraints**
+- Implement the OTel `DiagLogger` interface (`error`, `warn`, `info`, `debug`, `verbose`)
+- Map `verbose()` to pino's `trace()` level
+- Parse JSON strings in message arguments (preserving current behavior)
+- Disable `@opentelemetry/instrumentation-pino` if enabled by default
+
+**Dependencies**
+- External: pino v9.x (P0)
+- External: @opentelemetry/api (P0)
+
+**Contracts**: Service [x]
+
+##### Service Interface
+
+```typescript
+import type { DiagLogger } from '@opentelemetry/api';
+
+/**
+ * Create a DiagLogger that delegates to a pino logger.
+ * Maps OTel verbose level to pino trace level.
+ */
+function createDiagLoggerAdapter(): DiagLogger;
+```
+
+- Preconditions: LoggerFactory initialized, pino logger available for OTel namespace
+- Postconditions: Returns a valid DiagLogger implementation
+- Invariants: All DiagLogger methods delegate to the corresponding pino level
+
+**Implementation Notes**
+- Minimal change from current `DiagLoggerBunyanAdapter` — rename class, update import from bunyan to pino
+- `parseMessage` helper can remain largely unchanged
+- In OTel SDK configuration, replace `'@opentelemetry/instrumentation-bunyan': { enabled: false }` with `'@opentelemetry/instrumentation-pino': { enabled: false }` if the instrumentation package is present
+
+## Data Models
+
+Not applicable. This feature modifies runtime logging behavior and does not introduce or change persisted data models.
+
+## Error Handling
+
+### Error Strategy
+Logging infrastructure must be resilient — a logger failure must never crash the application.
+
+### Error Categories and Responses
+- **Missing config file**: Fall back to `{ default: 'info' }` and emit a console warning
+- **Invalid log level in config/env**: Ignore the entry and log a warning to stderr
+- **Transport initialization failure** (pino-pretty not available): Fall back to raw JSON output
+- **Logger creation failure**: Return a no-op logger that silently discards messages
+
+### Monitoring
+- Logger initialization errors are written to `process.stderr` directly (cannot use the logger itself)
+- No additional monitoring infrastructure required — this is the monitoring infrastructure
+
+## Addendum: Formatting Improvements (Post-Migration)
+
+> Added 2026-03-30. The core migration is complete. This section covers log output readability improvements based on operator feedback.
+
+### Background
+
+- Morgan was used in dev because bunyan's express logging was too verbose
+- Morgan's one-liner format (`GET /path 200 12ms`) was valued for readability
+- `FORMAT_NODE_LOG=true` should produce concise one-liner logs suitable for quick-glance monitoring
+- Production default should remain structured JSON (already working via `.env.production`)
+
+### Gap Summary
+
+| Gap | Issue | Resolution |
+|-----|-------|------------|
+| A | `singleLine: false` in prod FORMAT_NODE_LOG path | Change to `singleLine: true` |
+| B | `FORMAT_NODE_LOG` defaults to formatted when unset | Defer to separate PR (`.env.production` handles this) |
+| C | pino-http uses default verbose messages | Add `customSuccessMessage` / `customErrorMessage` / `customLogLevel` |
+| D | Dev and prod pino-pretty configs identical | Differentiate via `singleLine` |
+
+### Change 1: TransportFactory — Differentiated `singleLine`
+
+**File**: `packages/logger/src/transport-factory.ts`
+
+Current production + FORMAT_NODE_LOG branch uses `singleLine: false`. Change to `singleLine: true`:
+
+```
+Dev:                    singleLine: false  (unchanged — full context)
+Prod + FORMAT_NODE_LOG: singleLine: true   (concise one-liners)
+Prod default:           raw JSON           (unchanged)
+```
+
+The dev branch remains multi-line so developers see full object context. The production formatted path becomes single-line for operator readability.
+
+### Change 2: HttpLoggerMiddleware — Custom Message Format
+
+**Files**: `apps/app/src/server/crowi/index.ts`, `apps/slackbot-proxy/src/Server.ts`
+
+Add pino-http message customization to produce morgan-like output:
+
+```typescript
+const customSuccessMessage: PinoHttpOptions['customSuccessMessage'] = (req, res, responseTime) => {
+  return `${req.method} ${req.url} ${res.statusCode} - ${Math.round(responseTime)}ms`;
+};
+
+const customErrorMessage: PinoHttpOptions['customErrorMessage'] = (req, res, error) => {
+  return `${req.method} ${req.url} ${res.statusCode} - ${error.message}`;
+};
+
+const customLogLevel: PinoHttpOptions['customLogLevel'] = (_req, res, error) => {
+  if (error != null || res.statusCode >= 500) return 'error';
+  if (res.statusCode >= 400) return 'warn';
+  return 'info';
+};
+```
+
+### Output Examples (Updated with dev-only bunyan-like format)
+
+**Dev** (bunyan-format transport + morgan-like HTTP messages; note: the `req`/`res` lines shown below were later suppressed — see the 2026-04-06 addendum on dev-only module isolation):
+```
+10:06:30.419Z  INFO express: GET /page/path 200 - 12ms
+    req: {"method":"GET","url":"/page/path"}
+    res: {"statusCode":200}
+```
+
+**Prod + FORMAT_NODE_LOG=true** (standard pino-pretty, default pino-http messages):
+```
+[2026-03-30 12:00:00.000] INFO (express): request completed
+```
+
+**Prod default** (JSON, default pino-http messages):
+```json
+{"level":30,"time":1711792800000,"name":"express","msg":"request completed","req":{"method":"GET","url":"/page/path"},"res":{"statusCode":200},"responseTime":12}
+```
+
+### Testing
+
+- `transport-factory.spec.ts`: Verify transport target contains `bunyan-format` (not pino-pretty directly); dev transport passes no options (singleLine handled inside bunyan-format); prod + FORMAT_NODE_LOG returns pino-pretty with `singleLine: true`
+- `bunyan-format.spec.ts`: Verify transport module produces `HH:mm:ss.SSSZ LEVEL name: message` format; verify req/res are excluded from output
+- `http-logger.spec.ts`: Verify `createHttpLoggerMiddleware` returns middleware, applies morganLikeFormatOptions in dev, passes autoLogging options
+- `morgan-like-format-options.spec.ts`: Verify message formats using `strip()` to remove ANSI codes before assertion; verify customLogLevel returns correct levels for 2xx/4xx/5xx
+
+---
+
+## Addendum: HTTP Logger Encapsulation (Post-Migration)
+
+> Added 2026-04-02. Moves pino-http usage from consumer apps into @growi/logger.
+
+### Background
+
+- Consumer apps (`apps/app`, `apps/slackbot-proxy`) currently import `pino-http` directly
+- This leaks implementation details and requires each app to configure morgan-like format options
+- Encapsulating in `@growi/logger` provides a single configuration point and a cleaner dependency graph
+
+### Changes
+
+1. **New file**: `packages/logger/src/http-logger.ts` — exports `createHttpLoggerMiddleware(options)`
+2. **Package.json**: Add `pino-http` to `@growi/logger` dependencies
+3. **apps/app**: Replace direct `pino-http` import with `createHttpLoggerMiddleware` from `@growi/logger`
+4. **apps/slackbot-proxy**: Same as apps/app
+5. **Cleanup**: Remove `pino-http` from apps' direct dependencies (keep in @growi/logger)
+
+---
+
+## Addendum: Dev-Only Module Isolation and Browser Compatibility (Post-Migration)
+
+> Added 2026-04-06. Restructures dev-only modules and fixes browser bundle compatibility.
+
+### Background
+
+- `bunyan-format` and `morgan-like-format-options` were mixed with production modules at the `src/` root level
+- `pino-http` imported at the module top-level caused browser bundle errors (Turbopack: `TypeError: __turbopack_context__.r(...).symbols is undefined`) when `@growi/logger` was imported by shared page code
+- HTTP request logs in dev were verbose (multi-line `req`/`res` JSON objects)
+- HTTP status codes in dev lacked visual differentiation
+
+### Changes
+
+1. **`src/dev/` directory**: All dev-only modules moved under `src/dev/`
+   - `src/transports/bunyan-format.ts` → `src/dev/bunyan-format.ts`
+   - `src/morgan-like-format-options.ts` → `src/dev/morgan-like-format-options.ts`
+   - `src/transports/` directory removed
+2. **`index.ts`**: Removed static `export { morganLikeFormatOptions }` — dev-only module must not appear in production-facing package exports
+3. **`http-logger.ts`**: `pino-http` import moved from module top-level into the async function body (`const { default: pinoHttp } = await import('pino-http')`) — prevents browser bundlers from including the Node.js-only package
+4. **`bunyan-format.ts`**: `ignore` extended to `'pid,hostname,name,req,res,responseTime'` — suppresses verbose pino-http req/res objects; morgan-like `customSuccessMessage` already provides all relevant HTTP metadata on one line
+5. **`morgan-like-format-options.ts`**: ANSI color codes added for status code (2xx=green, 3xx=cyan, 4xx=yellow, 5xx=red) and dim response time; `NO_COLOR` env var respected

+ 156 - 0
.kiro/specs/migrate-logger-to-pino/requirements.md

@@ -0,0 +1,156 @@
+# Requirements Document
+
+## Introduction
+
+GROWI currently uses bunyan as its logging library, wrapped by the custom `universal-bunyan` package (developed by WeSeek). The system provides namespace-based hierarchical logging with environment variable-driven log level control, platform detection (Node.js/Browser), and different output formatting for development and production environments. Morgan is used for HTTP request logging in development mode while `express-bunyan-logger` handles production HTTP logging.
+
+This specification covers the complete migration from bunyan to pino, replacing `universal-bunyan` with an equivalent pino-based solution, and eliminating morgan by consolidating HTTP request logging under pino. The migration must preserve all existing functionality without degradation.
+
+### Current Components to Replace
+- `bunyan` → `pino`
+- `universal-bunyan` (custom) → pino-based equivalent (official packages preferred, custom wrapper where needed)
+- `bunyan-format` → pino transport equivalent (e.g., `pino-pretty`)
+- `express-bunyan-logger` → `pino-http` or equivalent
+- `morgan` (dev only) → consolidated into pino-http
+- `browser-bunyan` / `@browser-bunyan/console-formatted-stream` → pino browser mode or equivalent
+- `@types/bunyan` → pino's built-in types
+
+## Requirements
+
+### Requirement 1: Logger Factory with Namespace Support
+
+**Objective:** As a developer, I want to create loggers with hierarchical namespace identifiers (e.g., `growi:service:page`), so that I can identify the source of log messages and control granularity per module.
+
+#### Acceptance Criteria
+1. The Logger Factory shall provide a `loggerFactory(name: string)` function that returns a logger instance bound to the given namespace.
+2. When `loggerFactory` is called multiple times with the same namespace, the Logger Factory shall return the same cached logger instance.
+3. The Logger Factory shall support colon-delimited hierarchical namespaces (e.g., `growi:crowi`, `growi:routes:login`).
+4. The Logger Factory shall maintain API compatibility so that callers use `logger.info()`, `logger.debug()`, `logger.warn()`, `logger.error()`, `logger.trace()`, and `logger.fatal()` without changes to call sites.
+
+### Requirement 2: Namespace-Based Log Level Configuration via Config Files
+
+**Objective:** As a developer, I want to define per-namespace log levels in configuration files (separate for dev and prod), so that I can fine-tune verbosity for specific modules without restarting with different env vars.
+
+#### Acceptance Criteria
+1. The Logger Factory shall load a configuration object mapping namespace patterns to log levels (e.g., `{ 'growi:service:*': 'debug', 'default': 'info' }`).
+2. The Logger Factory shall select the dev or prod configuration based on the `NODE_ENV` environment variable.
+3. The Logger Factory shall support glob pattern matching (e.g., `growi:service:*`) for namespace-to-level mapping using minimatch-compatible syntax.
+4. When no specific namespace match exists, the Logger Factory shall fall back to the `default` level defined in the configuration.
+
+### Requirement 3: Environment Variable-Based Log Level Override
+
+**Objective:** As an operator, I want to override log levels at runtime via environment variables, so that I can enable debug/trace logging for specific namespaces without modifying code or config files.
+
+#### Acceptance Criteria
+1. The Logger Factory shall read the environment variables `DEBUG`, `TRACE`, `INFO`, `WARN`, `ERROR`, and `FATAL` to parse namespace patterns.
+2. When an environment variable (e.g., `DEBUG=growi:routes:*,growi:service:page`) is set, the Logger Factory shall apply the corresponding log level to all matching namespaces.
+3. When both a config file entry and an environment variable match the same namespace, the environment variable shall take precedence.
+4. The Logger Factory shall support comma-separated namespace patterns within a single environment variable value.
+5. The Logger Factory shall support glob wildcard patterns (e.g., `growi:*`) in environment variable values.
+
+### Requirement 4: Platform-Aware Logger (Node.js and Browser)
+
+**Objective:** As a developer, I want the logger to work seamlessly in both Node.js (server) and browser (client) environments, so that I can use the same `loggerFactory` import in universal/shared code.
+
+#### Acceptance Criteria
+1. The Logger Factory shall detect the runtime environment (Node.js vs browser) and instantiate the appropriate logger implementation.
+2. While running in a browser environment, the Logger Factory shall output logs to the browser's developer console with readable formatting.
+3. While running in a browser production environment, the Logger Factory shall default to `error` level to minimize console noise.
+4. While running in a Node.js environment, the Logger Factory shall output structured logs suitable for machine parsing or human-readable formatting depending on configuration.
+
+### Requirement 5: Output Formatting (Development vs Production)
+
+**Objective:** As a developer/operator, I want distinct log output formats for development and production, so that dev logs are human-readable while production logs are structured and parseable.
+
+#### Acceptance Criteria
+1. While `NODE_ENV` is not `production`, the Logger Factory shall output human-readable formatted logs (equivalent to bunyan-format `short` mode) using pino-pretty or an equivalent transport.
+2. While `NODE_ENV` is `production`, the Logger Factory shall output structured JSON logs by default.
+3. Where the `FORMAT_NODE_LOG` environment variable is set, the Logger Factory shall respect it to toggle between formatted and raw JSON output in production. (Note: the factory treats an unset or truthy `FORMAT_NODE_LOG` as formatted; the effective production default of raw JSON from criterion 2 is achieved by `.env.production` setting `FORMAT_NODE_LOG=false`.)
+4. The Logger Factory shall include the logger namespace in all log output so that the source module is identifiable.
+
+### Requirement 6: HTTP Request Logging
+
+**Objective:** As a developer/operator, I want HTTP request logging integrated with pino, so that request/response metadata is captured in a consistent format alongside application logs, eliminating the need for morgan.
+
+#### Acceptance Criteria
+1. The GROWI Server shall log HTTP requests using `pino-http` or an equivalent pino-based middleware, replacing both `morgan` (dev) and `express-bunyan-logger` (prod).
+2. While in development mode, the HTTP Logger shall skip logging for Next.js static file requests (paths starting with `/_next/static/`).
+3. The HTTP Logger shall use a logger instance obtained from the Logger Factory with the namespace `express` (or equivalent) for consistency with existing log namespaces.
+4. The HTTP Logger shall include standard HTTP metadata (method, URL, status code, response time) in log entries.
+
+### Requirement 7: OpenTelemetry Integration
+
+**Objective:** As a developer, I want the pino-based logger to integrate with OpenTelemetry diagnostics, so that observability tooling continues to function after migration.
+
+#### Acceptance Criteria
+1. The OpenTelemetry DiagLogger adapter shall be updated to wrap pino instead of bunyan.
+2. The OpenTelemetry DiagLogger adapter shall map OpenTelemetry verbose level to pino trace level.
+3. The OpenTelemetry SDK configuration shall disable pino instrumentation if an equivalent auto-instrumentation exists (analogous to the current bunyan instrumentation disable).
+
+### Requirement 8: Multi-App Consistency
+
+**Objective:** As a developer, I want all GROWI monorepo applications to use the same pino-based logging solution, so that logging behavior and configuration are consistent across the platform.
+
+#### Acceptance Criteria
+1. The `apps/app` application shall use the pino-based Logger Factory.
+2. The `apps/slackbot-proxy` application shall use the pino-based Logger Factory.
+3. The `packages/slack` package shall use the pino-based Logger Factory.
+4. The `packages/remark-attachment-refs` package shall use the pino-based Logger Factory.
+5. The Logger Factory shall be published as a shared package within the monorepo so that all consumers import from a single source.
+
+### Requirement 9: Dependency Cleanup
+
+**Objective:** As a maintainer, I want all bunyan-related and morgan dependencies removed after migration, so that the dependency tree is clean and there is no dead code.
+
+#### Acceptance Criteria
+1. When migration is complete, the monorepo shall have no references to `bunyan`, `universal-bunyan`, `bunyan-format`, `express-bunyan-logger`, `browser-bunyan`, `@browser-bunyan/console-formatted-stream`, or `@types/bunyan` in any `package.json`.
+2. When migration is complete, the monorepo shall have no references to `morgan` or `@types/morgan` in any `package.json`.
+3. When migration is complete, no source file shall contain imports or requires of the removed packages.
+
+### Requirement 11: Preserve Pino's Performance Characteristics
+
+**Objective:** As a developer, I want the logger implementation to honor pino's design philosophy of minimal overhead in the main thread, so that migrating from bunyan does not introduce performance regressions.
+
+#### Acceptance Criteria
+1. The Logger Factory shall create pino's worker-thread transport (`pino.transport()`) **at most once** per application lifetime (i.e., during `initializeLoggerFactory`), regardless of the number of unique namespaces.
+2. The Logger Factory shall create per-namespace loggers by calling `.child()` on a shared root pino instance, not by calling `pino()` and `pino.transport()` independently for each namespace.
+3. The Logger Factory shall not perform any blocking I/O or expensive computation on the hot path of each log method call (level-checking is performed by pino's internal mechanism and is acceptable).
+4. The number of active Worker threads used by the logger subsystem shall remain constant after the first call to `loggerFactory()`, regardless of how many distinct namespaces are subsequently requested.
+
+### Requirement 10: Backward-Compatible Log API
+
+**Objective:** As a developer, I want the new logger to expose the same method signatures as the current bunyan logger, so that existing log call sites require minimal or no changes.
+
+#### Acceptance Criteria
+1. The pino logger shall support `.info()`, `.debug()`, `.warn()`, `.error()`, `.trace()`, and `.fatal()` methods with the same argument patterns as bunyan (message string, optional object, optional error).
+2. If bunyan-specific APIs (e.g., `logger.child()`, serializers) are used at any call sites, the pino equivalent shall be provided or the call site shall be adapted.
+3. The Logger Factory shall export a TypeScript type for the logger instance that is compatible with the pino Logger type.
+
+### Requirement 12: Bunyan-Like Output Format (Development Only)
+
+**Objective:** As a developer, I want the log output in development mode to resemble bunyan-format's "short" mode, so that the visual experience remains familiar after migration.
+
+#### Acceptance Criteria
+1. While in development mode (`NODE_ENV !== 'production'`), the Logger Factory shall output each log line in the format: `HH:mm:ss.SSSZ LEVEL name: message` (e.g., `10:06:30.419Z DEBUG growi:service:page: some message`).
+2. The level label shall be right-aligned to 5 characters (e.g., `DEBUG`, ` INFO`, ` WARN`).
+3. The timestamp shall be UTC time-only in ISO 8601 format (`HH:mm:ss.SSSZ`), without date or surrounding brackets.
+4. The logger namespace (`name` field) shall appear directly after the level label, followed by a colon and the message, without parentheses.
+5. Log lines shall be colorized by level (cyan for DEBUG, green for INFO, yellow for WARN, red for ERROR).
+6. The bunyan-like format shall be implemented as a custom pino transport module within `@growi/logger`, so that `pino.transport()` can load it in a worker thread without function serialization issues.
+7. The bunyan-format transport module shall only be imported in development mode. In production, the module shall not be imported or bundled.
+8. While in production mode with `FORMAT_NODE_LOG` enabled, the Logger Factory shall use standard pino-pretty (not the bunyan-format transport) for formatted output.
+
+### Requirement 13: HTTP Logger Middleware Encapsulation
+
+**Objective:** As a developer, I want the HTTP request logging middleware encapsulated within `@growi/logger`, so that consumer applications do not need to depend on or import `pino-http` directly.
+
+#### Acceptance Criteria
+1. The `@growi/logger` package shall export a `createHttpLoggerMiddleware(options)` function that returns Express-compatible middleware for HTTP request logging.
+2. The middleware factory shall accept options for the logger namespace (defaulting to `'express'`) and optional `autoLogging` configuration (e.g., route ignore patterns).
+3. While in development mode, the middleware shall apply morgan-like formatting (custom success/error messages, custom log levels) via dynamic import. In production mode, the morgan-like format module shall not be imported; pino-http's default message format shall be used.
+4. After the encapsulation, `apps/app` and `apps/slackbot-proxy` shall not import `pino-http` directly; all HTTP logging shall go through `@growi/logger`.
+5. The `pino-http` dependency shall move from consumer applications to `@growi/logger`'s `dependencies`.
+6. The `morganLikeFormatOptions` module shall only be imported in development mode (dynamic import). In production, the module shall not be imported or bundled.
+7. The `pino-http` module shall be imported lazily inside the `createHttpLoggerMiddleware` function body (not at module top-level), so that bundlers (e.g., Turbopack, webpack) do not include the Node.js-only `pino-http` in browser bundles when `@growi/logger` is imported by shared/universal code.
+8. While in development mode with morgan-like formatting enabled, the HTTP log output shall suppress the verbose `req` and `res` serialized objects; the `customSuccessMessage` output (method, URL, status code, response time) is sufficient for development readability.
+9. While in development mode, the morgan-like format shall colorize the HTTP status code by range (2xx=green, 3xx=cyan, 4xx=yellow, 5xx=red) and dim the response time, respecting the `NO_COLOR` environment variable.

+ 224 - 0
.kiro/specs/migrate-logger-to-pino/research.md

@@ -0,0 +1,224 @@
+# Research & Design Decisions
+
+---
+**Purpose**: Capture discovery findings, architectural investigations, and rationale that inform the technical design.
+---
+
+## Summary
+- **Feature**: `migrate-logger-to-pino`
+- **Discovery Scope**: Complex Integration
+- **Key Findings**:
+  - Pino and bunyan share identical argument patterns (`logger.info(obj, msg)`) — no call-site changes needed
+  - No `logger.child()` or custom serializers used in GROWI — simplifies migration significantly
+  - `@opentelemetry/instrumentation-pino` supports pino `<10`; need to verify v9.x or v10 compatibility
+  - No off-the-shelf pino package replicates universal-bunyan's namespace-based level control; custom wrapper required
+
+## Research Log
+
+### Pino Core API Compatibility with Bunyan
+- **Context**: Need to confirm argument pattern compatibility to minimize call-site changes
+- **Sources Consulted**: pino GitHub docs (api.md), npm pino@10.3.1
+- **Findings**:
+  - Log level numeric values are identical: trace=10, debug=20, info=30, warn=40, error=50, fatal=60
+  - Method signature: `logger[level]([mergingObject], [message], [...interpolationValues])` — same as bunyan
+  - `name` option adds a `"name"` field to JSON output, same as bunyan
+  - `msg` is the default message key (same as bunyan), configurable via `messageKey`
+  - `pino.child(bindings, options)` works similarly to bunyan's `child()`
+- **Implications**: Call sites using `logger.info('msg')`, `logger.info({obj}, 'msg')`, `logger.error(err)` require no changes
+
+### Pino Browser Support
+- **Context**: universal-bunyan uses browser-bunyan + ConsoleFormattedStream for client-side logging
+- **Sources Consulted**: pino GitHub docs (browser.md)
+- **Findings**:
+  - Pino has built-in browser mode activated via package.json `browser` field
+  - Maps to console methods: `console.error` (fatal/error), `console.warn`, `console.info`, `console.debug`, `console.trace`
+  - `browser.asObject: true` outputs structured objects
+  - `browser.write` allows custom per-level handlers
+  - Level control works the same as Node.js (`level` option)
+  - No separate package needed (unlike browser-bunyan)
+- **Implications**: Eliminates browser-bunyan and @browser-bunyan/console-formatted-stream dependencies entirely
+
+### Pino-Pretty as Bunyan-Format Replacement
+- **Context**: universal-bunyan uses bunyan-format with `short` (dev) and `long` (prod) output modes
+- **Sources Consulted**: pino-pretty npm (v13.1.3)
+- **Findings**:
+  - Can be used as transport (worker thread) or stream (main thread)
+  - Short mode equivalent: `singleLine: true` + `ignore: 'pid,hostname'`
+  - Long mode equivalent: default multi-line output
+  - `translateTime: 'SYS:standard'` for human-readable timestamps
+  - TTY-only pattern: conditionally enable based on `process.stdout.isTTY`
+- **Implications**: Direct replacement for bunyan-format with equivalent modes
+
+### Pino-HTTP as Morgan/Express-Bunyan-Logger Replacement
+- **Context**: GROWI uses morgan (dev) and express-bunyan-logger (prod) for HTTP request logging
+- **Sources Consulted**: pino-http npm (v11.0.0)
+- **Findings**:
+  - Express middleware with `autoLogging.ignore` for route skipping (replaces morgan's `skip`)
+  - Accepts custom pino logger instance via `logger` option
+  - `customLogLevel` for status-code-based level selection
+  - `req.log` provides child logger with request context
+  - Replaces both morgan and express-bunyan-logger in a single package
+- **Implications**: Unified HTTP logging for both dev and prod, with route filtering support
+
+### Namespace-Based Level Control
+- **Context**: universal-bunyan provides namespace-to-level mapping with minimatch glob patterns and env var overrides
+- **Sources Consulted**: pino-debug (v4.0.2), pino ecosystem search
+- **Findings**:
+  - pino-debug bridges the `debug` module but doesn't provide general namespace-level control
+  - No official pino package replicates universal-bunyan's behavior
+  - Custom implementation needed: wrapper that caches pino instances per namespace, reads config + env vars, applies minimatch matching
+  - Can use pino's `level` option per-instance (set at creation time)
+- **Implications**: Must build `@growi/logger` package as a custom wrapper around pino, replacing universal-bunyan
+
+### OpenTelemetry Instrumentation
+- **Context**: GROWI has a custom DiagLogger adapter wrapping bunyan, and disables @opentelemetry/instrumentation-bunyan
+- **Sources Consulted**: @opentelemetry/instrumentation-pino npm (v0.59.0)
+- **Findings**:
+  - Supports pino `>=5.14.0 <10` — pino v10 may not be supported yet
+  - Provides trace correlation (trace_id, span_id injection) and log sending to OTel SDK
+  - GROWI's DiagLoggerBunyanAdapter pattern maps cleanly to pino (same method names)
+  - Current code disables bunyan instrumentation; equivalent disable for pino instrumentation may be needed
+- **Implications**: Pin pino to v9.x for OTel compatibility, or verify v10 support. DiagLogger adapter changes are minimal.
+
+### Existing Call-Site Analysis
+- **Context**: Need to understand what API surface is actually used to minimize migration risk
+- **Sources Consulted**: Codebase grep across all apps and packages
+- **Findings**:
+  - **No `logger.child()` usage** anywhere in the codebase
+  - **No custom serializers** registered or used
+  - **No `logger.fields` access** or other bunyan-specific APIs
+  - Call patterns: ~30% simple string, ~50% string+object, ~10% error-only, ~10% string+error
+  - All loggers created via `loggerFactory(name)` — single entry point
+- **Implications**: Migration is primarily a factory-level change; call sites need no modification
+
+## Architecture Pattern Evaluation
+
+| Option | Description | Strengths | Risks / Limitations | Notes |
+|--------|-------------|-----------|---------------------|-------|
+| Drop-in wrapper (`@growi/logger`) | Shared package providing `loggerFactory()` over pino with namespace/config/env support | Minimal call-site changes, single source of truth, testable in isolation | Must implement namespace matching (minimatch) | Mirrors universal-bunyan's role |
+| Direct pino usage per app | Each app creates pino instances directly | No wrapper overhead | Duplicated config logic, inconsistent behavior across apps | Rejected: violates Req 8 |
+| pino-debug bridge | Use pino-debug for namespace control | Leverages existing package | Only works with `debug()` calls, not general logging | Rejected: wrong abstraction |
+
+## Design Decisions
+
+### Decision: Create `@growi/logger` as Shared Package
+- **Context**: universal-bunyan is a custom wrapper; need equivalent for pino
+- **Alternatives Considered**:
+  1. Direct pino usage in each app — too much duplication
+  2. Fork/patch universal-bunyan for pino — complex, hard to maintain
+  3. New shared package `@growi/logger` — clean, purpose-built
+- **Selected Approach**: New `@growi/logger` package in `packages/logger/`
+- **Rationale**: Single source of truth, testable, follows monorepo patterns (like @growi/core)
+- **Trade-offs**: One more package to maintain, but replaces external dependency
+- **Follow-up**: Define package exports, ensure tree-shaking for browser builds
+
+### Decision: Pin Pino to v9.x for OpenTelemetry Compatibility
+- **Context**: @opentelemetry/instrumentation-pino supports `<10`
+- **Alternatives Considered**:
+  1. Use pino v10 and skip OTel auto-instrumentation — loses correlation
+  2. Use pino v9 for compatibility — safe choice
+  3. Use pino v10 and verify latest instrumentation support — risky
+- **Selected Approach**: Start with pino v9.x; upgrade to v10 when OTel adds support
+- **Rationale**: OTel trace correlation is valuable for production observability
+- **Trade-offs**: Miss latest pino features temporarily
+- **Follow-up**: Monitor @opentelemetry/instrumentation-pino releases for v10 support
+
+### Decision: Use pino-pretty as Transport in Development
+- **Context**: Need human-readable output for dev, JSON for prod
+- **Alternatives Considered**:
+  1. pino-pretty as transport (worker thread) — standard approach
+  2. pino-pretty as sync stream — simpler but blocks main thread
+- **Selected Approach**: Transport for async dev logging; raw JSON in production
+- **Rationale**: Transport keeps main thread clear; dev perf is less critical but the pattern is correct
+- **Trade-offs**: Slightly more complex setup
+- **Follow-up**: Verify transport works correctly with Next.js dev server
+
+### Decision: Unified HTTP Logging with pino-http
+- **Context**: Currently uses morgan (dev) and express-bunyan-logger (prod) — two different middlewares
+- **Alternatives Considered**:
+  1. Separate dev/prod middleware (maintain split) — unnecessary complexity
+  2. Single pino-http middleware for both — clean, consistent
+- **Selected Approach**: pino-http with route filtering replaces both
+- **Rationale**: Single middleware, consistent output format, built-in request context
+- **Trade-offs**: Dev output slightly different from morgan's compact format (mitigated by pino-pretty)
+- **Follow-up**: Configure `autoLogging.ignore` for `/_next/static/` paths
+
+## Risks & Mitigations
+- **OTel instrumentation compatibility with pino version** — Mitigated by pinning to v9.x
+- **Browser bundle size increase** — Pino browser mode is lightweight; monitor with build metrics
+- **Subtle log format differences** — Acceptance test comparing output before/after
+- **Missing env var behavior** — Port minimatch logic carefully with unit tests
+- **Express middleware ordering** — Ensure pino-http is added at the same point in middleware chain
+
+### Phase 2: Formatting Improvement Research
+
+#### pino-http Custom Message API (v11.0.0)
+- **Context**: Need morgan-like concise HTTP log messages instead of pino-http's verbose default
+- **Sources Consulted**: pino-http v11.0.0 type definitions (index.d.ts), source code (logger.js)
+- **Findings**:
+  - `customSuccessMessage: (req: IM, res: SR, responseTime: number) => string` — called on successful response (statusCode < 500)
+  - `customErrorMessage: (req: IM, res: SR, error: Error) => string` — called on error response
+  - `customReceivedMessage: (req: IM, res: SR) => string` — called when request received (optional, only if autoLogging enabled)
+  - `customLogLevel: (req: IM, res: SR, error?: Error) => LevelWithSilent` — dynamic log level based on status code
+  - `customSuccessObject: (req, res, val) => any` — custom fields for successful response log
+  - `customErrorObject: (req, res, error, val) => any` — custom fields for error response log
+  - `customAttributeKeys: { req?, res?, err?, reqId?, responseTime? }` — rename default keys
+  - Response time is calculated as `Date.now() - res[startTime]` in milliseconds
+  - Error conditions: error passed to handler, `res.err` set, or `res.statusCode >= 500`
+- **Implications**: `customSuccessMessage` + `customErrorMessage` + `customLogLevel` are sufficient to achieve a morgan-like output format
+
+#### pino-pretty singleLine Option
+- **Context**: User wants one-liner readable logs when FORMAT_NODE_LOG=true
+- **Sources Consulted**: pino-pretty v13.x documentation
+- **Findings**:
+  - `singleLine: true` forces all log properties onto a single line
+  - `singleLine: false` (default) outputs properties on separate indented lines
+  - Combined with `ignore: 'pid,hostname'`, singleLine produces concise output
+  - The `messageFormat` option can further customize the format string
+- **Implications**: Changing `singleLine` from `false` to `true` in the production FORMAT_NODE_LOG path directly addresses the user's readability concern
+
+#### FORMAT_NODE_LOG Default Semantics Analysis
+- **Context**: `isFormattedOutputEnabled()` returns `true` when env var is unset; production JSON depends on `.env.production`
+- **Analysis**:
+  - `.env.production` sets `FORMAT_NODE_LOG=false` — this is the mechanism that ensures JSON in production
+  - CI sets `FORMAT_NODE_LOG=true` explicitly — not affected by default change
+  - If `.env.production` fails to load in a Docker override scenario, production would silently get pino-pretty
+  - However, inverting the default is a behavioral change with broader implications
+- **Decision**: Defer to a separate PR. Current behavior is correct in practice (`.env.production` is always loaded by Next.js dotenv-flow).
+
+## Phase 3: Implementation Discoveries
+
+### Browser Bundle Compatibility — pino-http Top-Level Import
+- **Context**: `pino-http` was initially imported at the module top-level in `http-logger.ts`. This caused Turbopack to include the Node.js-only module in browser bundles, producing `TypeError: __turbopack_context__.r(...).symbols is undefined`.
+- **Root cause**: `@growi/logger` is imported by shared page code that runs in both browser and server contexts. Any top-level import of a Node.js-only module (like pino-http) gets pulled into the browser bundle.
+- **Fix**: Move the `pino-http` import inside the async function body using dynamic import: `const { default: pinoHttp } = await import('pino-http')`. This defers the import to runtime when the function is actually called (server-side only).
+- **Pattern**: This is the standard pattern for Node.js-only modules in packages shared with browser code. Apply the same treatment to any future Node.js-only additions to `@growi/logger`.
+
+### Dev-Only Module Physical Isolation (`src/dev/`)
+- **Context**: `bunyan-format.ts` (custom pino transport) and `morgan-like-format-options.ts` were initially placed at `src/transports/` and `src/` root respectively, mixed with production modules.
+- **Problem**: No clear boundary between dev-only and production-safe modules; risk of accidentally importing dev modules in production paths.
+- **Fix**: Created `src/dev/` directory as the explicit boundary for development-only modules. `TransportFactory` references `./dev/bunyan-format.js` only in the dev branch — the path is never constructed in production code paths.
+- **Vite config**: `preserveModules: true` ensures `src/dev/bunyan-format.ts` builds to `dist/dev/bunyan-format.js` with the exact path that `pino.transport({ target: ... })` references at runtime.
+
+### Single Worker Thread Model — Critical Implementation Detail
+- **Context**: Initial implementation called `pino.transport()` inside `loggerFactory(name)`, spawning a new Worker thread for each namespace.
+- **Fix**: Refactored so `pino.transport()` is called **once** in `initializeLoggerFactory`, and `loggerFactory(name)` calls `rootLogger.child({ name })` to create namespace-bound loggers sharing the single Worker thread.
+- **Root logger level**: Must be set to `'trace'` (not `'info'`) so child loggers can independently set their resolved level without being silenced by the root. If the root is `'info'`, a child with `level: 'debug'` will still be filtered at the root level.
+- **Constraint for future changes**: Never call `pino.transport()` or `pino()` inside `loggerFactory()`. All transport setup belongs in `initializeLoggerFactory()`.
+
+### pino Logger Type Compatibility with pino-http
+- **Context**: `loggerFactory()` returned `pino.Logger<never>` (the default), which is not assignable to pino-http's expected `Logger` type.
+- **Fix**: Export `Logger<string>` from `@growi/logger` and type `loggerFactory` to return `Logger<string>`. This is compatible with pino-http's `logger` option.
+- **Why `<string>` not `<never>`**: pino's default generic `CustomLevels` is `never`, which makes the type incompatible with APIs expecting custom levels to potentially be strings. `Logger<string>` is the correct type for external APIs.
+
+### `@growi/logger` Package Visibility
+- **Decision**: `"private": true` is correct and intentional.
+- **Rationale**: All consumers (`apps/app`, `apps/slackbot-proxy`, `packages/slack`, etc.) are monorepo-internal packages that reference `@growi/logger` via `workspace:*` protocol. The `private` flag only prevents npm publish, not workspace usage. `@growi/logger` is logging infrastructure — there is no reason to expose it externally (unlike `@growi/core` or `@growi/pluginkit` which are published for external plugin developers).
+
+## References
+- [pino API docs](https://github.com/pinojs/pino/blob/main/docs/api.md)
+- [pino browser docs](https://github.com/pinojs/pino/blob/main/docs/browser.md)
+- [pino-pretty npm](https://www.npmjs.com/package/pino-pretty)
+- [pino-http npm](https://www.npmjs.com/package/pino-http)
+- [@opentelemetry/instrumentation-pino](https://www.npmjs.com/package/@opentelemetry/instrumentation-pino)
+- [universal-bunyan source](https://github.com/weseek/universal-bunyan) — current implementation reference

+ 23 - 0
.kiro/specs/migrate-logger-to-pino/spec.json

@@ -0,0 +1,23 @@
+{
+  "feature_name": "migrate-logger-to-pino",
+  "created_at": "2026-03-23T00:00:00.000Z",
+  "updated_at": "2026-04-06T00:00:00.000Z",
+  "language": "en",
+  "phase": "implementation-complete",
+  "cleanup_completed": true,
+  "approvals": {
+    "requirements": {
+      "generated": true,
+      "approved": true
+    },
+    "design": {
+      "generated": true,
+      "approved": true
+    },
+    "tasks": {
+      "generated": true,
+      "approved": false
+    }
+  },
+  "ready_for_implementation": true
+}

+ 263 - 0
.kiro/specs/migrate-logger-to-pino/tasks.md

@@ -0,0 +1,263 @@
+# Implementation Plan
+
+- [x] 1. Scaffold the @growi/logger shared package
+- [x] 1.1 Initialize the package directory, package.json, and TypeScript configuration within the monorepo packages directory
+  - Create the workspace entry as `@growi/logger` with pino v9.x and minimatch as dependencies, pino-pretty as an optional peer dependency
+  - Configure TypeScript with strict mode, ESM output, and appropriate path aliases
+  - Set up the package entry points (main, types, browser) so that bundlers resolve the correct build for Node.js vs browser
+  - Add vitest configuration for unit testing within the package
+  - _Requirements: 8.5_
+
+- [x] 1.2 Define the shared type contracts and configuration interface
+  - Define the `LoggerConfig` type representing a namespace-pattern-to-level mapping (including a `default` key)
+  - Define the `LoggerFactoryOptions` type accepted by the initialization function
+  - Export the pino `Logger` type so consumers can type-annotate their logger variables without importing pino directly
+  - _Requirements: 10.3_
+
+- [x] 2. Implement environment variable parsing and level resolution
+- [x] 2.1 (P) Build the environment variable parser
+  - Read the six log-level environment variables (`DEBUG`, `TRACE`, `INFO`, `WARN`, `ERROR`, `FATAL`) from the process environment
+  - Split each variable's value by commas and trim whitespace to extract individual namespace patterns
+  - Return a flat config map where each namespace pattern maps to its corresponding level string
+  - Handle edge cases: empty values, missing variables, duplicate patterns (last wins)
+  - Write unit tests covering: single variable with multiple patterns, all six variables set, no variables set, whitespace handling
+  - _Requirements: 3.1, 3.4, 3.5_
+
+- [x] 2.2 (P) Build the level resolver with glob pattern matching
+  - Accept a namespace string, a config map, and an env-override map; return the resolved level
+  - Check env-override map first (using minimatch for glob matching), then config map, then fall back to the config `default` entry
+  - When multiple patterns match, prefer the most specific (longest non-wildcard prefix) match
+  - Write unit tests covering: exact match, glob wildcard match, env override precedence over config, fallback to default, no matching pattern
+  - _Requirements: 2.1, 2.3, 2.4, 3.2, 3.3_
+
+- [x] 3. Implement the transport factory for dev, prod, and browser environments
+- [x] 3.1 (P) Build the Node.js transport configuration
+  - In development mode, produce pino-pretty transport options with human-readable timestamps, hidden pid/hostname fields, and multi-line output
+  - In production mode, produce raw JSON output to stdout when formatted output is disabled (`FORMAT_NODE_LOG=false`, as set by `.env.production`)
+  - When the `FORMAT_NODE_LOG` environment variable is unset or truthy in production, produce pino-pretty transport options with long-format output instead of raw JSON
+  - Include the logger namespace (`name` field) in all output configurations
+  - Write unit tests verifying correct options for each combination of NODE_ENV and FORMAT_NODE_LOG
+  - _Requirements: 5.1, 5.2, 5.3, 5.4_
+
+- [x] 3.2 (P) Build the browser transport configuration
+  - Detect the browser environment using window/document checks
+  - In browser development mode, produce pino browser options that output to the developer console with the resolved namespace level
+  - In browser production mode, produce pino browser options that default to `error` level to suppress non-critical console output
+  - Write unit tests verifying browser options for dev and prod scenarios
+  - _Requirements: 4.1, 4.2, 4.3, 4.4_
+
+- [x] 4. Implement the logger factory with caching and platform detection
+- [x] 4.1 Build the initialization and factory functions
+  - Implement `initializeLoggerFactory(options)` that stores the merged configuration, pre-parses environment overrides, and prepares the transport config
+  - Implement `loggerFactory(name)` that checks the cache for an existing logger, resolves the level via the level resolver, creates a pino instance with appropriate transport options, caches it, and returns it
+  - Detect the runtime platform (Node.js vs browser) and apply the corresponding transport configuration from the transport factory
+  - Ensure the module exports `loggerFactory` as the default export and `initializeLoggerFactory` as a named export for backward compatibility with existing import patterns
+  - Write unit tests covering: cache hit returns same instance, different namespaces return different instances, initialization stores config correctly
+  - _Requirements: 1.1, 1.2, 1.3, 1.4, 4.1, 10.1_
+
+- [x] 5. Migrate shared packages to @growi/logger (small scope first)
+- [x] 5.1 (P) Update packages/slack logger to use @growi/logger
+  - Replace the logger factory implementation to import from `@growi/logger` instead of universal-bunyan
+  - Update the inline config (`{ default: 'info' }`) to use the @growi/logger initialization pattern
+  - Replace bunyan type imports with the @growi/logger Logger type
+  - Add `@growi/logger` to packages/slack dependencies
+  - Run TypeScript compilation to verify no type errors
+  - _Requirements: 8.3_
+
+- [x] 5.2 (P) Update packages/remark-attachment-refs logger to use @growi/logger
+  - Replace the logger factory implementation to import from `@growi/logger`
+  - Update configuration and type imports to match the new package
+  - Add `@growi/logger` to packages/remark-attachment-refs dependencies
+  - Run TypeScript compilation to verify no type errors
+  - _Requirements: 8.4_
+
+- [x] 5.3 Fix pino-style logger call sites in packages/slack
+  - In the following files, convert all `logger.method('message', obj)` calls to the pino-canonical form `logger.method({ obj }, 'message')` (object first, message second)
+  - `src/middlewares/verify-growi-to-slack-request.ts` (lines 25, 34)
+  - `src/middlewares/verify-slack-request.ts` (lines 25, 36, 45, 76)
+  - `src/utils/interaction-payload-accessor.ts` (line 104)
+  - Run `pnpm --filter @growi/slack lint:typecheck` and confirm zero TS2769 errors
+  - _Requirements: 10.1_
+
+- [x] 5.4 Fix pino-style logger call site in packages/remark-attachment-refs
+  - In `src/client/services/renderer/refs.ts` (line 107), convert `logger.debug('message', attributes)` to `logger.debug({ attributes }, 'message')`
+  - Run `pnpm --filter @growi/remark-attachment-refs lint:typecheck` and confirm the TS2769 error is gone
+  - _Requirements: 10.1_
+
+- [x] 5.5 Migrate packages/remark-lsx server routes to use @growi/logger
+  - Add `@growi/logger` to packages/remark-lsx dependencies
+  - Create `src/utils/logger/index.ts` following the same pattern as remark-attachment-refs (import from `@growi/logger`, call `initializeLoggerFactory`, re-export `loggerFactory`)
+  - Replace `console.error` calls in `src/server/routes/list-pages/index.ts` (lines 89, 145-148) with proper logger calls using `loggerFactory('growi:remark-lsx:routes:list-pages')`
+  - Remove the `biome-ignore lint/suspicious/noConsole` comments from the replaced call sites
+  - Run `pnpm --filter @growi/remark-lsx lint:typecheck` to confirm no type errors
+  - _Requirements: 8.5_
+
+- [x] 6. Migrate apps/slackbot-proxy to @growi/logger
+- [x] 6.1 Replace the logger factory and HTTP middleware in slackbot-proxy
+  - Update the slackbot-proxy logger utility to import from `@growi/logger` and call `initializeLoggerFactory` with its existing dev/prod config
+  - Replace express-bunyan-logger and morgan usage in the server setup with pino-http middleware
+  - Replace all `import type Logger from 'bunyan'` references with the @growi/logger Logger type
+  - Add `@growi/logger` and `pino-http` to slackbot-proxy dependencies
+  - Run TypeScript compilation to verify no type errors
+  - _Requirements: 8.2, 6.1_
+
+- [x] 6.6 Fix pino-style logger call sites in apps/slackbot-proxy
+  - In the following files, convert all `logger.method('message', obj)` calls to `logger.method({ obj }, 'message')`
+  - `src/controllers/growi-to-slack.ts` (lines 109, 179, 231, 243, 359)
+  - `src/controllers/slack.ts` (lines 388, 586)
+  - `src/services/RegisterService.ts` (line 165)
+  - Run `pnpm --filter @growi/slackbot-proxy lint:typecheck` and confirm zero TS2769 errors
+  - _Requirements: 10.1_
+
+- [x] 6.7 Fix @growi/logger Logger type export and remove `as any` cast in slackbot-proxy
+  - In `packages/logger`, update the `loggerFactory` return type so it is compatible with `pino-http`'s `logger` option (i.e., `pino.Logger` without `<never>` narrowing, or by exporting `Logger<string>`)
+  - After the type export is fixed, remove the `as any` cast from `apps/slackbot-proxy/src/Server.ts` (line 166) and the associated `biome-ignore` comment
+  - Run `pnpm --filter @growi/slackbot-proxy lint:typecheck` to confirm no residual type errors
+  - _Requirements: 10.3_
+
+- [x] 6.5 Fix logger factory to preserve pino's single-worker-thread performance model
+  - Refactor `initializeLoggerFactory` to create the pino transport (`pino.transport()`) and root pino logger **once**, storing them in module scope
+  - Set the root logger's level to `'trace'` so that individual child loggers can apply their own resolved level without being silenced by the root
+  - Refactor `loggerFactory(name)` to call `rootLogger.child({ name })` and then set `childLogger.level = resolvedLevel` instead of calling `pino()` + `pino.transport()` per namespace
+  - Handle browser mode separately: the root browser logger is created once in `initializeLoggerFactory`; `loggerFactory` still calls `.child({ name })` and applies the resolved level
+  - Update unit tests in `logger-factory.spec.ts` to verify that calling `loggerFactory` for N distinct namespaces does not create N independent pino instances (all children share the root transport)
+  - _Requirements: 11.1, 11.2, 11.3, 11.4_
+
+- [x] 7. Migrate apps/app to @growi/logger (largest scope)
+- [x] 7.1 Replace the logger factory module in apps/app
+  - Update the apps/app logger utility to import from `@growi/logger` instead of `universal-bunyan`
+  - Call `initializeLoggerFactory` at application startup with the existing dev/prod config files (preserve current config content)
+  - Re-export `loggerFactory` as the default export so all existing consumer imports continue to work unchanged
+  - Add `@growi/logger` to apps/app dependencies and ensure pino-pretty is available for development formatting
+  - _Requirements: 8.1, 2.2_
+
+- [x] 7.2 Replace HTTP request logging middleware in apps/app
+  - Remove the morgan middleware (development mode) and express-bunyan-logger middleware (production mode) from the Express initialization
+  - Add pino-http middleware configured with a logger from the factory using the `express` namespace
+  - Configure route skipping to exclude `/_next/static/` paths in non-production mode
+  - Verify the middleware produces log entries containing method, URL, status code, and response time
+  - _Requirements: 6.1, 6.2, 6.3, 6.4_
+
+- [x] 7.3 Update the OpenTelemetry diagnostic logger adapter
+  - Rename the adapter class from `DiagLoggerBunyanAdapter` to `DiagLoggerPinoAdapter` and update the import to use pino types
+  - Preserve the existing `parseMessage` helper logic that parses JSON strings and merges argument objects
+  - Confirm the verbose-to-trace level mapping continues to work with pino's trace level
+  - Update the OpenTelemetry SDK configuration to disable `@opentelemetry/instrumentation-pino` instead of `@opentelemetry/instrumentation-bunyan`
+  - _Requirements: 7.1, 7.2, 7.3_
+
+- [x] 7.4 Update all bunyan type references in apps/app source files
+  - Replace `import type Logger from 'bunyan'` with the Logger type exported from `@growi/logger` across all source files in apps/app
+  - Verify that pino's Logger type is compatible with all existing usage patterns (info, debug, warn, error, trace, fatal method calls)
+  - Run the TypeScript compiler to confirm no type errors
+  - _Requirements: 10.1, 10.2, 10.3_
+
+- [x] 8. Remove old logging dependencies and verify cleanup
+- [x] 8.1 Remove bunyan-related packages from all package.json files
+  - Remove `bunyan`, `universal-bunyan`, `bunyan-format`, `express-bunyan-logger`, `browser-bunyan`, `@browser-bunyan/console-formatted-stream`, `@types/bunyan` from every package.json in the monorepo
+  - Remove `morgan` and `@types/morgan` from every package.json in the monorepo
+  - Run `pnpm install` to update the lockfile and verify no broken peer dependency warnings
+  - _Requirements: 9.1, 9.2_
+
+- [x] 8.2 Verify no residual references to removed packages
+  - Search all source files for any remaining imports or requires of the removed packages (bunyan, universal-bunyan, browser-bunyan, express-bunyan-logger, morgan, bunyan-format)
+  - Search all configuration and type definition files for stale bunyan references
+  - Fix any remaining references found during the search
+  - _Requirements: 9.3_
+
+- [x] 9. Run full monorepo validation
+- [x] 9.1 Execute lint, type-check, test, and build across the monorepo
+  - Run `turbo run lint --filter @growi/app` and fix any lint errors related to the migration
+  - Run `turbo run test --filter @growi/app` and verify all existing tests pass
+  - Run `turbo run build --filter @growi/app` and confirm the production build succeeds
+  - Run the same checks for slackbot-proxy and any other affected packages
+  - Verify the @growi/logger package's own tests pass
+  - _Requirements: 1.4, 8.1, 8.2, 8.3, 8.4, 10.1, 10.2_
+
+- [x] 10. Improve log output formatting for readability
+- [x] 10.1 (P) Differentiate pino-pretty singleLine between dev and production FORMAT_NODE_LOG
+  - In the transport factory, change the production + FORMAT_NODE_LOG path to use `singleLine: true` for concise one-liner output
+  - Keep the development path at `singleLine: false` so developers see full multi-line context
+  - Update unit tests to verify: dev returns `singleLine: false`, production + FORMAT_NODE_LOG returns `singleLine: true`, production with FORMAT_NODE_LOG disabled (`FORMAT_NODE_LOG=false`) still returns no transport
+  - _Requirements: 5.1, 5.3_
+
+- [x] 10.2 (P) Add morgan-like HTTP request message formatting to pino-http in apps/app
+  - Configure `customSuccessMessage` to produce `METHOD /url STATUS - TIMEms` format (e.g., `GET /page/path 200 - 12ms`)
+  - Configure `customErrorMessage` to include the error message alongside method, URL, and status code
+  - Configure `customLogLevel` to return `warn` for 4xx responses and `error` for 5xx or error responses, keeping `info` for successful requests
+  - Verify that `/_next/static/` path skipping in dev mode still works after the changes
+  - _Requirements: 6.1, 6.4_
+
+- [x] 10.3 (P) Add morgan-like HTTP request message formatting to pino-http in apps/slackbot-proxy
+  - Apply the same `customSuccessMessage`, `customErrorMessage`, and `customLogLevel` configuration as apps/app
+  - _Requirements: 6.1, 6.4_
+
+- [x] 11. Validate formatting improvements
+- [x] 11.1 Run tests and build for affected packages
+  - Run the @growi/logger package tests to confirm transport factory changes pass
+  - Run lint and type-check for apps/app and apps/slackbot-proxy
+  - Verify the production build succeeds
+  - _Requirements: 5.1, 5.3, 6.1, 6.4_
+
+- [x] 12. Implement bunyan-like output format (development only)
+- [x] 12.1 Create the bunyan-format custom transport module
+  - Create `packages/logger/src/transports/bunyan-format.ts` (note: later relocated to `src/dev/bunyan-format.ts` for dev-only isolation — see research.md, Phase 3) that default-exports a function returning a pino-pretty stream
+  - Use `customPrettifiers.time` to format epoch as `HH:mm:ss.SSSZ` (UTC time-only, no brackets)
+  - Use `customPrettifiers.level` to return `${label.padStart(5)} ${log.name}` (right-aligned 5-char level + namespace)
+  - Set `ignore: 'pid,hostname,name'` so name appears via the level prettifier, not in pino-pretty's default parens
+  - Accept `singleLine` option to pass through to pino-pretty
+  - Verify the module is built to `dist/transports/bunyan-format.js` by vite's `preserveModules` config
+  - _Requirements: 12.1, 12.2, 12.3, 12.4, 12.5, 12.6_
+
+- [x] 12.2 Update TransportFactory to use bunyan-format transport in dev only
+  - In the **development** branch of `createNodeTransportOptions`, change the transport target from `'pino-pretty'` to the resolved path of `bunyan-format.js` (via `import.meta.url`)
+  - Remove `translateTime` and `ignore` options from the dev transport config (now handled inside the custom transport)
+  - Pass `singleLine: false` for dev
+  - In the **production + FORMAT_NODE_LOG** branch, keep `target: 'pino-pretty'` with standard options (`translateTime: 'SYS:standard'`, `ignore: 'pid,hostname'`, `singleLine: true`) — do NOT use bunyan-format
+  - The bunyan-format module path is only resolved in the dev code path, ensuring it is never imported in production
+  - Update unit tests in `transport-factory.spec.ts`: dev target contains `bunyan-format`; prod + FORMAT_NODE_LOG target is `'pino-pretty'`
+  - _Requirements: 12.1, 12.6, 12.7, 12.8_
+
+- [x] 12.3 Verify bunyan-format output
+  - Run the dev server and confirm log output matches the bunyan-format "short" style: `HH:mm:ss.SSSZ LEVEL name: message`
+  - Confirm colorization works (DEBUG=cyan, INFO=green, WARN=yellow, ERROR=red)
+  - Confirm multi-line output in dev (extra fields on subsequent lines)
+  - _Requirements: 12.1, 12.2, 12.3, 12.4, 12.5_
+
+- [x] 13. Encapsulate pino-http in @growi/logger
+- [x] 13.1 Create HTTP logger middleware factory in @growi/logger
+  - Create `packages/logger/src/http-logger.ts` exporting `async createHttpLoggerMiddleware(options?)`
+  - The function creates `pinoHttp` middleware internally with `loggerFactory(namespace)`
+  - In development mode (`NODE_ENV !== 'production'`): dynamically import `morganLikeFormatOptions` via `await import('./morgan-like-format-options')` and apply to pino-http options
+  - In production mode: use pino-http with default message formatting (no morgan-like module imported)
+  - Accept optional `namespace` (default: `'express'`) and `autoLogging` options
+  - Handle the `Logger<string>` → pino-http's expected Logger type assertion internally
+  - Add `pino-http` to `@growi/logger` package.json dependencies
+  - Export `createHttpLoggerMiddleware` from `packages/logger/src/index.ts`
+  - _Requirements: 13.1, 13.2, 13.3, 13.5, 13.6_
+
+- [x] 13.2 (P) Migrate apps/app to use createHttpLoggerMiddleware
+  - Replace the direct `pinoHttp` import and configuration in `apps/app/src/server/crowi/index.ts` with `await createHttpLoggerMiddleware(...)` from `@growi/logger`
+  - Pass the `/_next/static/` autoLogging ignore function via the options
+  - Remove `pino-http` and its type imports from the file
+  - Remove `morganLikeFormatOptions` import (now applied internally in dev only)
+  - Remove `pino-http` from `apps/app/package.json` if no longer directly used
+  - Run `pnpm --filter @growi/app lint:typecheck` to confirm no type errors
+  - _Requirements: 13.4_
+
+- [x] 13.3 (P) Migrate apps/slackbot-proxy to use createHttpLoggerMiddleware
+  - Replace the direct `pinoHttp` import and configuration in `apps/slackbot-proxy/src/Server.ts` with `await createHttpLoggerMiddleware(...)` from `@growi/logger`
+  - Remove `pino-http` and its type imports from the file
+  - Remove `morganLikeFormatOptions` import (now applied internally in dev only)
+  - Remove the `as unknown as` type assertion (now handled internally)
+  - Remove `pino-http` from `apps/slackbot-proxy/package.json` if no longer directly used
+  - Run `pnpm --filter @growi/slackbot-proxy lint:typecheck` to confirm no type errors
+  - _Requirements: 13.4_
+
+- [x] 14. Validate bunyan-format and HTTP encapsulation
+- [x] 14.1 Run full validation
+  - Run `@growi/logger` package tests
+  - Run lint and type-check for apps/app and apps/slackbot-proxy
+  - Run `turbo run build --filter @growi/app` to verify production build succeeds
+  - Verify no remaining direct `pino-http` imports in apps/app or apps/slackbot-proxy source files
+  - Verify that bunyan-format transport and morganLikeFormatOptions are NOT imported in production (grep for dynamic import pattern)
+  - _Requirements: 12.1, 12.6, 12.7, 13.4, 13.5, 13.6_

+ 1 - 4
apps/app/.claude/skills/build-optimization/SKILL.md

@@ -27,16 +27,13 @@ user-invocable: false
 
 ### Resolve Aliases (`turbopack.resolveAlias`)
 
-7 server-only packages + `fs` are aliased to `./src/lib/empty-module.ts` in browser context:
+3 server-only packages + `fs` are aliased to `./src/lib/empty-module.ts` in browser context:
 
 | Package | Reason |
 |---------|--------|
 | `fs` | Node.js built-in, not available in browser |
-| `dtrace-provider` | Native module, server-only |
 | `mongoose` | MongoDB driver, server-only |
 | `i18next-fs-backend` | File-system i18n loader, server-only |
-| `bunyan` | Server-side logger |
-| `bunyan-format` | Server-side logger formatter |
 | `core-js` | Server-side polyfills |
 
 - Uses conditional `{ browser: './src/lib/empty-module.ts' }` syntax so server-side resolution is unaffected

+ 2 - 0
apps/app/.gitignore

@@ -11,6 +11,8 @@ next.config.js
 /build/
 /dist/
 /transpiled/
+/config/**/*.js
+/config/**/*.d.ts
 /public/static/fonts
 /public/static/js
 /public/static/styles

+ 14 - 3
apps/app/bin/postbuild-server.ts

@@ -1,19 +1,25 @@
 /**
  * Post-build script for server compilation.
  *
- * tspc compiles both `src/` and `config/` (which will be migrated to TypeScript),
+ * tspc compiles both `src/` and `config/` (TypeScript files under config/),
  * so the output directory (`transpiled/`) mirrors the source tree structure
  * (e.g. `transpiled/src/`, `transpiled/config/`).
  *
  * Setting `rootDir: "src"` and `outDir: "dist"` in tsconfig would eliminate this script,
  * but that would break once `config/` is included in the compilation.
- * Instead, this script extracts only `transpiled/src/` into `dist/` and discards the rest.
+ *
+ * This script:
+ * 1. Extracts `transpiled/src/` into `dist/`
+ * 2. Copies compiled `transpiled/config/` files into `config/` so that
+ *    relative imports from `dist/` (e.g. `../../../config/logger/config.dev`)
+ *    resolve correctly at runtime.
  */
-import { readdirSync, renameSync, rmSync } from 'node:fs';
+import { cpSync, existsSync, readdirSync, renameSync, rmSync } from 'node:fs';
 
 const TRANSPILED_DIR = 'transpiled';
 const DIST_DIR = 'dist';
 const SRC_SUBDIR = `${TRANSPILED_DIR}/src`;
+const CONFIG_SUBDIR = `${TRANSPILED_DIR}/config`;
 
 // List transpiled contents for debugging
 // biome-ignore lint/suspicious/noConsole: This is a build script, console output is expected.
@@ -27,5 +33,10 @@ rmSync(DIST_DIR, { recursive: true, force: true });
 // Move transpiled/src -> dist
 renameSync(SRC_SUBDIR, DIST_DIR);
 
+// Copy compiled config files to app root config/ so runtime imports resolve
+if (existsSync(CONFIG_SUBDIR)) {
+  cpSync(CONFIG_SUBDIR, 'config', { recursive: true, force: true });
+}
+
 // Remove leftover transpiled directory
 rmSync(TRANSPILED_DIR, { recursive: true, force: true });

+ 5 - 1
apps/app/config/logger/config.dev.js → apps/app/config/logger/config.dev.ts

@@ -1,4 +1,6 @@
-module.exports = {
+import type { LoggerConfig } from '@growi/logger';
+
+const config: LoggerConfig = {
   default: 'info',
 
   // 'express-session': 'debug',
@@ -47,3 +49,5 @@ module.exports = {
   'growi:service:openai': 'debug',
   'growi:middleware:access-token-parser:access-token': 'debug',
 };
+
+export default config;

+ 5 - 1
apps/app/config/logger/config.prod.js → apps/app/config/logger/config.prod.ts

@@ -1,6 +1,10 @@
-module.exports = {
+import type { LoggerConfig } from '@growi/logger';
+
+const config: LoggerConfig = {
   default: 'info',
 
   'growi:routes:login-passport': 'debug',
   'growi:service:PassportService': 'debug',
 };
+
+export default config;

+ 0 - 3
apps/app/next.config.ts

@@ -133,11 +133,8 @@ const nextConfig: NextConfig = {
       // Exclude fs from client bundle
       fs: { browser: './src/lib/empty-module.ts' },
       // Exclude server-only packages from client bundle
-      'dtrace-provider': { browser: './src/lib/empty-module.ts' },
       mongoose: { browser: './src/lib/empty-module.ts' },
       'i18next-fs-backend': { browser: './src/lib/empty-module.ts' },
-      bunyan: { browser: './src/lib/empty-module.ts' },
-      'bunyan-format': { browser: './src/lib/empty-module.ts' },
       'core-js': { browser: './src/lib/empty-module.ts' },
     },
   },

+ 1 - 7
apps/app/package.json

@@ -66,7 +66,6 @@
     "@azure/identity": "^4.4.1",
     "@azure/openai": "^2.0.0",
     "@azure/storage-blob": "^12.16.0",
-    "@browser-bunyan/console-formatted-stream": "^1.8.0",
     "@codemirror/autocomplete": "^6.18.4",
     "@codemirror/commands": "^6.8.0",
     "@codemirror/lang-markdown": "^6.3.2",
@@ -83,6 +82,7 @@
     "@google-cloud/storage": "^5.8.5",
     "@growi/core": "workspace:^",
     "@growi/emoji-mart-data": "workspace:^",
+    "@growi/logger": "workspace:*",
     "@growi/pdf-converter-client": "workspace:^",
     "@growi/pluginkit": "workspace:^",
     "@growi/presentation": "workspace:^",
@@ -126,9 +126,7 @@
     "babel-plugin-superjson-next": "^0.4.2",
     "body-parser": "^1.20.3",
     "bootstrap": "^5.3.8",
-    "browser-bunyan": "^1.8.0",
     "bson-objectid": "^2.0.4",
-    "bunyan": "^1.8.15",
     "cm6-theme-basic-light": "^0.2.0",
     "codemirror": "^6.0.1",
     "compression": "^1.7.4",
@@ -150,7 +148,6 @@
     "ejs": "^3.1.10",
     "expose-gc": "^1.0.0",
     "express": "^4.20.0",
-    "express-bunyan-logger": "^1.3.3",
     "express-mongo-sanitize": "^2.1.0",
     "express-session": "^1.16.1",
     "express-validator": "^6.14.0",
@@ -275,7 +272,6 @@
     "uid-safe": "^2.1.5",
     "unified": "^11.0.0",
     "unist-util-visit": "^5.0.0",
-    "universal-bunyan": "^0.9.2",
     "unstated": "^2.1.1",
     "unzip-stream": "^0.3.2",
     "url-join": "^4.0.0",
@@ -304,7 +300,6 @@
     "@testing-library/jest-dom": "^6.5.0",
     "@testing-library/user-event": "^14.5.2",
     "@types/archiver": "^6.0.2",
-    "@types/bunyan": "^1.8.11",
     "@types/express": "^4.17.21",
     "@types/hast": "^3.0.4",
     "@types/js-cookie": "^3.0.6",
@@ -338,7 +333,6 @@
     "mdast-util-find-and-replace": "^3.0.1",
     "mongodb-connection-string-url": "^7.0.0",
     "mongodb-memory-server-core": "^9.1.1",
-    "morgan": "^1.10.0",
     "openapi-typescript": "^7.8.0",
     "rehype-rewrite": "^4.0.2",
     "remark-github-admonitions-to-directives": "^2.0.0",

+ 6 - 8
apps/app/src/client/components/PageEditor/PageEditor.tsx

@@ -221,10 +221,10 @@ export const PageEditorSubstance = (props: Props): JSX.Element => {
   const save: Save = useCallback(
     async (revisionId, markdown, opts, onConflict) => {
       if (pageId == null || selectedGrant == null) {
-        logger.error('Some materials to save are invalid', {
-          pageId,
-          selectedGrant,
-        });
+        logger.error(
+          { pageId, selectedGrant },
+          'Some materials to save are invalid',
+        );
         throw new Error('Some materials to save are invalid');
       }
 
@@ -251,7 +251,7 @@ export const PageEditorSubstance = (props: Props): JSX.Element => {
 
         return page;
       } catch (error) {
-        logger.error('failed to save', error);
+        logger.error({ err: error }, 'failed to save');
 
         const remoteRevisionData = extractRemoteRevisionDataFromErrorObj(error);
         if (remoteRevisionData != null) {
@@ -329,9 +329,7 @@ export const PageEditorSubstance = (props: Props): JSX.Element => {
   const uploadHandler = useCallback(
     (files: File[]) => {
       if (pageId == null) {
-        logger.error('pageId is invalid', {
-          pageId,
-        });
+        logger.error({ pageId }, 'pageId is invalid');
         throw new Error('pageId is invalid');
       }
 

+ 1 - 1
apps/app/src/client/components/RecentActivity/RecentActivity.tsx

@@ -54,7 +54,7 @@ export const RecentActivity = (props: RecentActivityProps): JSX.Element => {
 
   useEffect(() => {
     if (error) {
-      logger.error('Failed to fetch recent activity data', error);
+      logger.error({ err: error }, 'Failed to fetch recent activity data');
       toastError(error);
       return;
     }

+ 2 - 2
apps/app/src/client/components/StickyStretchableScroller.tsx

@@ -1,5 +1,5 @@
 import type { RefObject } from 'react';
-import React, {
+import {
   type JSX,
   useCallback,
   useEffect,
@@ -73,7 +73,7 @@ export const StickyStretchableScroller = (
     const scrollElement = simplebarRef.current.getScrollElement();
     const newHeight = calcViewHeight(scrollElement);
 
-    logger.debug('Set new height to simplebar', newHeight);
+    logger.debug({ newHeight }, 'Set new height to simplebar');
 
     // set new height
     setSimplebarMaxHeight(newHeight);

+ 7 - 3
apps/app/src/features/admin/states/socket-io.ts

@@ -27,12 +27,16 @@ export const useSetupAdminSocket = (): void => {
       .then(({ default: io }) => {
         if (cancelled) return;
         const newSocket = io('/admin', { transports: ['websocket'] });
-        newSocket.on('connect_error', (error) => logger.error('/admin', error));
-        newSocket.on('error', (error) => logger.error('/admin', error));
+        newSocket.on('connect_error', (error) =>
+          logger.error({ err: error }, '/admin'),
+        );
+        newSocket.on('error', (error) =>
+          logger.error({ err: error }, '/admin'),
+        );
         setSocket(newSocket);
       })
       .catch((error) =>
-        logger.error('Failed to initialize admin WebSocket:', error),
+        logger.error({ err: error }, 'Failed to initialize admin WebSocket'),
       );
 
     return () => {

+ 2 - 2
apps/app/src/features/audit-log-bulk-export/server/service/audit-log-bulk-export-job-cron/index.ts

@@ -203,8 +203,8 @@ class AuditLogBulkExportJobCronService
     action: SupportedActionType,
   ) {
     logger.debug(
-      'Creating activity with targetModel:',
-      SupportedTargetModel.MODEL_AUDIT_LOG_BULK_EXPORT_JOB,
+      { targetModel: SupportedTargetModel.MODEL_AUDIT_LOG_BULK_EXPORT_JOB },
+      'Creating activity with targetModel',
     );
     const activity = await this.crowi.activityService.createActivity({
       action,

+ 2 - 2
apps/app/src/features/comment/server/models/comment.ts

@@ -70,11 +70,11 @@ const add: Add = async function (
       commentPosition,
       replyTo,
     });
-    logger.debug('Comment saved.', data);
+    logger.debug({ data }, 'Comment saved.');
 
     return data;
   } catch (err) {
-    logger.debug('Error on saving comment.', err);
+    logger.debug({ err }, 'Error on saving comment.');
     throw err;
   }
 };

+ 24 - 15
apps/app/src/features/growi-plugin/server/services/growi-plugin/growi-plugin.ts

@@ -98,7 +98,7 @@ export class GrowiPluginService implements IGrowiPluginService {
             growiPlugin.organizationName,
           );
         } catch (err) {
-          logger.error(err);
+          logger.error({ err }, 'Plugin path validation failed');
           continue;
         }
         if (fs.existsSync(pluginPath)) {
@@ -135,12 +135,15 @@ export class GrowiPluginService implements IGrowiPluginService {
               await fs.promises.rm(unzippedReposPath, { recursive: true });
             if (fs.existsSync(pluginPath))
               await fs.promises.rm(pluginPath, { recursive: true });
-            logger.error(err);
+            logger.error({ err }, 'Failed to download plugin repository');
           }
         }
       }
     } catch (err) {
-      logger.error(err);
+      logger.error(
+        { err },
+        'Failed to download non-existent plugin repositories',
+      );
     }
   }
 
@@ -199,7 +202,7 @@ export class GrowiPluginService implements IGrowiPluginService {
       // move new repository from temporary path to storing path.
       fs.renameSync(temporaryReposPath, reposPath);
     } catch (err) {
-      logger.error(err);
+      logger.error({ err }, 'Failed to install plugin');
       throw err;
     } finally {
       // clean up
@@ -222,7 +225,7 @@ export class GrowiPluginService implements IGrowiPluginService {
         await fs.promises.rm(reposPath, { recursive: true });
       await this.deleteOldPluginDocument(installedPath);
 
-      logger.error(err);
+      logger.error({ err }, 'Failed to save plugin metadata');
       throw err;
     }
   }
@@ -253,7 +256,7 @@ export class GrowiPluginService implements IGrowiPluginService {
           }
         })
         .catch((err) => {
-          logger.error(err);
+          logger.error({ err }, 'Failed to download file');
           rejects('Failed to download file.');
         });
     });
@@ -270,7 +273,7 @@ export class GrowiPluginService implements IGrowiPluginService {
         unzipStream.Extract({ path: destPath.toString() }),
       );
     } catch (err) {
-      logger.error(err);
+      logger.error({ err }, 'Failed to unzip');
       throw new Error('Failed to unzip.');
     }
   }
@@ -345,7 +348,7 @@ export class GrowiPluginService implements IGrowiPluginService {
       plugin.meta = await generateTemplatePluginMeta(plugin, validationData);
     }
 
-    logger.info('Plugin detected => ', plugin);
+    logger.info({ plugin }, 'Plugin detected');
 
     return [plugin];
   }
@@ -371,7 +374,10 @@ export class GrowiPluginService implements IGrowiPluginService {
     try {
       await GrowiPlugin.deleteOne({ _id: pluginId });
     } catch (err) {
-      logger.error(err);
+      logger.error(
+        { err },
+        'Failed to delete plugin from GrowiPlugin documents',
+      );
       throw new Error('Failed to delete plugin from GrowiPlugin documents.');
     }
 
@@ -382,7 +388,7 @@ export class GrowiPluginService implements IGrowiPluginService {
         growiPlugins.installedPath,
       );
     } catch (err) {
-      logger.error(err);
+      logger.error({ err }, 'Invalid plugin installedPath');
       throw new Error(
         'The installedPath for the plugin is invalid, and the plugin has already been removed.',
       );
@@ -392,7 +398,7 @@ export class GrowiPluginService implements IGrowiPluginService {
       try {
         await deleteFolder(growiPluginsPath);
       } catch (err) {
-        logger.error(err);
+        logger.error({ err }, 'Failed to delete plugin repository');
         throw new Error('Failed to delete plugin repository.');
       }
     } else {
@@ -423,8 +429,8 @@ export class GrowiPluginService implements IGrowiPluginService {
       });
     } catch (e) {
       logger.error(
+        { err: e },
         `Could not find the theme '${theme}' from GrowiPlugin documents.`,
-        e,
       );
     }
 
@@ -440,7 +446,10 @@ export class GrowiPluginService implements IGrowiPluginService {
       }
       themeHref = `${PLUGIN_EXPRESS_STATIC_DIR}/${matchedPlugin.installedPath}/dist/${manifest[matchedThemeMetadata.manifestKey].file}`;
     } catch (e) {
-      logger.error(`Could not read manifest file for the theme '${theme}'`, e);
+      logger.error(
+        { err: e },
+        `Could not read manifest file for the theme '${theme}'`,
+      );
     }
 
     return {
@@ -479,11 +488,11 @@ export class GrowiPluginService implements IGrowiPluginService {
             entries.push([growiPlugin.installedPath, href]);
           }
         } catch (e) {
-          logger.warn(e);
+          logger.warn({ err: e }, 'Failed to retrieve plugin manifest');
         }
       });
     } catch (e) {
-      logger.error('Could not retrieve GrowiPlugin documents.', e);
+      logger.error({ err: e }, 'Could not retrieve GrowiPlugin documents.');
     }
 
     return entries;

+ 2 - 2
apps/app/src/features/openai/client/components/AiAssistant/AiAssistantSidebar/AiAssistantSidebar.tsx

@@ -373,10 +373,10 @@ const AiAssistantSidebarSubstance: React.FC<
                   mainMessages.push(data.appendedMessage);
                 },
                 onDetectedDiff: (data) => {
-                  logger.debug('sse diff', { data });
+                  logger.debug({ data }, 'sse diff');
                 },
                 onFinalized: (data) => {
-                  logger.debug('sse finalized', { data });
+                  logger.debug({ data }, 'sse finalized');
                 },
               });
             } else if (trimmedLine.startsWith('error:')) {

+ 2 - 2
apps/app/src/features/openai/server/routes/edit/index.ts

@@ -385,7 +385,7 @@ export const postMessageToEditHandlersFactory = (
 
         // Error handler
         stream.once('error', (err) => {
-          logger.error('Stream error:', err);
+          logger.error({ err }, 'Stream error');
 
           // Clean up
           streamProcessor.destroy();
@@ -409,7 +409,7 @@ export const postMessageToEditHandlersFactory = (
         });
       } catch (err) {
         // Clean up and respond on error
-        logger.error('Error in edit handler:', err);
+        logger.error({ err }, 'Error in edit handler');
         streamProcessor.destroy();
         return res.status(500).send(err.message);
       }

+ 22 - 13
apps/app/src/features/openai/server/services/editor-assistant/llm-response-stream-processor.ts

@@ -132,13 +132,16 @@ export class LlmResponseStreamProcessor {
             const validDiff = LlmEditorAssistantDiffSchema.safeParse(item);
             if (!validDiff.success) {
               // Phase 2B: Enhanced error logging for diff validation failures
-              logger.warn('Diff validation failed', {
-                errors: validDiff.error.errors,
-                item: JSON.stringify(item).substring(0, 200),
-                hasStartLine: 'startLine' in item,
-                hasSearch: 'search' in item,
-                hasReplace: 'replace' in item,
-              });
+              logger.warn(
+                {
+                  errors: validDiff.error.errors,
+                  item: JSON.stringify(item).substring(0, 200),
+                  hasStartLine: 'startLine' in item,
+                  hasSearch: 'search' in item,
+                  hasReplace: 'replace' in item,
+                },
+                'Diff validation failed',
+              );
               continue;
             }
 
@@ -146,10 +149,13 @@ export class LlmResponseStreamProcessor {
 
             // Phase 2B: Additional validation for required fields
             if (!diff.startLine) {
-              logger.error('startLine is required but missing in diff', {
-                search: diff.search?.substring(0, 50),
-                replace: diff.replace?.substring(0, 50),
-              });
+              logger.error(
+                {
+                  search: diff.search?.substring(0, 50),
+                  replace: diff.replace?.substring(0, 50),
+                },
+                'startLine is required but missing in diff',
+              );
               continue;
             }
 
@@ -187,7 +193,10 @@ export class LlmResponseStreamProcessor {
       }
     } catch (e) {
       // Ignore parse errors (expected for incomplete JSON)
-      logger.debug('JSON parsing error (expected for partial data):', e);
+      logger.debug(
+        { err: e },
+        'JSON parsing error (expected for partial data)',
+      );
     }
   }
 
@@ -254,7 +263,7 @@ export class LlmResponseStreamProcessor {
       const finalMessage = this.extractFinalMessage(rawBuffer);
       this.options?.dataFinalizedCallback?.(finalMessage, this.replacements);
     } catch (e) {
-      logger.debug('Failed to parse final JSON response:', e);
+      logger.debug({ err: e }, 'Failed to parse final JSON response');
 
       // Send final notification even on error
       const finalMessage = this.extractFinalMessage(rawBuffer);

+ 49 - 31
apps/app/src/features/openai/server/services/openai.ts

@@ -259,8 +259,8 @@ class OpenaiService implements IOpenaiService {
           })
           .catch((err) => {
             logger.error(
-              `Failed to generate thread title for threadId ${thread.id}:`,
-              err,
+              { err },
+              `Failed to generate thread title for threadId ${thread.id}`,
             );
           });
       }
@@ -282,9 +282,9 @@ class OpenaiService implements IOpenaiService {
           threadRelation.threadId,
           vectorStoreId,
         );
-        logger.debug('Update thread', updatedThreadResponse);
+        logger.debug({ data: updatedThreadResponse }, 'Update thread');
       } catch (err) {
-        logger.error(err);
+        logger.error({ err }, 'Failed to update thread');
       }
     }
   }
@@ -321,7 +321,7 @@ class OpenaiService implements IOpenaiService {
       const deletedThreadResponse = await this.client.deleteThread(
         threadRelation.threadId,
       );
-      logger.debug('Delete thread', deletedThreadResponse);
+      logger.debug({ data: deletedThreadResponse }, 'Delete thread');
       await threadRelation.remove();
     } catch (err) {
       await openaiApiErrorHandler(err, {
@@ -351,13 +351,13 @@ class OpenaiService implements IOpenaiService {
         const deleteThreadResponse = await this.client.deleteThread(
           expiredThreadRelation.threadId,
         );
-        logger.debug('Delete thread', deleteThreadResponse);
+        logger.debug({ data: deleteThreadResponse }, 'Delete thread');
         deletedThreadIds.push(expiredThreadRelation.threadId);
 
         // sleep
         await new Promise((resolve) => setTimeout(resolve, apiCallInterval));
       } catch (err) {
-        logger.error(err);
+        logger.error({ err }, 'Failed to delete expired thread');
       }
     }
 
@@ -509,7 +509,7 @@ class OpenaiService implements IOpenaiService {
       const deleteVectorStoreResponse = await this.client.deleteVectorStore(
         vectorStoreDocument.vectorStoreId,
       );
-      logger.debug('Delete vector store', deleteVectorStoreResponse);
+      logger.debug({ data: deleteVectorStoreResponse }, 'Delete vector store');
       await vectorStoreDocument.markAsDeleted();
     } catch (err) {
       await openaiApiErrorHandler(err, {
@@ -563,7 +563,7 @@ class OpenaiService implements IOpenaiService {
               attachment._id,
             );
           } catch (err) {
-            logger.error(err);
+            logger.error({ err }, 'Failed to upload attachment file');
           }
         }
         callback();
@@ -647,7 +647,7 @@ class OpenaiService implements IOpenaiService {
     const fileUploadResult = await Promise.allSettled(workers);
     fileUploadResult.forEach((result) => {
       if (result.status === 'rejected') {
-        logger.error(result.reason);
+        logger.error({ err: result.reason }, 'File upload failed');
       }
     });
 
@@ -677,14 +677,14 @@ class OpenaiService implements IOpenaiService {
           uploadedFileIds,
         );
       logger.debug(
+        { data: createVectorStoreFileBatchResponse },
         'Create vector store file',
-        createVectorStoreFileBatchResponse,
       );
 
       // Set isAttachedToVectorStore: true when the uploaded file is attached to VectorStore
       await VectorStoreFileRelationModel.markAsAttachedToVectorStore(pageIds);
     } catch (err) {
-      logger.error(err);
+      logger.error({ err }, 'Failed to create vector store file batch');
 
       // Delete all uploaded files if createVectorStoreFileBatch fails
       for await (const pageId of pageIds) {
@@ -742,8 +742,8 @@ class OpenaiService implements IOpenaiService {
       const fileId = vectorStoreFileRelation.fileIds[0];
       const deleteFileResponse = await this.client.deleteFile(fileId);
       logger.debug(
-        'Delete vector store file (attachment) ',
-        deleteFileResponse,
+        { data: deleteFileResponse },
+        'Delete vector store file (attachment)',
       );
 
       // Delete related VectorStoreFileRelation document
@@ -752,7 +752,10 @@ class OpenaiService implements IOpenaiService {
         await deleteAllAttachmentVectorStoreFileRelations();
       }
     } catch (err) {
-      logger.error(err);
+      logger.error(
+        { err },
+        'Failed to delete vector store file for attachment',
+      );
       await openaiApiErrorHandler(err, {
         notFoundError: () => deleteAllAttachmentVectorStoreFileRelations(),
       });
@@ -781,7 +784,10 @@ class OpenaiService implements IOpenaiService {
               vectorStoreFileRelation,
             );
           } catch (err) {
-            logger.error(err);
+            logger.error(
+              { err },
+              'Failed to delete vector store file for attachment',
+            );
           }
         }
       }
@@ -800,7 +806,7 @@ class OpenaiService implements IOpenaiService {
     for await (const fileId of vectorStoreFileRelation.fileIds) {
       try {
         const deleteFileResponse = await this.client.deleteFile(fileId);
-        logger.debug('Delete vector store file', deleteFileResponse);
+        logger.debug({ data: deleteFileResponse }, 'Delete vector store file');
         deletedFileIds.push(fileId);
         if (apiCallInterval != null) {
           // sleep
@@ -812,7 +818,7 @@ class OpenaiService implements IOpenaiService {
             deletedFileIds.push(fileId);
           },
         });
-        logger.error(err);
+        logger.error({ err }, 'Failed to delete file');
       }
     }
 
@@ -880,7 +886,7 @@ class OpenaiService implements IOpenaiService {
           apiCallInterval,
         );
       } catch (err) {
-        logger.error(err);
+        logger.error({ err }, 'Failed to delete vector store file');
       }
     }
   }
@@ -896,7 +902,10 @@ class OpenaiService implements IOpenaiService {
     try {
       await this.deleteVectorStoreFileForAttachment(vectorStoreFileRelation);
     } catch (err) {
-      logger.error(err);
+      logger.error(
+        { err },
+        'Failed to delete vector store file on attachment delete',
+      );
     }
   }
 
@@ -1002,10 +1011,13 @@ class OpenaiService implements IOpenaiService {
       }
 
       logger.debug('--------- createVectorStoreFileOnPageCreate ---------');
-      logger.debug('AccessScopeType of aiAssistant: ', aiAssistant.accessScope);
       logger.debug(
-        'VectorStoreFile pagePath to be created: ',
-        pagesToVectorize.map((page) => page.path),
+        { accessScope: aiAssistant.accessScope },
+        'AccessScopeType of aiAssistant',
+      );
+      logger.debug(
+        { pagePaths: pagesToVectorize.map((page) => page.path) },
+        'VectorStoreFile pagePath to be created',
       );
       logger.debug('-----------------------------------------------------');
 
@@ -1038,11 +1050,17 @@ class OpenaiService implements IOpenaiService {
       }
 
       logger.debug('---------- updateVectorStoreOnPageUpdate ------------');
-      logger.debug('AccessScopeType of aiAssistant: ', aiAssistant.accessScope);
-      logger.debug('PagePath of VectorStoreFile to be deleted: ', page.path);
       logger.debug(
-        'pagePath of VectorStoreFile to be created: ',
-        pagesToVectorize.map((page) => page.path),
+        { accessScope: aiAssistant.accessScope },
+        'AccessScopeType of aiAssistant',
+      );
+      logger.debug(
+        { pagePath: page.path },
+        'PagePath of VectorStoreFile to be deleted',
+      );
+      logger.debug(
+        { pagePaths: pagesToVectorize.map((page) => page.path) },
+        'pagePath of VectorStoreFile to be created',
       );
       logger.debug('-----------------------------------------------------');
 
@@ -1089,7 +1107,7 @@ class OpenaiService implements IOpenaiService {
       undefined,
       file.path,
     );
-    logger.debug('Uploaded file', uploadedFile);
+    logger.debug({ data: uploadedFile }, 'Uploaded file');
 
     for await (const aiAssistant of aiAssistants) {
       const pagesToVectorize = await this.filterPagesByAccessScope(
@@ -1152,8 +1170,8 @@ class OpenaiService implements IOpenaiService {
       ) {
         try {
           logger.debug(
-            'Target page path for VectorStoreFile generation: ',
-            chunk.map((page) => page.path),
+            { pagePaths: chunk.map((page) => page.path) },
+            'Target page path for VectorStoreFile generation',
           );
           await createVectorStoreFile(vectorStoreRelation, chunk);
           this.push(chunk);
@@ -1585,7 +1603,7 @@ class OpenaiService implements IOpenaiService {
       0,
     );
 
-    logger.debug('TotalPageCount: ', totalPageCount);
+    logger.debug({ totalPageCount }, 'TotalPageCount');
 
     const limitLearnablePageCountPerAssistant = configManager.getConfig(
       'openai:limitLearnablePageCountPerAssistant',

+ 5 - 4
apps/app/src/features/opentelemetry/server/custom-resource-attributes/application-resource-attributes.ts

@@ -33,13 +33,14 @@ export async function getApplicationResourceAttributes(): Promise<Attributes> {
         growiInfo.additionalInfo?.installedAtByOldestUser?.toISOString(),
     };
 
-    logger.info('Application resource attributes collected', { attributes });
+    logger.info({ attributes }, 'Application resource attributes collected');
 
     return attributes;
   } catch (error) {
-    logger.error('Failed to collect application resource attributes', {
-      error,
-    });
+    logger.error(
+      { err: error },
+      'Failed to collect application resource attributes',
+    );
     return {};
   }
 }

+ 1 - 1
apps/app/src/features/opentelemetry/server/custom-resource-attributes/os-resource-attributes.ts

@@ -28,7 +28,7 @@ export function getOsResourceAttributes(): Attributes {
     'os.totalmem': osInfo.totalmem,
   };
 
-  logger.info('OS resource attributes collected', { attributes });
+  logger.info({ attributes }, 'OS resource attributes collected');
 
   return attributes;
 }

+ 12 - 7
apps/app/src/features/opentelemetry/server/logger.ts

@@ -4,7 +4,7 @@ import loggerFactory from '~/utils/logger';
 
 const logger = loggerFactory('growi:opentelemetry:diag');
 
-class DiagLoggerBunyanAdapter implements DiagLogger {
+class DiagLoggerPinoAdapter implements DiagLogger {
   private parseMessage(
     message: string,
     args: unknown[],
@@ -47,27 +47,32 @@ class DiagLoggerBunyanAdapter implements DiagLogger {
   }
 
   error(message: string, ...args): void {
-    logger.error(...this.parseMessage(message, args));
+    const [msg, data] = this.parseMessage(message, args);
+    logger.error(data, msg);
   }
 
   warn(message: string, ...args): void {
-    logger.warn(...this.parseMessage(message, args));
+    const [msg, data] = this.parseMessage(message, args);
+    logger.warn(data, msg);
   }
 
   info(message: string, ...args): void {
-    logger.info(...this.parseMessage(message, args));
+    const [msg, data] = this.parseMessage(message, args);
+    logger.info(data, msg);
   }
 
   debug(message: string, ...args): void {
-    logger.debug(...this.parseMessage(message, args));
+    const [msg, data] = this.parseMessage(message, args);
+    logger.debug(data, msg);
   }
 
   verbose(message: string, ...args): void {
-    logger.trace(...this.parseMessage(message, args));
+    const [msg, data] = this.parseMessage(message, args);
+    logger.trace(data, msg);
   }
 }
 
 export const initLogger = (): void => {
   // Enable global logger for OpenTelemetry
-  diag.setLogger(new DiagLoggerBunyanAdapter());
+  diag.setLogger(new DiagLoggerPinoAdapter());
 };

+ 1 - 1
apps/app/src/features/opentelemetry/server/node-sdk-configuration.ts

@@ -50,7 +50,7 @@ export const generateNodeSDKConfiguration = (opts?: Option): Configuration => {
       }),
       instrumentations: [
         getNodeAutoInstrumentations({
-          '@opentelemetry/instrumentation-bunyan': {
+          '@opentelemetry/instrumentation-pino': {
             enabled: false,
           },
           // disable fs instrumentation since this generates very large amount of traces

+ 1 - 1
apps/app/src/features/page-bulk-export/server/service/page-bulk-export-job-cron/steps/compress-and-upload.ts

@@ -86,7 +86,7 @@ export async function compressAndUpload(
   pageArchiver.pipe(uploadStream);
 
   pageArchiver.on('error', (err) => {
-    logger.error('pageArchiver error', err);
+    logger.error({ err }, 'pageArchiver error');
     uploadStream.destroy(err);
   });
 

+ 3 - 3
apps/app/src/pages/common-props/commons.ts

@@ -116,23 +116,23 @@ function isValidCommonEachRouteProps(
       p.nextjsRoutingPage !== undefined
     ) {
       logger.warn(
-        'isValidCommonEachRouteProps: nextjsRoutingPage is not a string or null',
         { nextjsRoutingPage: p.nextjsRoutingPage },
+        'isValidCommonEachRouteProps: nextjsRoutingPage is not a string or null',
       );
       return false;
     }
   }
   if (typeof p.currentPathname !== 'string') {
     logger.warn(
-      'isValidCommonEachRouteProps: currentPathname is not a string',
       { currentPathname: p.currentPathname },
+      'isValidCommonEachRouteProps: currentPathname is not a string',
     );
     return false;
   }
   if (typeof p.isMaintenanceMode !== 'boolean') {
     logger.warn(
-      'isValidCommonEachRouteProps: isMaintenanceMode is not a boolean',
       { isMaintenanceMode: p.isMaintenanceMode },
+      'isValidCommonEachRouteProps: isMaintenanceMode is not a boolean',
     );
     return false;
   }

+ 3 - 3
apps/app/src/pages/general-page/type-guards.ts

@@ -20,15 +20,15 @@ export function isValidGeneralPageInitialProps(
   // CommonPageInitialProps
   if (p.nextjsRoutingType === NextjsRoutingType.SAME_ROUTE) {
     logger.warn(
-      'isValidGeneralPageInitialProps: nextjsRoutingType must be equal to NextjsRoutingType.INITIAL or NextjsRoutingType.FROM_OUTSIDE',
       { nextjsRoutingType: p.nextjsRoutingType },
+      'isValidGeneralPageInitialProps: nextjsRoutingType must be equal to NextjsRoutingType.INITIAL or NextjsRoutingType.FROM_OUTSIDE',
     );
     return false;
   }
   if (typeof p.growiVersion !== 'string') {
     logger.warn(
-      'isValidGeneralPageInitialProps: growiVersion is not a string',
       { growiVersion: p.growiVersion },
+      'isValidGeneralPageInitialProps: growiVersion is not a string',
     );
     return false;
   }
@@ -37,8 +37,8 @@ export function isValidGeneralPageInitialProps(
   if (p.meta != null && typeof p.meta === 'object') {
     if (!isIPageInfo(p.meta)) {
       logger.warn(
-        'isValidGeneralPageInitialProps: meta is not a valid IPageInfo',
         { meta: p.meta },
+        'isValidGeneralPageInitialProps: meta is not a valid IPageInfo',
       );
       return false;
     }

+ 3 - 5
apps/app/src/server/app.ts

@@ -1,5 +1,3 @@
-import type Logger from 'bunyan';
-
 import {
   initInstrumentation,
   setupAdditionalResourceAttributes,
@@ -8,17 +6,17 @@ import {
 import loggerFactory from '~/utils/logger';
 import { hasProcessFlag } from '~/utils/process-utils';
 
-const logger: Logger = loggerFactory('growi');
+const logger = loggerFactory('growi');
 
 /** **********************************
  *          Main Process
  ********************************** */
 process.on('uncaughtException', (err?: Error) => {
-  logger.error('Uncaught Exception: ', err);
+  logger.error({ err }, 'Uncaught Exception');
 });
 
 process.on('unhandledRejection', (reason, p) => {
-  logger.error('Unhandled Rejection: Promise:', p, 'Reason:', reason);
+  logger.error({ reason, promise: p }, 'Unhandled Rejection');
 });
 
 async function main() {

+ 11 - 21
apps/app/src/server/crowi/index.ts

@@ -2,6 +2,7 @@ import next from 'next';
 import http from 'node:http';
 import path from 'node:path';
 import { createTerminus } from '@godaddy/terminus';
+import { createHttpLoggerMiddleware } from '@growi/logger';
 import attachmentRoutes from '@growi/remark-attachment-refs/dist/server';
 import lsxRoutes from '@growi/remark-lsx/dist/server/index.cjs';
 import type { Express } from 'express';
@@ -630,27 +631,16 @@ class Crowi {
 
     require('./express-init')(this, express);
 
-    // use bunyan
-    if (env === 'production') {
-      const expressBunyanLogger = require('express-bunyan-logger');
-      const bunyanLogger = loggerFactory('express');
-      express.use(
-        expressBunyanLogger({
-          logger: bunyanLogger,
-          excludes: ['*'],
-        }),
-      );
-    }
-    // use morgan
-    else {
-      const morgan = require('morgan');
-      express.use(
-        morgan('dev', {
-          // supress logging for Next.js static files
-          skip: (req) => req.url?.startsWith('/_next/static/'),
-        }),
-      );
-    }
+    // HTTP request logging via @growi/logger (encapsulates pino-http)
+    const httpLogger = await createHttpLoggerMiddleware({
+      // suppress logging for Next.js static files in development mode
+      ...(env !== 'production' && {
+        autoLogging: {
+          ignore: (req) => req.url?.startsWith('/_next/static/') ?? false,
+        },
+      }),
+    });
+    express.use(httpLogger);
 
     this.express = express;
   }

+ 2 - 2
apps/app/src/server/events/user.ts

@@ -48,10 +48,10 @@ class UserEvent extends EventEmitter {
         const body = `# ${user.username}\nThis is ${user.username}'s page`;
 
         await this.crowi.pageService.create(userHomepagePath, body, user, {});
-        logger.debug('User page created', page);
+        logger.debug({ page }, 'User page created');
       }
     } catch (err) {
-      logger.error('Failed to create user page', err);
+      logger.error({ err }, 'Failed to create user page');
     }
   }
 }

+ 4 - 1
apps/app/src/server/middlewares/access-token-parser/api-token.ts

@@ -27,7 +27,10 @@ export const parserForApiToken = async (
     return;
   }
 
-  logger.debug('accessToken is', accessToken);
+  logger.debug(
+    { accessToken: `${accessToken.slice(0, 4)}...${accessToken.slice(-4)}` },
+    'accessToken is',
+  );
 
   const User = mongoose.model<HydratedDocument<IUser>, { findUserByApiToken }>(
     'User',

+ 3 - 3
apps/app/src/server/middlewares/apiv1-form-validator.ts

@@ -8,9 +8,9 @@ import ApiResponse from '../util/apiResponse';
 const logger = loggerFactory('growi:middlewares:ApiV1FormValidator');
 
 export default (req: Request, res: Response, next: NextFunction): void => {
-  logger.debug('req.query', req.query);
-  logger.debug('req.params', req.params);
-  logger.debug('req.body', req.body);
+  logger.debug({ query: req.query }, 'req.query');
+  logger.debug({ params: req.params }, 'req.params');
+  logger.debug({ body: req.body }, 'req.body');
 
   const errObjArray = validationResult(req);
   if (errObjArray.isEmpty()) {

+ 3 - 3
apps/app/src/server/middlewares/apiv3-form-validator.ts

@@ -11,9 +11,9 @@ export const apiV3FormValidator = (
   res: Response & { apiv3Err },
   next: NextFunction,
 ): void => {
-  logger.debug('req.query', req.query);
-  logger.debug('req.params', req.params);
-  logger.debug('req.body', req.body);
+  logger.debug({ query: req.query }, 'req.query');
+  logger.debug({ params: req.params }, 'req.params');
+  logger.debug({ body: req.body }, 'req.body');
 
   const errObjArray = validationResult(req);
   if (errObjArray.isEmpty()) {

+ 16 - 10
apps/app/src/server/middlewares/certify-shared-page-attachment/validate-referer/validate-referer.ts

@@ -38,16 +38,19 @@ export const validateReferer = (
     refererUrl.hostname !== siteUrl.hostname ||
     refererUrl.port !== siteUrl.port
   ) {
-    logger.warn('The hostname or port mismatched.', {
-      refererUrl: {
-        hostname: refererUrl.hostname,
-        port: refererUrl.port,
-      },
-      siteUrl: {
-        hostname: siteUrl.hostname,
-        port: siteUrl.port,
+    logger.warn(
+      {
+        refererUrl: {
+          hostname: refererUrl.hostname,
+          port: refererUrl.port,
+        },
+        siteUrl: {
+          hostname: siteUrl.hostname,
+          port: siteUrl.port,
+        },
       },
-    });
+      'The hostname or port mismatched.',
+    );
     return false;
   }
 
@@ -60,7 +63,10 @@ export const validateReferer = (
     return false;
   }
   if (match.groups?.shareLinkId == null) {
-    logger.warn(`The pathname ('${refererUrl.pathname}') is invalid.`, match);
+    logger.warn(
+      { match },
+      `The pathname ('${refererUrl.pathname}') is invalid.`,
+    );
     return false;
   }
 

+ 1 - 1
apps/app/src/server/middlewares/login-required.ts

@@ -49,7 +49,7 @@ const loginRequiredFactory = (
 
     // check the route config and ACL
     if (isGuestAllowed && crowi.aclService.isGuestAllowedToRead()) {
-      logger.debug('Allowed to read: ', req.path);
+      logger.debug({ path: req.path }, 'Allowed to read');
       return next();
     }
 

+ 6 - 3
apps/app/src/server/middlewares/safe-redirect.ts

@@ -61,17 +61,20 @@ const factory = (whitelistOfHosts: string[]) => {
         const isWhitelisted = isInWhitelist(whitelistOfHosts, redirectTo);
         if (isWhitelisted) {
           logger.debug(
+            { whitelist: whitelistOfHosts },
             `Requested redirect URL (${redirectTo}) is in whitelist.`,
-            `whitelist=${whitelistOfHosts}`,
           );
           return res.redirect(redirectTo);
         }
         logger.debug(
+          { whitelist: whitelistOfHosts },
           `Requested redirect URL (${redirectTo}) is NOT in whitelist.`,
-          `whitelist=${whitelistOfHosts}`,
         );
       } catch (err) {
-        logger.warn(`Requested redirect URL (${redirectTo}) is invalid.`, err);
+        logger.warn(
+          { err },
+          `Requested redirect URL (${redirectTo}) is invalid.`,
+        );
       }
 
       logger.warn(

+ 1 - 1
apps/app/src/server/models/activity.ts

@@ -100,7 +100,7 @@ activitySchema.index(
 activitySchema.plugin(mongoosePaginate);
 
 activitySchema.post('save', function () {
-  logger.debug('activity has been created', this);
+  logger.debug({ activity: this }, 'activity has been created');
 });
 
 activitySchema.statics.createByParameters = async function (

+ 1 - 1
apps/app/src/server/models/external-account.ts

@@ -80,7 +80,7 @@ schema.statics.findOrRegister = function (
   return this.findOne({ providerType, accountId }).then((account) => {
     // ExternalAccount is found
     if (account != null) {
-      logger.debug(`ExternalAccount '${accountId}' is found `, account);
+      logger.debug({ account }, `ExternalAccount '${accountId}' is found`);
       return account;
     }
 

+ 2 - 2
apps/app/src/server/models/user-group-relation.ts

@@ -104,7 +104,7 @@ schema.statics.findAllRelation = function () {
  * @memberof UserGroupRelation
  */
 schema.statics.findAllRelationForUserGroup = function (userGroup) {
-  logger.debug('findAllRelationForUserGroup is called', userGroup);
+  logger.debug({ userGroup }, 'findAllRelationForUserGroup is called');
   // biome-ignore lint/plugin: allow populate for backward compatibility
   return this.find({ relatedGroup: userGroup }).populate('relatedUser').exec();
 };
@@ -236,7 +236,7 @@ schema.statics.findUserByNotRelatedGroup = function (userGroup, queryOptions) {
       $or: searthField,
     };
 
-    logger.debug('findUserByNotRelatedGroup ', query);
+    logger.debug({ query }, 'findUserByNotRelatedGroup');
     return User.find(query).exec();
   });
 };

+ 2 - 2
apps/app/src/server/routes/apiv3/bookmark-folder.ts

@@ -188,7 +188,7 @@ module.exports = (crowi: Crowi) => {
 
       try {
         const bookmarkFolder = await BookmarkFolder.createByParameters(params);
-        logger.debug('bookmark folder created', bookmarkFolder);
+        logger.debug({ bookmarkFolder }, 'bookmark folder created');
         return res.apiv3({ bookmarkFolder });
       } catch (err) {
         logger.error(err);
@@ -467,7 +467,7 @@ module.exports = (crowi: Crowi) => {
             userId,
             folderId,
           );
-        logger.debug('bookmark added to folder', bookmarkFolder);
+        logger.debug({ bookmarkFolder }, 'bookmark added to folder');
         return res.apiv3({ bookmarkFolder });
       } catch (err) {
         logger.error(err);

+ 10 - 10
apps/app/src/server/routes/apiv3/g2g-transfer.ts

@@ -464,7 +464,7 @@ module.exports = (crowi: Crowi): Router => {
           fileName.length === 0 ||
           fileName.length > 256
         ) {
-          logger.warn('Invalid fileName in attachment metadata.', { fileName });
+          logger.warn({ fileName }, 'Invalid fileName in attachment metadata.');
           return res.apiv3Err(
             new ErrorV3(
               'Invalid fileName in attachment metadata.',
@@ -478,7 +478,7 @@ module.exports = (crowi: Crowi): Router => {
           !Number.isInteger(fileSize) ||
           fileSize < 0
         ) {
-          logger.warn('Invalid fileSize in attachment metadata.', { fileSize });
+          logger.warn({ fileSize }, 'Invalid fileSize in attachment metadata.');
           return res.apiv3Err(
             new ErrorV3(
               'Invalid fileSize in attachment metadata.',
@@ -489,10 +489,10 @@ module.exports = (crowi: Crowi): Router => {
         }
         const count = await Attachment.countDocuments({ fileName, fileSize });
         if (count === 0) {
-          logger.warn('Attachment not found in collection.', {
-            fileName,
-            fileSize,
-          });
+          logger.warn(
+            { fileName, fileSize },
+            'Attachment not found in collection.',
+          );
           return res.apiv3Err(
             new ErrorV3(
               'Attachment not found in collection.',
@@ -526,10 +526,10 @@ module.exports = (crowi: Crowi): Router => {
       // Normalize the path to prevent path traversal attacks
       const resolvedFilePath = path.resolve(file.path);
       if (!isPathWithinBase(resolvedFilePath, importService.baseDir)) {
-        logger.error('Path traversal attack detected', {
-          filePath: resolvedFilePath,
-          baseDir: importService.baseDir,
-        });
+        logger.error(
+          { filePath: resolvedFilePath, baseDir: importService.baseDir },
+          'Path traversal attack detected',
+        );
         return res.apiv3Err(
           new ErrorV3('Invalid file path.', 'invalid_path'),
           400,

+ 18 - 12
apps/app/src/server/routes/apiv3/page/update-page.ts

@@ -145,7 +145,7 @@ export const updatePageHandlersFactory = (crowi: Crowi): RequestHandler[] => {
         req.user,
       );
     } catch (err) {
-      logger.error('Edit notification failed', err);
+      logger.error({ err }, 'Edit notification failed');
     }
 
     // user notification
@@ -163,11 +163,14 @@ export const updatePageHandlersFactory = (crowi: Crowi): RequestHandler[] => {
         );
         for (const result of results) {
           if (result.status === 'rejected') {
-            logger.error('Create user notification failed', result.reason);
+            logger.error(
+              { err: result.reason },
+              'Create user notification failed',
+            );
           }
         }
       } catch (err) {
-        logger.error('Create user notification failed', err);
+        logger.error({ err }, 'Create user notification failed');
       }
     }
 
@@ -180,7 +183,7 @@ export const updatePageHandlersFactory = (crowi: Crowi): RequestHandler[] => {
         const openaiService = getOpenaiService();
         await openaiService?.updateVectorStoreFileOnPageUpdate(updatedPage);
       } catch (err) {
-        logger.error('Rebuild vector store failed', err);
+        logger.error({ err }, 'Rebuild vector store failed');
       }
     }
   }
@@ -305,11 +308,14 @@ export const updatePageHandlersFactory = (crowi: Crowi): RequestHandler[] => {
           try {
             previousRevision = await Revision.findById(sanitizeRevisionId);
           } catch (error) {
-            logger.error('Failed to fetch previousRevision by revisionId', {
-              revisionId: sanitizeRevisionId,
-              pageId: currentPage._id,
-              error,
-            });
+            logger.error(
+              {
+                revisionId: sanitizeRevisionId,
+                pageId: currentPage._id,
+                err: error,
+              },
+              'Failed to fetch previousRevision by revisionId',
+            );
           }
         }
 
@@ -319,12 +325,12 @@ export const updatePageHandlersFactory = (crowi: Crowi): RequestHandler[] => {
             previousRevision = await Revision.findById(currentPage.revision);
           } catch (error) {
             logger.error(
-              'Failed to fetch previousRevision by currentPage.revision',
               {
                 pageId: currentPage._id,
                 revisionId: currentPage.revision,
-                error,
+                err: error,
               },
+              'Failed to fetch previousRevision by currentPage.revision',
             );
           }
         }
@@ -339,7 +345,7 @@ export const updatePageHandlersFactory = (crowi: Crowi): RequestHandler[] => {
           options,
         );
       } catch (err) {
-        logger.error('Error occurred while updating a page.', err);
+        logger.error({ err }, 'Error occurred while updating a page.');
         return res.apiv3Err(err);
       }
 

+ 2 - 2
apps/app/src/server/service/config-manager/config-loader.ts

@@ -31,7 +31,7 @@ export class ConfigLoader implements IConfigLoader<ConfigKey, ConfigValues> {
       };
     }
 
-    logger.debug('loadFromEnv', envConfig);
+    logger.debug({ envConfig }, 'loadFromEnv');
 
     return envConfig;
   }
@@ -62,7 +62,7 @@ export class ConfigLoader implements IConfigLoader<ConfigKey, ConfigValues> {
       };
     }
 
-    logger.debug('loadFromDB', dbConfig);
+    logger.debug({ dbConfig }, 'loadFromDB');
     return dbConfig;
   }
 

+ 1 - 1
apps/app/src/server/service/external-account.ts

@@ -57,7 +57,7 @@ class ExternalAccountService {
           );
           return ExternalAccount.associate(providerId, userInfo.id, err.user);
         }
-        logger.error('provider-DuplicatedUsernameException', providerId);
+        logger.error({ providerId }, 'provider-DuplicatedUsernameException');
 
         throw new ErrorV3(
           'message.provider_duplicated_username_exception',

+ 1 - 1
apps/app/src/server/service/file-uploader/gridfs.ts

@@ -250,7 +250,7 @@ module.exports = (crowi: Crowi) => {
     try {
       // Add error handling to prevent resource leaks
       readable.on('error', (err) => {
-        logger.error('Readable stream error:', err);
+        logger.error({ err }, 'Readable stream error');
         readable.destroy();
         throw err;
       });

+ 38 - 26
apps/app/src/server/service/mail/mail.ts

@@ -82,8 +82,8 @@ class MailService implements S2sMessageHandlable {
         await s2sMessagingService.publish(s2sMessage);
       } catch (e) {
         logger.error(
-          'Failed to publish update message with S2sMessagingService: ',
-          e.message,
+          { err: e },
+          'Failed to publish update message with S2sMessagingService',
         );
       }
     }
@@ -161,14 +161,17 @@ class MailService implements S2sMessageHandlable {
     for (let attempt = 1; attempt <= maxRetries; attempt++) {
       try {
         const result = await this.mailer.sendMail(config);
-        logger.info('OAuth 2.0 email sent successfully', {
-          messageId: result.messageId,
-          from: config.from,
-          recipient: config.to,
-          attempt,
-          clientId: maskedClientId,
-          tag: 'oauth2_email_success',
-        });
+        logger.info(
+          {
+            messageId: result.messageId,
+            from: config.from,
+            recipient: config.to,
+            attempt,
+            clientId: maskedClientId,
+            tag: 'oauth2_email_success',
+          },
+          'OAuth 2.0 email sent successfully',
+        );
         return result;
       } catch (error: unknown) {
         const err = error as Error & { code?: string };
@@ -182,9 +185,8 @@ class MailService implements S2sMessageHandlable {
         }
 
         logger.error(
-          `OAuth 2.0 email send failed (attempt ${attempt}/${maxRetries})`,
           {
-            error: err.message,
+            err,
             code: err.code,
             user: config.from,
             recipient: config.to,
@@ -193,6 +195,7 @@ class MailService implements S2sMessageHandlable {
             timestamp: new Date().toISOString(),
             tag: monitoringTag,
           },
+          `OAuth 2.0 email send failed (attempt ${attempt}/${maxRetries})`,
         );
 
         if (attempt === maxRetries) {
@@ -232,17 +235,23 @@ class MailService implements S2sMessageHandlable {
 
       await FailedEmail.create(failedEmail);
 
-      logger.error('Failed email stored for manual review', {
-        recipient: config.to,
-        errorMessage: error.message,
-        errorCode: error.code,
-      });
+      logger.error(
+        {
+          recipient: config.to,
+          errorMessage: error.message,
+          errorCode: error.code,
+        },
+        'Failed email stored for manual review',
+      );
     } catch (err: unknown) {
       const storeError = err as Error;
-      logger.error('Failed to store failed email', {
-        error: storeError.message,
-        originalError: error.message,
-      });
+      logger.error(
+        {
+          err: storeError,
+          originalError: error.message,
+        },
+        'Failed to store failed email',
+      );
       throw new Error(`Failed to store failed email: ${storeError.message}`);
     }
   }
@@ -270,11 +279,14 @@ class MailService implements S2sMessageHandlable {
 
     // Use sendWithRetry for OAuth 2.0 to handle token refresh failures with exponential backoff
     if (transmissionMethod === 'oauth2') {
-      logger.debug('Sending email via OAuth2 with config:', {
-        from: mailConfig.from,
-        to: mailConfig.to,
-        subject: mailConfig.subject,
-      });
+      logger.debug(
+        {
+          from: mailConfig.from,
+          to: mailConfig.to,
+          subject: mailConfig.subject,
+        },
+        'Sending email via OAuth2 with config',
+      );
       return this.sendWithRetry(mailConfig as EmailConfig);
     }
 

+ 1 - 1
apps/app/src/server/service/mail/oauth2.ts

@@ -71,7 +71,7 @@ export function createOAuth2Client(
 
   const client = nodemailer.createTransport(option);
 
-  logger.debug('mailer set up for OAuth2', client);
+  logger.debug('mailer set up for OAuth2');
 
   return client;
 }

+ 1 - 1
apps/app/src/server/service/mail/ses.ts

@@ -39,7 +39,7 @@ export function createSESClient(
 
   const client = nodemailer.createTransport(ses(option));
 
-  logger.debug('mailer set up for SES', client);
+  logger.debug('mailer set up for SES');
 
   return client;
 }

+ 2 - 2
apps/app/src/server/service/mail/smtp.ts

@@ -23,7 +23,7 @@ export function createSMTPClient(
   configManager: IConfigManagerForApp,
   option?: SMTPTransport.Options,
 ): Transporter | null {
-  logger.debug('createSMTPClient option', option);
+  logger.debug('createSMTPClient called');
 
   let smtpOption: SMTPTransport.Options;
 
@@ -58,7 +58,7 @@ export function createSMTPClient(
 
   const client = nodemailer.createTransport(smtpOption);
 
-  logger.debug('mailer set up for SMTP', client);
+  logger.debug('mailer set up for SMTP');
 
   return client;
 }

+ 3 - 3
apps/app/src/server/service/page/events/seen.ts

@@ -24,13 +24,13 @@ export const onSeen = async (
     const page = await Page.findById(pageId);
 
     if (page == null) {
-      logger.warn('onSeen: page not found', { pageId });
+      logger.warn({ pageId }, 'onSeen: page not found');
       return;
     }
 
     await page.seen(user);
-    logger.debug('onSeen: successfully marked page as seen', { pageId });
+    logger.debug({ pageId }, 'onSeen: successfully marked page as seen');
   } catch (err) {
-    logger.error('onSeen: failed to mark page as seen', err);
+    logger.error({ err }, 'onSeen: failed to mark page as seen');
   }
 };

+ 6 - 9
apps/app/src/server/service/page/index.ts

@@ -2406,7 +2406,7 @@ class PageService implements IPageService {
     const ids = pages.map((page) => page._id);
     const paths = pages.map((page) => page.path);
 
-    logger.debug('Deleting completely', paths);
+    logger.debug({ paths }, 'Deleting completely');
 
     await this.deleteCompletelyOperation(ids, paths);
 
@@ -2461,7 +2461,7 @@ class PageService implements IPageService {
     const ids = [page._id];
     const paths = [page.path];
 
-    logger.debug('Deleting completely', paths);
+    logger.debug({ paths }, 'Deleting completely');
 
     const parameters = {
       ip: activityParameters.ip,
@@ -2598,7 +2598,7 @@ class PageService implements IPageService {
     const ids = [page._id];
     const paths = [page.path];
 
-    logger.debug('Deleting completely', paths);
+    logger.debug({ paths }, 'Deleting completely');
 
     await this.deleteCompletelyOperation(ids, paths);
 
@@ -3692,8 +3692,8 @@ class PageService implements IPageService {
         paths: nonNormalizablePagePaths,
       });
       logger.debug(
+        { paths: nonNormalizablePagePaths },
         'Some pages could not be converted.',
-        nonNormalizablePagePaths,
       );
     }
 
@@ -4219,8 +4219,8 @@ class PageService implements IPageService {
           // Throw if any error is found
           if (res.result.writeErrors.length > 0) {
             logger.error(
+              { writeErrors: res.result.writeErrors },
               'Failed to migrate some pages',
-              res.result.writeErrors,
             );
             socket?.emit(SocketEventName.PMEnded, { isSucceeded: false });
             throw Error('Failed to migrate some pages');
@@ -4230,11 +4230,8 @@ class PageService implements IPageService {
           if (res.result.nModified === 0 && res.result.nMatched === 0) {
             shouldContinue = false;
             logger.error(
+              { parentPaths, bulkWriteResult: res },
               'Migration is unable to continue',
-              'parentPaths:',
-              parentPaths,
-              'bulkWriteResult:',
-              res,
             );
             socket?.emit(SocketEventName.PMEnded, { isSucceeded: false });
           }

+ 7 - 8
apps/app/src/server/service/s2s-messaging/nchan.ts

@@ -67,7 +67,7 @@ class NchanDelegator extends AbstractS2sMessagingService {
 
     const url = this.constructUrl(this.publishPath).toString();
 
-    logger.debug('Publish message', s2sMessage, `to ${url}`);
+    logger.debug({ s2sMessage, url }, 'Publish message');
 
     return axios.post(url, s2sMessage);
   }
@@ -134,7 +134,7 @@ class NchanDelegator extends AbstractS2sMessagingService {
       logger.info('WebSocket client disconnected');
     });
     socket.addEventListener('error', (error) => {
-      logger.error('WebSocket error occured:', error.message);
+      logger.error({ err: error }, 'WebSocket error occurred');
     });
 
     socket.addEventListener('open', () => {
@@ -163,8 +163,8 @@ class NchanDelegator extends AbstractS2sMessagingService {
       // check uid
       if (s2sMessage.publisherUid === this.uid) {
         logger.debug(
-          `Skip processing by ${handlable.constructor.name} because this message is sent by the publisher itself:`,
-          `from ${this.uid}`,
+          { publisherUid: this.uid },
+          `Skip processing by ${handlable.constructor.name} because this message is sent by the publisher itself`,
         );
         return;
       }
@@ -172,16 +172,15 @@ class NchanDelegator extends AbstractS2sMessagingService {
       // check shouldHandleS2sMessage
       const shouldHandle = handlable.shouldHandleS2sMessage(s2sMessage);
       logger.debug(
-        `${handlable.constructor.name}.shouldHandleS2sMessage(`,
-        s2sMessage,
-        `) => ${shouldHandle}`,
+        { s2sMessage, shouldHandle },
+        `${handlable.constructor.name}.shouldHandleS2sMessage`,
       );
 
       if (shouldHandle) {
         handlable.handleS2sMessage(s2sMessage);
       }
     } catch (err) {
-      logger.warn('Could not handle a message: ', err.message);
+      logger.warn({ err }, 'Could not handle a message');
     }
   }
 }

+ 4 - 4
apps/app/src/server/service/search-delegator/elasticsearch.ts

@@ -646,7 +646,7 @@ class ElasticsearchDelegator
       this.prepareBodyForDelete(body, page);
     });
 
-    logger.debug('deletePages(): Sending Request to ES', body);
+    logger.debug({ body }, 'deletePages(): Sending Request to ES');
     return this.client.bulk({
       body,
     });
@@ -664,7 +664,7 @@ class ElasticsearchDelegator
   ): Promise<ISearchResult<ISearchResultData>> {
     // for debug
     if (process.env.NODE_ENV === 'development') {
-      logger.debug('query: ', JSON.stringify(query, null, 2));
+      logger.debug({ query }, 'query');
 
       const validateQueryResponse = await (async () => {
         if (isES7ClientDelegator(this.client)) {
@@ -700,7 +700,7 @@ class ElasticsearchDelegator
       })();
 
       // for debug
-      logger.debug('ES result: ', validateQueryResponse);
+      logger.debug({ validateQueryResponse }, 'ES result');
     }
 
     const searchResponse = await (async () => {
@@ -1034,7 +1034,7 @@ class ElasticsearchDelegator
     const count = (await User.count({})) || 1;
 
     const minScore = queryString.length * 0.1 - 1; // increase with length
-    logger.debug('min_score: ', minScore);
+    logger.debug({ minScore }, 'min_score');
 
     query.body.query = {
       function_score: {

+ 4 - 4
apps/app/src/server/service/slack-integration.ts

@@ -250,8 +250,8 @@ export class SlackIntegrationService implements S2sMessageHandlable {
     try {
       await client.chat.postMessage(messageArgs);
     } catch (error) {
-      logger.debug('Post error', error);
-      logger.debug('Sent data to slack is:', messageArgs);
+      logger.debug({ err: error }, 'Post error');
+      logger.debug({ messageArgs }, 'Sent data to slack');
       throw error;
     }
   }
@@ -264,8 +264,8 @@ export class SlackIntegrationService implements S2sMessageHandlable {
     try {
       await slackLegacyUtil.postMessage(messageArgs);
     } catch (error) {
-      logger.debug('Post error', error);
-      logger.debug('Sent data to slack is:', messageArgs);
+      logger.debug({ err: error }, 'Post error');
+      logger.debug({ messageArgs }, 'Sent data to slack');
       throw error;
     }
   }

+ 3 - 3
apps/app/src/server/service/socket-io/socket-io.ts

@@ -178,7 +178,7 @@ export class SocketIoService {
       const clients = await this.getAdminSocket().fetchSockets();
       const clientsCount = clients.length;
 
-      logger.debug("Current count of clients for '/admin':", clientsCount);
+      logger.debug({ clientsCount }, "Current count of clients for '/admin'");
 
       const limit = configManager.getConfig(
         's2cMessagingPubsub:connectionsLimitForAdmin',
@@ -198,7 +198,7 @@ export class SocketIoService {
     if (socket.request.user == null) {
       const clientsCount = this.guestClients.size;
 
-      logger.debug('Current count of clients for guests:', clientsCount);
+      logger.debug({ clientsCount }, 'Current count of clients for guests');
 
       const limit = configManager.getConfig(
         's2cMessagingPubsub:connectionsLimitForGuest',
@@ -227,7 +227,7 @@ export class SocketIoService {
     const clients = await this.getDefaultSocket().fetchSockets();
     const clientsCount = clients.length;
 
-    logger.debug("Current count of clients for '/':", clientsCount);
+    logger.debug({ clientsCount }, "Current count of clients for '/'");
 
     const limit = configManager.getConfig(
       's2cMessagingPubsub:connectionsLimit',

+ 2 - 2
apps/app/src/server/service/yjs/create-mongodb-persistence.ts

@@ -33,7 +33,7 @@ export const createMongoDBPersistence = (
   const persistence: YWebsocketPersistence = {
     provider: mdb,
     bindState: async (docName: string, ydoc: WSSharedDoc) => {
-      logger.debug('bindState', { docName });
+      logger.debug({ docName }, 'bindState');
 
       const persistedYdoc = await mdb.getYDoc(docName);
 
@@ -93,7 +93,7 @@ export const createMongoDBPersistence = (
       });
     },
     writeState: async (docName: string) => {
-      logger.debug('writeState', { docName });
+      logger.debug({ docName }, 'writeState');
       // flush document on close to have the smallest possible database
       await mdb.flushDocument(docName);
     },

+ 9 - 6
apps/app/src/server/service/yjs/upgrade-handler.ts

@@ -89,7 +89,7 @@ export const createUpgradeHandler = (sessionConfig: SessionConfig) => {
   ): Promise<UpgradeResult> => {
     const pageId = extractPageId(request.url);
     if (pageId == null) {
-      logger.warn('Invalid URL path for Yjs upgrade', { url: request.url });
+      logger.warn({ url: request.url }, 'Invalid URL path for Yjs upgrade');
       writeErrorResponse(socket, 400, 'Bad Request');
       return { authorized: false, statusCode: 400 };
     }
@@ -100,7 +100,7 @@ export const createUpgradeHandler = (sessionConfig: SessionConfig) => {
       await runMiddleware(passportInit as ConnectMiddleware, request);
       await runMiddleware(passportSession as ConnectMiddleware, request);
     } catch (err) {
-      logger.warn('Session/passport middleware failed on upgrade', { err });
+      logger.warn({ err }, 'Session/passport middleware failed on upgrade');
       writeErrorResponse(socket, 401, 'Unauthorized');
       return { authorized: false, statusCode: 401 };
     }
@@ -114,10 +114,13 @@ export const createUpgradeHandler = (sessionConfig: SessionConfig) => {
     if (!isAccessible) {
       const statusCode = user == null ? 401 : 403;
       const message = user == null ? 'Unauthorized' : 'Forbidden';
-      logger.warn(`Yjs upgrade rejected: ${message}`, {
-        pageId,
-        userId: user?._id,
-      });
+      logger.warn(
+        {
+          pageId,
+          userId: user?._id,
+        },
+        `Yjs upgrade rejected: ${message}`,
+      );
       writeErrorResponse(socket, statusCode, message);
       return { authorized: false, statusCode };
     }

+ 2 - 2
apps/app/src/server/service/yjs/yjs.ts

@@ -102,7 +102,7 @@ class YjsService implements IYjsService {
       } catch (err) {
         guard.restore();
 
-        logger.error('Yjs upgrade handler failed unexpectedly', { url, err });
+        logger.error({ url, err }, 'Yjs upgrade handler failed unexpectedly');
         if (socket.writable) {
           socket.write('HTTP/1.1 500 Internal Server Error\r\n\r\n');
         }
@@ -116,8 +116,8 @@ class YjsService implements IYjsService {
   public async getYDocStatus(pageId: string): Promise<YDocStatus> {
     const dumpLog = (status: YDocStatus, args?: { [key: string]: unknown }) => {
       logger.debug(
-        `getYDocStatus('${pageId}') detected '${status}'`,
         args ?? {},
+        `getYDocStatus('${pageId}') detected '${status}'`,
       );
     };
 

+ 4 - 4
apps/app/src/server/util/slack-legacy.ts

@@ -27,8 +27,8 @@ export const slackLegacyUtilFactory = (
     try {
       await webhook.send(messageObj);
     } catch (error) {
-      logger.debug('Post error', error);
-      logger.debug('Sent data to slack is:', messageObj);
+      logger.debug({ err: error }, 'Post error');
+      logger.debug({ messageObj }, 'Sent data to slack');
       throw error;
     }
   };
@@ -38,8 +38,8 @@ export const slackLegacyUtilFactory = (
     try {
       await client.chat.postMessage(messageObj);
     } catch (error) {
-      logger.debug('Post error', error);
-      logger.debug('Sent data to slack is:', messageObj);
+      logger.debug({ err: error }, 'Post error');
+      logger.debug({ messageObj }, 'Sent data to slack');
       throw error;
     }
   };

+ 3 - 3
apps/app/src/states/socket-io/global-socket.ts

@@ -42,16 +42,16 @@ export const useSetupGlobalSocket = (): void => {
 
       // Error handling
       newSocket.on('error', (err) => {
-        logger.error(err);
+        logger.error({ err }, 'Socket error');
       });
       newSocket.on('connect_error', (err) => {
-        logger.error('Failed to connect with websocket.', err);
+        logger.error({ err }, 'Failed to connect with websocket.');
       });
 
       // Store connection in atom
       setSocket(newSocket);
     } catch (error) {
-      logger.error('Failed to initialize WebSocket:', error);
+      logger.error({ err: error }, 'Failed to initialize WebSocket');
     }
   }, [setSocket]);
 

+ 5 - 10
apps/app/src/utils/logger/index.ts

@@ -1,18 +1,13 @@
-import type Logger from 'bunyan';
-import { createLogger, type UniversalBunyanConfig } from 'universal-bunyan';
+import type { Logger } from '@growi/logger';
+import { initializeLoggerFactory, loggerFactory } from '@growi/logger';
 
 import configForDev from '^/config/logger/config.dev';
 import configForProd from '^/config/logger/config.prod';
 
 const isProduction = process.env.NODE_ENV === 'production';
-const config = (
-  isProduction ? configForProd : configForDev
-) as UniversalBunyanConfig;
+const config = isProduction ? configForProd : configForDev;
 
-const loggerFactory = (name: string): Logger =>
-  createLogger({
-    name,
-    config,
-  });
+initializeLoggerFactory({ config });
 
+export type { Logger };
 export default loggerFactory;

+ 3 - 1
apps/app/tsconfig.build.server.json

@@ -17,7 +17,9 @@
     }
   },
   "exclude": [
-    "config",
+    "config/ci",
+    "config/*.js",
+    "config/*.spec.ts",
     "resource",
     "src/client",
     "src/components",

+ 3 - 9
apps/slackbot-proxy/package.json

@@ -35,6 +35,7 @@
   },
   "dependencies": {
     "@godaddy/terminus": "^4.9.0",
+    "@growi/logger": "workspace:^",
     "@growi/slack": "workspace:^",
     "@slack/oauth": "^3.0.3",
     "@slack/web-api": "^6.2.4",
@@ -47,15 +48,12 @@
     "@tsed/typeorm": "=6.43.0",
     "axios": "^1.11.0",
     "body-parser": "^1.20.3",
-    "browser-bunyan": "^1.6.3",
-    "bunyan": "^1.8.15",
     "compression": "^1.7.4",
     "cookie-parser": "^1.4.5",
     "cross-env": "^7.0.0",
     "date-fns": "^3.6.0",
     "dotenv-flow": "^3.2.0",
     "express": "^4.20.0",
-    "express-bunyan-logger": "^1.3.3",
     "extensible-custom-error": "^0.0.7",
     "helmet": "^4.6.0",
     "http-errors": "^2.0.0",
@@ -63,8 +61,7 @@
     "mysql2": "^2.2.5",
     "read-pkg-up": "^7.0.1",
     "tslib": "^2.8.0",
-    "typeorm": "=0.2.45",
-    "universal-bunyan": "^0.9.2"
+    "typeorm": "=0.2.45"
   },
   "// comments for devDependencies": {
     "@tsed/*": "v6.133.1 causes 'TypeError: Cannot read properties of undefined (reading 'prototype')' with `@Middleware()`",
@@ -75,9 +72,6 @@
     "@tsed/core": "=6.43.0",
     "@tsed/exceptions": "=6.43.0",
     "@tsed/json-mapper": "=6.43.0",
-    "@types/bunyan": "^1.8.11",
-    "bootstrap": "^5.3.8",
-    "browser-bunyan": "^1.6.3",
-    "morgan": "^1.10.0"
+    "bootstrap": "^5.3.8"
   }
 }

+ 6 - 20
apps/slackbot-proxy/src/Server.ts

@@ -3,13 +3,13 @@ import '@tsed/swagger';
 import '@tsed/typeorm'; // !! DO NOT MODIFY !! -- https://github.com/tsedio/tsed/issues/1332#issuecomment-837840612
 
 import { createTerminus } from '@godaddy/terminus';
+import { createHttpLoggerMiddleware } from '@growi/logger';
 import { HttpServer, PlatformApplication } from '@tsed/common';
 import { Configuration, Inject, InjectorService } from '@tsed/di';
 import bodyParser from 'body-parser';
 import compress from 'compression';
 import cookieParser from 'cookie-parser';
 import type { Express } from 'express';
-import expressBunyanLogger from 'express-bunyan-logger';
 import helmet from 'helmet';
 import methodOverride from 'method-override';
 import type { ConnectionOptions } from 'typeorm';
@@ -122,7 +122,7 @@ export class Server {
     }
   }
 
-  $beforeRoutesInit(): void {
+  async $beforeRoutesInit(): Promise<void> {
     this.app
       .use(cookieParser())
       .use(compress({}))
@@ -134,7 +134,7 @@ export class Server {
         }),
       );
 
-    this.setupLogger();
+    await this.setupLogger();
   }
 
   $afterRoutesInit(): void {
@@ -161,22 +161,8 @@ export class Server {
   /**
    * Setup logger for requests
    */
-  private setupLogger(): void {
-    // use bunyan
-    if (isProduction) {
-      const logger = loggerFactory('express');
-
-      this.app.use(
-        expressBunyanLogger({
-          logger,
-          excludes: ['*'],
-        }),
-      );
-    }
-    // use morgan
-    else {
-      const morgan = require('morgan');
-      this.app.use(morgan('dev'));
-    }
+  private async setupLogger(): Promise<void> {
+    const httpLogger = await createHttpLoggerMiddleware();
+    this.app.use(httpLogger);
   }
 }

+ 2 - 2
apps/slackbot-proxy/src/config/logger/config.dev.ts

@@ -1,6 +1,6 @@
-import { UniversalBunyanConfig } from 'universal-bunyan';
+import type { LoggerConfig } from '@growi/logger';
 
-const config: UniversalBunyanConfig = {
+const config: LoggerConfig = {
   default: 'info',
 
   // 'express-session': 'debug',

+ 2 - 2
apps/slackbot-proxy/src/config/logger/config.prod.ts

@@ -1,6 +1,6 @@
-import { UniversalBunyanConfig } from 'universal-bunyan';
+import type { LoggerConfig } from '@growi/logger';
 
-const config: UniversalBunyanConfig = {
+const config: LoggerConfig = {
   default: 'info',
 
   // 'express-session': 'debug',

+ 5 - 5
apps/slackbot-proxy/src/controllers/growi-to-slack.ts

@@ -106,7 +106,7 @@ export class GrowiToSlackCtrl {
       .leftJoinAndSelect('relation.installation', 'installation')
       .getMany();
 
-    logger.debug(`${relations.length} relations found`, relations);
+    logger.debug({ relations }, `${relations.length} relations found`);
 
     // key: tokenGtoP, value: botToken
     const botTokenResolverMapping: { [tokenGtoP: string]: string } = {};
@@ -176,7 +176,7 @@ export class GrowiToSlackCtrl {
 
     // Returns the result of the test if it already exists
     if (relation != null) {
-      logger.debug('relation found', relation);
+      logger.debug({ relation }, 'relation found');
 
       const token = relation.installation.data.bot?.token;
       if (token == null) {
@@ -228,7 +228,7 @@ export class GrowiToSlackCtrl {
       throw createError(400, `failed to request to GROWI. err: ${err.message}`);
     }
 
-    logger.debug('order found', order);
+    logger.debug({ order }, 'order found');
 
     const token = order.installation.data.bot?.token;
     if (token == null) {
@@ -240,7 +240,7 @@ export class GrowiToSlackCtrl {
       throw createError(400, `failed to get connection. err: ${status.error}`);
     }
 
-    logger.debug('relation test is success', order);
+    logger.debug({ order }, 'relation test is success');
 
     // temporary cache for 48 hours
     const expiredAtCommands = addHours(new Date(), 48);
@@ -356,7 +356,7 @@ export class GrowiToSlackCtrl {
   ): Promise<WebclientRes> {
     const { tokenGtoPs } = req;
 
-    logger.debug('Slack API called: ', { method });
+    logger.debug({ method }, 'Slack API called: ');
 
     if (tokenGtoPs.length !== 1) {
       return res.simulateWebAPIPlatformError(

+ 5 - 2
apps/slackbot-proxy/src/controllers/slack.ts

@@ -385,7 +385,10 @@ export class SlackCtrl {
     @Res() res: Res,
     // biome-ignore lint/suspicious/noConfusingVoidType: TODO: fix in https://redmine.weseek.co.jp/issues/168174
   ): Promise<void | string | Res | WebAPICallResult> {
-    logger.info('receive interaction', req.authorizeResult);
+    logger.info(
+      { authorizeResult: req.authorizeResult },
+      'receive interaction',
+    );
     logger.debug('receive interaction', req.body);
 
     const {
@@ -583,7 +586,7 @@ export class SlackCtrl {
     const installPromise = new Promise<Installation>((resolve, reject) => {
       this.installerService.installer.handleCallback(req, serverRes, {
         success: async (installation, metadata) => {
-          logger.info('Success to install', { installation, metadata });
+          logger.info({ installation, metadata }, 'Success to install');
           resolve(installation);
         },
         failure: async (error) => {

+ 1 - 1
apps/slackbot-proxy/src/middlewares/slack-to-growi/authorizer.ts

@@ -1,6 +1,6 @@
+import type { Logger } from '@growi/logger';
 import { AuthorizeResult, InstallationQuery } from '@slack/oauth';
 import { IMiddleware, Inject, Middleware, Next, Req, Res } from '@tsed/common';
-import Logger from 'bunyan';
 import createError from 'http-errors';
 
 import { SlackOauthReq } from '~/interfaces/slack-to-growi/slack-oauth-req';

+ 1 - 1
apps/slackbot-proxy/src/middlewares/slack-to-growi/join-to-conversation.ts

@@ -1,6 +1,6 @@
+import type { Logger } from '@growi/logger';
 import { generateWebClient } from '@growi/slack/dist/utils/webclient-factory';
 import { IMiddleware, Middleware, Req } from '@tsed/common';
-import Logger from 'bunyan';
 
 import { SlackOauthReq } from '~/interfaces/slack-to-growi/slack-oauth-req';
 import loggerFactory from '~/utils/logger';

+ 1 - 1
apps/slackbot-proxy/src/services/RegisterService.ts

@@ -162,7 +162,7 @@ export class RegisterService
       await this.insertOrderRecord(authorizeResult, interactionPayloadAccessor);
     } catch (err) {
       if (err instanceof InvalidUrlError) {
-        logger.error('Failed to register:\n', err);
+        logger.error({ err }, 'Failed to register:');
         await respond(interactionPayloadAccessor.getResponseUrl(), {
           text: 'Invalid URL',
           blocks: [markdownSectionBlock('Please enter a valid URL')],

+ 4 - 7
apps/slackbot-proxy/src/utils/logger/index.ts

@@ -1,5 +1,5 @@
-import Logger from 'bunyan';
-import { createLogger } from 'universal-bunyan';
+import type { Logger } from '@growi/logger';
+import { initializeLoggerFactory, loggerFactory } from '@growi/logger';
 
 import configForDev from '~/config/logger/config.dev';
 import configForProd from '~/config/logger/config.prod';
@@ -7,10 +7,7 @@ import configForProd from '~/config/logger/config.prod';
 const isProduction = process.env.NODE_ENV === 'production';
 const config = isProduction ? configForProd : configForDev;
 
-const loggerFactory = (name: string): Logger =>
-  createLogger({
-    name,
-    config,
-  });
+initializeLoggerFactory({ config });
 
+export type { Logger };
 export default loggerFactory;

+ 0 - 1
package.json

@@ -101,7 +101,6 @@
     "ignoredBuiltDependencies": [
       "@swc/core",
       "core-js",
-      "dtrace-provider",
       "esbuild",
       "leveldown",
       "protobufjs",

+ 1 - 0
packages/logger/.gitignore

@@ -0,0 +1 @@
+/dist

+ 43 - 0
packages/logger/package.json

@@ -0,0 +1,43 @@
+{
+  "name": "@growi/logger",
+  "version": "1.0.0",
+  "description": "Pino-based logger factory for GROWI",
+  "license": "MIT",
+  "private": true,
+  "type": "module",
+  "main": "dist/index.cjs",
+  "module": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "exports": {
+    ".": {
+      "import": "./dist/index.js",
+      "require": "./dist/index.cjs"
+    }
+  },
+  "scripts": {
+    "build": "vite build",
+    "clean": "shx rm -rf dist",
+    "dev": "vite build --mode dev",
+    "watch": "pnpm run dev -w --emptyOutDir=false",
+    "lint:biome": "biome check",
+    "lint:typecheck": "tsgo --noEmit",
+    "lint": "npm-run-all -p lint:*",
+    "test": "vitest run"
+  },
+  "dependencies": {
+    "minimatch": "^9.0.0",
+    "pino": "^9.0.0",
+    "pino-http": "^11.0.0"
+  },
+  "peerDependencies": {
+    "pino-pretty": "^13.0.0"
+  },
+  "peerDependenciesMeta": {
+    "pino-pretty": {
+      "optional": true
+    }
+  },
+  "devDependencies": {
+    "pino-pretty": "^13.0.0"
+  }
+}

+ 130 - 0
packages/logger/src/dev/bunyan-format.spec.ts

@@ -0,0 +1,130 @@
+import { PassThrough, Writable } from 'node:stream';
+import { describe, expect, it } from 'vitest';
+
+import bunyanFormat from './bunyan-format';
+
+function createWithCapture(opts: { singleLine?: boolean } = {}) {
+  const dest = new PassThrough();
+  const chunks: string[] = [];
+  dest.on('data', (chunk: Buffer) => chunks.push(chunk.toString()));
+  const stream = bunyanFormat({ ...opts, colorize: false, destination: dest });
+  return { stream, chunks };
+}
+
+function writeLine(
+  stream: NodeJS.WritableStream,
+  log: Record<string, unknown>,
+) {
+  stream.write(`${JSON.stringify(log)}\n`);
+}
+
+describe('bunyan-format transport', () => {
+  it('returns a writable stream', () => {
+    const { stream } = createWithCapture();
+    expect(stream).toBeDefined();
+    expect(stream).toBeInstanceOf(Writable);
+  });
+
+  it('formats log output as HH:mm:ss.SSSZ LEVEL name: message', async () => {
+    const { stream, chunks } = createWithCapture({ singleLine: true });
+
+    writeLine(stream, {
+      level: 20,
+      time: new Date('2026-03-30T10:06:30.419Z').getTime(),
+      name: 'growi:service:page',
+      msg: 'some message',
+    });
+
+    await new Promise((resolve) => setTimeout(resolve, 50));
+
+    const output = chunks.join('');
+    expect(output).toBe(
+      '10:06:30.419Z DEBUG growi:service:page: some message\n',
+    );
+  });
+
+  it('right-aligns level labels to 5 characters', async () => {
+    const { stream, chunks } = createWithCapture({ singleLine: true });
+
+    writeLine(stream, {
+      level: 30,
+      time: Date.now(),
+      name: 'test',
+      msg: 'info',
+    });
+    writeLine(stream, {
+      level: 40,
+      time: Date.now(),
+      name: 'test',
+      msg: 'warn',
+    });
+    writeLine(stream, {
+      level: 10,
+      time: Date.now(),
+      name: 'test',
+      msg: 'trace',
+    });
+
+    await new Promise((resolve) => setTimeout(resolve, 50));
+
+    const output = chunks.join('');
+    expect(output).toContain(' INFO test:');
+    expect(output).toContain(' WARN test:');
+    expect(output).toContain('TRACE test:');
+  });
+
+  it('appends extra fields on a new line when singleLine is false', async () => {
+    const { stream, chunks } = createWithCapture({ singleLine: false });
+
+    writeLine(stream, {
+      level: 20,
+      time: Date.now(),
+      name: 'test',
+      msg: 'hello',
+      extra: 'value',
+    });
+
+    await new Promise((resolve) => setTimeout(resolve, 50));
+
+    const output = chunks.join('');
+    expect(output).toContain('hello');
+    // pino-pretty formats extra fields as `key: "value"` on a new indented line
+    expect(output).toContain('\n    extra: "value"');
+  });
+
+  it('appends extra fields inline when singleLine is true', async () => {
+    const { stream, chunks } = createWithCapture({ singleLine: true });
+
+    writeLine(stream, {
+      level: 30,
+      time: Date.now(),
+      name: 'test',
+      msg: 'hello',
+      extra: 'value',
+    });
+
+    await new Promise((resolve) => setTimeout(resolve, 50));
+
+    const output = chunks.join('');
+    expect(output).toContain('hello {"extra":"value"}');
+  });
+
+  it('excludes pid and hostname from extra fields', async () => {
+    const { stream, chunks } = createWithCapture({ singleLine: true });
+
+    writeLine(stream, {
+      level: 30,
+      time: Date.now(),
+      name: 'test',
+      msg: 'hello',
+      pid: 12345,
+      hostname: 'myhost',
+    });
+
+    await new Promise((resolve) => setTimeout(resolve, 50));
+
+    const output = chunks.join('');
+    expect(output).not.toContain('pid');
+    expect(output).not.toContain('hostname');
+  });
+});

+ 73 - 0
packages/logger/src/dev/bunyan-format.ts

@@ -0,0 +1,73 @@
+import { Writable } from 'node:stream';
+import { prettyFactory } from 'pino-pretty';
+
+interface BunyanFormatOptions {
+  singleLine?: boolean;
+  colorize?: boolean;
+  destination?: NodeJS.WritableStream;
+}
+
+const ANAI_COLORS = ['gray', 'green', 'yellow', 'red'] as const;
+
+const LEVEL_SETTINGS: Record<
+  number,
+  { label: string; color: (typeof ANAI_COLORS)[number] }
+> = {
+  10: {
+    label: 'TRACE',
+    color: 'gray',
+  },
+  20: { label: 'DEBUG', color: 'gray' },
+  30: { label: 'INFO', color: 'green' },
+  40: { label: 'WARN', color: 'yellow' },
+  50: { label: 'ERROR', color: 'red' },
+  60: { label: 'FATAL', color: 'red' },
+};
+
+/**
+ * Custom pino transport producing bunyan-format "short" mode output.
+ * Format: HH:mm:ss.SSSZ LEVEL name: message
+ *
+ * Development only — this module is never imported in production.
+ */
+// biome-ignore lint/style/noDefaultExport: pino transports require a default export for thread-stream Worker loading
+export default (opts: BunyanFormatOptions) => {
+  const singleLine = opts.singleLine ?? false;
+  const destination = opts.destination ?? process.stdout;
+
+  const pretty = prettyFactory({
+    colorize: opts.colorize ?? !process.env.NO_COLOR,
+    ignore: 'pid,hostname,name,req,res,responseTime',
+    translateTime: false,
+    singleLine,
+    // Suppress pino-pretty's default time and level rendering; we handle them in messageFormat
+    customPrettifiers: { time: () => '', level: () => '' },
+    messageFormat: (log, messageKey, _levelLabel, { colors }) => {
+      const time = new Date(log.time as number).toISOString().slice(11);
+      const levelNum = log.level as number;
+      const label = LEVEL_SETTINGS[levelNum]?.label ?? 'INFO';
+      const name = (log.name as string) ?? '';
+      const msg = String(log[messageKey] ?? '');
+
+      const padding = ' '.repeat(Math.max(0, 5 - label.length));
+      const c = colors as unknown as Record<string, (s: string) => string>;
+      const levelColor =
+        c[LEVEL_SETTINGS[levelNum]?.color ?? 'reset'] ?? String;
+
+      return `${c.gray(time)} ${levelColor(`${padding}${label}`)} ${c.white(`${name}:`)} ${msg}`;
+    },
+  });
+
+  return new Writable({
+    write(chunk, _encoding, callback) {
+      for (const line of chunk.toString().split('\n').filter(Boolean)) {
+        try {
+          destination.write(pretty(JSON.parse(line)) ?? '');
+        } catch {
+          destination.write(`${line}\n`);
+        }
+      }
+      callback();
+    },
+  });
+};

+ 87 - 0
packages/logger/src/dev/morgan-like-format-options.spec.ts

@@ -0,0 +1,87 @@
+import type { IncomingMessage, ServerResponse } from 'node:http';
+import { describe, expect, it } from 'vitest';
+
+import { morganLikeFormatOptions } from './morgan-like-format-options';
+
+// Strip ANSI escape codes for plain-text assertions (avoids control-char lint rule)
+const ANSI_RE = new RegExp(`${String.fromCharCode(27)}\\[\\d+m`, 'g');
+const strip = (s: string) => s.replace(ANSI_RE, '');
+
+function fakeReq(method: string, url: string): IncomingMessage {
+  return { method, url } as IncomingMessage;
+}
+
+function fakeRes(statusCode: number): ServerResponse {
+  return { statusCode } as unknown as ServerResponse;
+}
+
+describe('morganLikeFormatOptions', () => {
+  describe('customSuccessMessage', () => {
+    it('formats as METHOD /url STATUS - TIMEms', () => {
+      const msg = morganLikeFormatOptions.customSuccessMessage(
+        fakeReq('GET', '/page/path'),
+        fakeRes(200),
+        12.4,
+      );
+      expect(strip(msg)).toBe('GET /page/path 200 - 12ms');
+    });
+
+    it('rounds responseTime to nearest integer', () => {
+      const msg = morganLikeFormatOptions.customSuccessMessage(
+        fakeReq('POST', '/api'),
+        fakeRes(201),
+        0.7,
+      );
+      expect(strip(msg)).toBe('POST /api 201 - 1ms');
+    });
+  });
+
+  describe('customErrorMessage', () => {
+    it('includes error message', () => {
+      const msg = morganLikeFormatOptions.customErrorMessage(
+        fakeReq('PUT', '/data'),
+        fakeRes(500),
+        new Error('db timeout'),
+      );
+      expect(strip(msg)).toBe('PUT /data 500 - db timeout');
+    });
+  });
+
+  describe('customLogLevel', () => {
+    it('returns info for 2xx responses', () => {
+      const level = morganLikeFormatOptions.customLogLevel(
+        fakeReq('GET', '/'),
+        fakeRes(200),
+        undefined,
+      );
+      expect(level).toBe('info');
+    });
+
+    it('returns warn for 4xx responses', () => {
+      const level = morganLikeFormatOptions.customLogLevel(
+        fakeReq('GET', '/'),
+        fakeRes(404),
+        undefined,
+      );
+      expect(level).toBe('warn');
+    });
+
+    it('returns error for 5xx responses', () => {
+      const level = morganLikeFormatOptions.customLogLevel(
+        fakeReq('GET', '/'),
+        fakeRes(503),
+        undefined,
+      );
+      expect(level).toBe('error');
+    });
+
+    it('returns error when error object is present', () => {
+      const level = morganLikeFormatOptions.customLogLevel(
+        fakeReq('GET', '/'),
+        fakeRes(200),
+        new Error('unexpected'),
+      );
+      expect(level).toBe('error');
+    });
+  });
+});

+ 65 - 0
packages/logger/src/dev/morgan-like-format-options.ts

@@ -0,0 +1,65 @@
+import type { IncomingMessage, ServerResponse } from 'node:http';
+
+/**
+ * Morgan-like log message formatters for pino-http.
+ *
+ * Produces concise one-liner messages in the style of morgan's "combined" format:
+ *   GET /page/path 200 - 12ms
+ *
+ * Usage with pino-http:
+ *   pinoHttp({ ...morganLikeFormatOptions, logger })
+ */
+
+const NO_COLOR = Boolean(process.env.NO_COLOR);
+const RESET = NO_COLOR ? '' : '\x1b[0m';
+const DIM = NO_COLOR ? '' : '\x1b[2m';
+
+function statusAnsi(status: number): string {
+  if (NO_COLOR) return '';
+  if (status >= 500) return '\x1b[31m'; // red
+  if (status >= 400) return '\x1b[33m'; // yellow
+  if (status >= 300) return '\x1b[36m'; // cyan
+  return '\x1b[32m'; // green
+}
+
+type CustomSuccessMessage = (
+  req: IncomingMessage,
+  res: ServerResponse,
+  responseTime: number,
+) => string;
+
+type CustomErrorMessage = (
+  req: IncomingMessage,
+  res: ServerResponse,
+  error: Error,
+) => string;
+
+type LogLevel = 'info' | 'warn' | 'error';
+
+type CustomLogLevel = (
+  req: IncomingMessage,
+  res: ServerResponse,
+  error: Error | undefined,
+) => LogLevel;
+
+export const morganLikeFormatOptions: {
+  customSuccessMessage: CustomSuccessMessage;
+  customErrorMessage: CustomErrorMessage;
+  customLogLevel: CustomLogLevel;
+} = {
+  customSuccessMessage: (req, res, responseTime) => {
+    const sc = statusAnsi(res.statusCode);
+    return `${req.method} ${RESET}${req.url} ${sc}${res.statusCode}${RESET} - ${DIM}${Math.round(responseTime)}ms${RESET}`;
+  },
+
+  customErrorMessage: (req, res, error) => {
+    const sc = statusAnsi(res.statusCode);
+    return `${req.method} ${RESET}${req.url} ${sc}${res.statusCode}${RESET} - ${error.message}`;
+  },
+
+  customLogLevel: (_req, res, error) => {
+    if (error != null || res.statusCode >= 500) return 'error';
+    if (res.statusCode >= 400) return 'warn';
+    return 'info';
+  },
+};

+ 92 - 0
packages/logger/src/env-var-parser.spec.ts

@@ -0,0 +1,92 @@
+import { afterEach, beforeEach, describe, expect, it } from 'vitest';
+
+import { parseEnvLevels } from './env-var-parser';
+
+describe('parseEnvLevels', () => {
+  const originalEnv = process.env;
+
+  beforeEach(() => {
+    // Reset env before each test
+    process.env = { ...originalEnv };
+    delete process.env.DEBUG;
+    delete process.env.TRACE;
+    delete process.env.INFO;
+    delete process.env.WARN;
+    delete process.env.ERROR;
+    delete process.env.FATAL;
+  });
+
+  afterEach(() => {
+    process.env = originalEnv;
+  });
+
+  it('returns empty object when no env vars are set', () => {
+    const result = parseEnvLevels();
+    expect(result).toEqual({});
+  });
+
+  it('parses a single namespace from DEBUG', () => {
+    process.env.DEBUG = 'growi:service:page';
+    const result = parseEnvLevels();
+    expect(result).toEqual({ 'growi:service:page': 'debug' });
+  });
+
+  it('parses multiple comma-separated namespaces from DEBUG', () => {
+    process.env.DEBUG = 'growi:routes:*,growi:service:page';
+    const result = parseEnvLevels();
+    expect(result).toEqual({
+      'growi:routes:*': 'debug',
+      'growi:service:page': 'debug',
+    });
+  });
+
+  it('parses all six level env vars', () => {
+    process.env.DEBUG = 'ns:debug';
+    process.env.TRACE = 'ns:trace';
+    process.env.INFO = 'ns:info';
+    process.env.WARN = 'ns:warn';
+    process.env.ERROR = 'ns:error';
+    process.env.FATAL = 'ns:fatal';
+    const result = parseEnvLevels();
+    expect(result).toEqual({
+      'ns:debug': 'debug',
+      'ns:trace': 'trace',
+      'ns:info': 'info',
+      'ns:warn': 'warn',
+      'ns:error': 'error',
+      'ns:fatal': 'fatal',
+    });
+  });
+
+  it('trims whitespace around namespace patterns', () => {
+    process.env.DEBUG = ' growi:service , growi:routes ';
+    const result = parseEnvLevels();
+    expect(result).toEqual({
+      'growi:service': 'debug',
+      'growi:routes': 'debug',
+    });
+  });
+
+  it('ignores empty entries from trailing/double commas', () => {
+    process.env.DEBUG = 'growi:service,,growi:routes,';
+    const result = parseEnvLevels();
+    expect(result).toEqual({
+      'growi:service': 'debug',
+      'growi:routes': 'debug',
+    });
+  });
+
+  it('uses the last value when the same namespace appears in multiple env vars', () => {
+    process.env.DEBUG = 'growi:service';
+    process.env.TRACE = 'growi:service';
+    const result = parseEnvLevels();
+    // TRACE is processed after DEBUG, so it wins
+    expect(result['growi:service']).toBe('trace');
+  });
+
+  it('supports glob wildcard patterns', () => {
+    process.env.DEBUG = 'growi:*';
+    const result = parseEnvLevels();
+    expect(result).toEqual({ 'growi:*': 'debug' });
+  });
+});

+ 33 - 0
packages/logger/src/env-var-parser.ts

@@ -0,0 +1,33 @@
+import type { LoggerConfig } from './types';
+
+const LEVEL_ENV_VARS: ReadonlyArray<[string, string]> = [
+  ['DEBUG', 'debug'],
+  ['TRACE', 'trace'],
+  ['INFO', 'info'],
+  ['WARN', 'warn'],
+  ['ERROR', 'error'],
+  ['FATAL', 'fatal'],
+];
+
+/**
+ * Parse log-level environment variables into a namespace-to-level map.
+ * Reads: DEBUG, TRACE, INFO, WARN, ERROR, FATAL from process.env.
+ * Later entries in the list override earlier ones for the same namespace.
+ */
+export function parseEnvLevels(): Omit<LoggerConfig, 'default'> {
+  const result: Record<string, string> = {};
+
+  for (const [envVar, level] of LEVEL_ENV_VARS) {
+    const value = process.env[envVar];
+    if (!value) continue;
+
+    for (const pattern of value.split(',')) {
+      const trimmed = pattern.trim();
+      if (trimmed) {
+        result[trimmed] = level;
+      }
+    }
+  }
+
+  return result;
+}

+ 98 - 0
packages/logger/src/http-logger.spec.ts

@@ -0,0 +1,98 @@
+import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
+
+// Mock pino-http before importing
+vi.mock('pino-http', () => {
+  const pinoHttp = vi.fn((_opts: unknown) => {
+    return (_req: unknown, _res: unknown, next: () => void) => next();
+  });
+  return { default: pinoHttp };
+});
+
+// Mock logger-factory
+vi.mock('./logger-factory', () => ({
+  loggerFactory: vi.fn(() => ({
+    level: 'info',
+    info: vi.fn(),
+    debug: vi.fn(),
+    warn: vi.fn(),
+    error: vi.fn(),
+    trace: vi.fn(),
+    fatal: vi.fn(),
+  })),
+}));
+
+describe('createHttpLoggerMiddleware', () => {
+  const originalEnv = process.env;
+
+  beforeEach(() => {
+    process.env = { ...originalEnv };
+    vi.resetModules();
+  });
+
+  afterEach(() => {
+    process.env = originalEnv;
+  });
+
+  it('returns an Express-compatible middleware function', async () => {
+    const { createHttpLoggerMiddleware } = await import('./http-logger');
+    const middleware = await createHttpLoggerMiddleware();
+    expect(typeof middleware).toBe('function');
+  });
+
+  it('uses "express" as the default namespace', async () => {
+    const { loggerFactory } = await import('./logger-factory');
+    const { createHttpLoggerMiddleware } = await import('./http-logger');
+    await createHttpLoggerMiddleware();
+    expect(loggerFactory).toHaveBeenCalledWith('express');
+  });
+
+  it('accepts a custom namespace', async () => {
+    const { loggerFactory } = await import('./logger-factory');
+    const { createHttpLoggerMiddleware } = await import('./http-logger');
+    await createHttpLoggerMiddleware({ namespace: 'custom-http' });
+    expect(loggerFactory).toHaveBeenCalledWith('custom-http');
+  });
+
+  it('passes autoLogging options to pino-http', async () => {
+    const pinoHttp = (await import('pino-http')).default;
+    const { createHttpLoggerMiddleware } = await import('./http-logger');
+
+    const ignoreFn = (req: { url?: string }) =>
+      req.url?.startsWith('/_next/') ?? false;
+    await createHttpLoggerMiddleware({ autoLogging: { ignore: ignoreFn } });
+
+    expect(pinoHttp).toHaveBeenCalledWith(
+      expect.objectContaining({
+        autoLogging: { ignore: ignoreFn },
+      }),
+    );
+  });
+
+  it('applies morganLikeFormatOptions in development mode', async () => {
+    process.env.NODE_ENV = 'development';
+    const pinoHttp = (await import('pino-http')).default;
+    const { createHttpLoggerMiddleware } = await import('./http-logger');
+    await createHttpLoggerMiddleware();
+
+    expect(pinoHttp).toHaveBeenCalledWith(
+      expect.objectContaining({
+        customSuccessMessage: expect.any(Function),
+        customErrorMessage: expect.any(Function),
+        customLogLevel: expect.any(Function),
+      }),
+    );
+  });
+
+  it('does not apply morganLikeFormatOptions in production mode', async () => {
+    process.env.NODE_ENV = 'production';
+    const pinoHttp = (await import('pino-http')).default;
+    const { createHttpLoggerMiddleware } = await import('./http-logger');
+    await createHttpLoggerMiddleware();
+
+    const callArgs = (pinoHttp as ReturnType<typeof vi.fn>).mock
+      .calls[0][0] as Record<string, unknown>;
+    expect(callArgs.customSuccessMessage).toBeUndefined();
+    expect(callArgs.customErrorMessage).toBeUndefined();
+    expect(callArgs.customLogLevel).toBeUndefined();
+  });
+});

+ 47 - 0
packages/logger/src/http-logger.ts

@@ -0,0 +1,47 @@
+import type { IncomingMessage, ServerResponse } from 'node:http';
+import type { HttpLogger, Options as PinoHttpOptions } from 'pino-http';
+
+import { loggerFactory } from './logger-factory';
+
+interface HttpLoggerOptions {
+  /** Logger namespace, defaults to 'express' */
+  namespace?: string;
+  /** Auto-logging configuration (e.g., route ignore patterns) */
+  autoLogging?: {
+    ignore: (req: { url?: string }) => boolean;
+  };
+}
+
+/**
+ * Create Express middleware for HTTP request logging.
+ * In dev: uses pino-http with morgan-like formatting (dynamically imported).
+ * In prod: uses pino-http with default formatting.
+ *
+ * The pino-http dependency is encapsulated here — consumer apps
+ * should not import pino-http directly.
+ */
+export async function createHttpLoggerMiddleware(
+  options?: HttpLoggerOptions,
+): Promise<HttpLogger<IncomingMessage, ServerResponse>> {
+  const namespace = options?.namespace ?? 'express';
+  const logger = loggerFactory(namespace);
+
+  const httpOptions: PinoHttpOptions = {
+    // Logger<string> → pino-http's expected Logger type
+    logger: logger as unknown as PinoHttpOptions['logger'],
+    ...(options?.autoLogging != null
+      ? { autoLogging: options.autoLogging }
+      : {}),
+  };
+
+  // In development, dynamically import morgan-like format options
+  if (process.env.NODE_ENV !== 'production') {
+    const { morganLikeFormatOptions } = await import(
+      './dev/morgan-like-format-options'
+    );
+    Object.assign(httpOptions, morganLikeFormatOptions);
+  }
+
+  const { default: pinoHttp } = await import('pino-http');
+  return pinoHttp(httpOptions);
+}

+ 9 - 0
packages/logger/src/index.ts

@@ -0,0 +1,9 @@
+export { parseEnvLevels } from './env-var-parser';
+export { createHttpLoggerMiddleware } from './http-logger';
+export { resolveLevel } from './level-resolver';
+export { initializeLoggerFactory, loggerFactory } from './logger-factory';
+export {
+  createBrowserOptions,
+  createNodeTransportOptions,
+} from './transport-factory';
+export type { Logger, LoggerConfig, LoggerFactoryOptions } from './types';

+ 103 - 0
packages/logger/src/level-resolver.spec.ts

@@ -0,0 +1,103 @@
+import { describe, expect, it } from 'vitest';
+
+import { resolveLevel } from './level-resolver';
+import type { LoggerConfig } from './types';
+
// Unit tests for resolveLevel(): verifies precedence (env override > config
// pattern > config default) and glob-style matching of logger namespaces.
describe('resolveLevel', () => {
  // Shared fixture: exact patterns, a glob pattern, and a fallback default.
  const baseConfig: LoggerConfig = {
    default: 'info',
    'growi:service:page': 'debug',
    'growi:routes:*': 'debug',
    'growi:crowi': 'debug',
  };

  describe('config pattern matching', () => {
    it('returns default level when no pattern matches', () => {
      const result = resolveLevel('growi:unknown', baseConfig, {});
      expect(result).toBe('info');
    });

    it('returns level for exact namespace match', () => {
      const result = resolveLevel('growi:crowi', baseConfig, {});
      expect(result).toBe('debug');
    });

    it('matches glob wildcard pattern', () => {
      const result = resolveLevel('growi:routes:login', baseConfig, {});
      expect(result).toBe('debug');
    });

    it('does not match partial namespace without wildcard', () => {
      const config: LoggerConfig = {
        default: 'warn',
        'growi:service': 'debug',
      };
      // 'growi:service:page' should NOT match 'growi:service' (no wildcard)
      const result = resolveLevel('growi:service:page', config, {});
      expect(result).toBe('warn');
    });

    it('uses config default when provided', () => {
      const config: LoggerConfig = { default: 'error' };
      const result = resolveLevel('growi:anything', config, {});
      expect(result).toBe('error');
    });
  });

  describe('env override precedence', () => {
    it('env override takes precedence over config pattern', () => {
      const envOverrides = { 'growi:service:page': 'trace' };
      const result = resolveLevel(
        'growi:service:page',
        baseConfig,
        envOverrides,
      );
      expect(result).toBe('trace');
    });

    it('env override glob takes precedence over config exact match', () => {
      const envOverrides = { 'growi:*': 'fatal' };
      const result = resolveLevel('growi:crowi', baseConfig, envOverrides);
      expect(result).toBe('fatal');
    });

    it('falls back to config when no env override matches', () => {
      const envOverrides = { 'other:ns': 'trace' };
      const result = resolveLevel('growi:crowi', baseConfig, envOverrides);
      expect(result).toBe('debug');
    });

    it('falls back to config default when neither env nor config pattern matches', () => {
      const envOverrides = { 'other:ns': 'trace' };
      const result = resolveLevel('growi:unknown:ns', baseConfig, envOverrides);
      expect(result).toBe('info');
    });
  });

  describe('glob pattern matching', () => {
    it('matches deep wildcard patterns', () => {
      const config: LoggerConfig = {
        default: 'info',
        'growi:service:*': 'debug',
      };
      const result = resolveLevel('growi:service:auth', config, {});
      expect(result).toBe('debug');
    });

    it('env override wildcard applies to multiple namespaces', () => {
      const envOverrides = { 'growi:service:*': 'trace' };
      const result1 = resolveLevel(
        'growi:service:page',
        baseConfig,
        envOverrides,
      );
      const result2 = resolveLevel(
        'growi:service:user',
        baseConfig,
        envOverrides,
      );
      expect(result1).toBe('trace');
      expect(result2).toBe('trace');
    });
  });
});

+ 38 - 0
packages/logger/src/level-resolver.ts

@@ -0,0 +1,38 @@
+import { minimatch } from 'minimatch';
+
+import type { LoggerConfig } from './types';
+
+/**
+ * Resolve the log level for a namespace.
+ * Priority: env var match > config pattern match > config default.
+ */
+export function resolveLevel(
+  namespace: string,
+  config: LoggerConfig,
+  envOverrides: Omit<LoggerConfig, 'default'>,
+): string {
+  // 1. Check env overrides first (highest priority)
+  for (const [pattern, level] of Object.entries(envOverrides)) {
+    if (matchesPattern(namespace, pattern)) {
+      return level;
+    }
+  }
+
+  // 2. Check config patterns (excluding the 'default' key)
+  for (const [pattern, level] of Object.entries(config)) {
+    if (pattern === 'default') continue;
+    if (matchesPattern(namespace, pattern)) {
+      return level;
+    }
+  }
+
+  // 3. Fall back to config default
+  return config.default;
+}
+
+function matchesPattern(namespace: string, pattern: string): boolean {
+  // Exact match
+  if (namespace === pattern) return true;
+  // Glob match using minimatch
+  return minimatch(namespace, pattern);
+}

+ 189 - 0
packages/logger/src/logger-factory.spec.ts

@@ -0,0 +1,189 @@
+import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
+
+import { initializeLoggerFactory, loggerFactory } from './logger-factory';
+import type { LoggerConfig } from './types';
+
// ---------------------------------------------------------------------------
// Shared-transport test: pino.transport() must be called exactly once,
// and each namespace logger must be created via rootLogger.child(), not pino().
// ---------------------------------------------------------------------------
describe('shared transport — single Worker thread (Req 11)', () => {
  afterEach(() => {
    vi.restoreAllMocks();
    vi.doUnmock('pino');
    vi.resetModules();
  });

  it('pino() and pino.transport() are called once in initializeLoggerFactory; child() is called per namespace', async () => {
    vi.resetModules();

    // Mock child loggers reference childSpy recursively (closure), so
    // grandchildren created via child().child() would also be tracked.
    const childSpy = vi.fn().mockImplementation(() => ({
      level: 'info',
      info: vi.fn(),
      debug: vi.fn(),
      warn: vi.fn(),
      error: vi.fn(),
      trace: vi.fn(),
      fatal: vi.fn(),
      child: childSpy,
    }));

    const mockRootLogger = {
      level: 'trace',
      child: childSpy,
      info: vi.fn(),
      debug: vi.fn(),
      warn: vi.fn(),
      error: vi.fn(),
      trace: vi.fn(),
      fatal: vi.fn(),
    };

    // pino is both callable and carries a .transport property, so the
    // mock function object gets the extra property attached below.
    const transportSpy = vi.fn().mockReturnValue({});
    const pinoSpy = vi.fn().mockReturnValue(mockRootLogger) as ReturnType<
      typeof vi.fn
    > & {
      transport: ReturnType<typeof vi.fn>;
    };
    pinoSpy.transport = transportSpy;

    vi.doMock('pino', () => ({ default: pinoSpy }));

    // Dynamic import after doMock so the module under test sees the mock
    const { initializeLoggerFactory: init, loggerFactory: factory } =
      await import('./logger-factory');

    init({ config: { default: 'info', 'growi:debug:*': 'debug' } });

    // After initialization: pino() called once (root logger), transport() called once
    expect(pinoSpy).toHaveBeenCalledTimes(1);
    expect(transportSpy).toHaveBeenCalledTimes(1);

    // Create loggers for three distinct namespaces
    factory('growi:service:page');
    factory('growi:service:user');
    factory('growi:debug:something');

    // pino() must NOT be called again — no new instances, no new Worker threads
    expect(pinoSpy).toHaveBeenCalledTimes(1);
    // transport() must NOT be called again
    expect(transportSpy).toHaveBeenCalledTimes(1);
    // child() must be called once per new namespace
    expect(childSpy).toHaveBeenCalledTimes(3);
  });

  it('re-initializing creates a new root logger (one additional pino() call)', async () => {
    vi.resetModules();

    const childSpy = vi.fn().mockImplementation(() => ({
      level: 'info',
      info: vi.fn(),
      debug: vi.fn(),
      warn: vi.fn(),
      error: vi.fn(),
      trace: vi.fn(),
      fatal: vi.fn(),
      child: childSpy,
    }));

    const mockRootLogger = {
      level: 'trace',
      child: childSpy,
      info: vi.fn(),
      debug: vi.fn(),
      warn: vi.fn(),
      error: vi.fn(),
      trace: vi.fn(),
      fatal: vi.fn(),
    };

    const transportSpy = vi.fn().mockReturnValue({});
    const pinoSpy = vi.fn().mockReturnValue(mockRootLogger) as ReturnType<
      typeof vi.fn
    > & {
      transport: ReturnType<typeof vi.fn>;
    };
    pinoSpy.transport = transportSpy;

    vi.doMock('pino', () => ({ default: pinoSpy }));

    const { initializeLoggerFactory: init, loggerFactory: factory } =
      await import('./logger-factory');

    init({ config: { default: 'info' } });
    factory('growi:ns1');

    const callsAfterFirst = pinoSpy.mock.calls.length; // 1

    // Re-initialize — should create a new root logger
    init({ config: { default: 'warn' } });
    factory('growi:ns1');

    expect(pinoSpy).toHaveBeenCalledTimes(callsAfterFirst + 1);
  });
});

// Reset the module-level cache/state between tests
// NOTE(review): these hooks are declared at top level but AFTER the first
// describe; vitest applies file-level hooks to every test in the file,
// including the mock-based suite above — confirm the extra resetModules/
// restoreAllMocks do not interfere with that suite's own afterEach.
beforeEach(() => {
  vi.resetModules();
});

afterEach(() => {
  vi.restoreAllMocks();
});

// Behavioral tests against the real (unmocked) pino instance.
describe('initializeLoggerFactory + loggerFactory', () => {
  const config: LoggerConfig = {
    default: 'info',
    'growi:debug:*': 'debug',
  };

  it('returns a logger with info() method', () => {
    initializeLoggerFactory({ config });
    const logger = loggerFactory('growi:test');
    expect(typeof logger.info).toBe('function');
    expect(typeof logger.debug).toBe('function');
    expect(typeof logger.warn).toBe('function');
    expect(typeof logger.error).toBe('function');
    expect(typeof logger.trace).toBe('function');
    expect(typeof logger.fatal).toBe('function');
  });

  it('returns the same logger instance for the same namespace (cache hit)', () => {
    initializeLoggerFactory({ config });
    const logger1 = loggerFactory('growi:service:page');
    const logger2 = loggerFactory('growi:service:page');
    expect(logger1).toBe(logger2);
  });

  it('returns different logger instances for different namespaces', () => {
    initializeLoggerFactory({ config });
    const logger1 = loggerFactory('growi:service:page');
    const logger2 = loggerFactory('growi:service:user');
    expect(logger1).not.toBe(logger2);
  });

  it('resolves log level from config for matched pattern', () => {
    initializeLoggerFactory({ config });
    const logger = loggerFactory('growi:debug:something');
    expect(logger.level).toBe('debug');
  });

  it('uses default level when no pattern matches', () => {
    initializeLoggerFactory({ config });
    const logger = loggerFactory('growi:unmatched:ns');
    expect(logger.level).toBe('info');
  });

  it('re-initializing clears the cache', () => {
    initializeLoggerFactory({ config });
    const logger1 = loggerFactory('growi:service:page');

    // Re-initialize with different config
    initializeLoggerFactory({ config: { default: 'warn' } });
    const logger2 = loggerFactory('growi:service:page');

    // After re-init, cache is cleared — new instance
    expect(logger1).not.toBe(logger2);
    expect(logger2.level).toBe('warn');
  });
});

+ 90 - 0
packages/logger/src/logger-factory.ts

@@ -0,0 +1,90 @@
+import type { Logger } from 'pino';
+import pino from 'pino';
+
+import { parseEnvLevels } from './env-var-parser';
+import { resolveLevel } from './level-resolver';
+import {
+  createBrowserOptions,
+  createNodeTransportOptions,
+} from './transport-factory';
+import type { LoggerConfig, LoggerFactoryOptions } from './types';
+
// Browser detection (window + document both present); safe under SSR where
// window is undefined.
const isBrowser =
  typeof window !== 'undefined' && typeof window.document !== 'undefined';

// Module-level state, (re)assigned by initializeLoggerFactory().
let moduleConfig: LoggerConfig = { default: 'info' };
let envOverrides: Omit<LoggerConfig, 'default'> = {};
// Cache of namespace → child logger, so repeated loggerFactory() calls
// return the same instance.
const loggerCache = new Map<string, Logger<string>>();

// Shared root logger. pino.transport() is called once here so that all
// namespace loggers share a single Worker thread (pino's performance model).
let rootLogger: Logger<string> | null = null;
+
+function assertRootLogger(
+  logger: Logger<string> | null,
+): asserts logger is Logger<string> {
+  if (logger == null) {
+    throw new Error(
+      'rootLogger is not initialized. Call initializeLoggerFactory() first.',
+    );
+  }
+}
+
+/**
+ * Initialize the logger factory with configuration.
+ * Creates the pino transport and root logger ONCE so that all namespace
+ * loggers share a single Worker thread — preserving pino's performance model.
+ * Must be called once at application startup before any loggerFactory() calls.
+ * Subsequent calls clear the cache and create a fresh root logger.
+ */
+export function initializeLoggerFactory(options: LoggerFactoryOptions): void {
+  moduleConfig = options.config;
+  envOverrides = parseEnvLevels();
+  loggerCache.clear();
+
+  const isProduction = process.env.NODE_ENV === 'production';
+
+  if (isBrowser) {
+    // Browser: no Worker thread involved; use pino's built-in browser mode.
+    // Root level is 'trace' so each child can apply its own resolved level.
+    const { browser } = createBrowserOptions(isProduction);
+    rootLogger = pino({ level: 'trace', browser }) as Logger<string>;
+  } else {
+    // Node.js: call pino.transport() ONCE here.
+    // Every subsequent loggerFactory() call uses rootLogger.child() which
+    // shares this single Worker thread rather than spawning a new one.
+    const { transport } = createNodeTransportOptions(isProduction);
+    rootLogger = (
+      transport != null
+        ? pino({ level: 'trace' }, pino.transport(transport))
+        : pino({ level: 'trace' })
+    ) as Logger<string>;
+  }
+}
+
+/**
+ * Create or retrieve a cached pino logger for the given namespace.
+ * Returns a child of the shared root logger so the Worker thread is reused.
+ */
+export function loggerFactory(name: string): Logger<string> {
+  const cached = loggerCache.get(name);
+  if (cached != null) {
+    return cached;
+  }
+
+  if (rootLogger == null) {
+    // Auto-initialize with default config if the caller skipped the explicit init.
+    initializeLoggerFactory({ config: moduleConfig });
+  }
+
+  assertRootLogger(rootLogger);
+
+  const level = resolveLevel(name, moduleConfig, envOverrides);
+
+  // child() shares the root logger's transport — no new Worker thread spawned.
+  const logger = rootLogger.child({ name });
+  logger.level = level;
+
+  loggerCache.set(name, logger);
+  return logger;
+}

+ 73 - 0
packages/logger/src/transport-factory.spec.ts

@@ -0,0 +1,73 @@
+import { afterEach, beforeEach, describe, expect, it } from 'vitest';
+
+import { createNodeTransportOptions } from './transport-factory';
+
+describe('createNodeTransportOptions', () => {
+  const originalEnv = process.env;
+
+  beforeEach(() => {
+    process.env = { ...originalEnv };
+    delete process.env.FORMAT_NODE_LOG;
+  });
+
+  afterEach(() => {
+    process.env = originalEnv;
+  });
+
+  describe('development mode', () => {
+    it('returns bunyan-format transport config', () => {
+      const opts = createNodeTransportOptions(false);
+      expect(opts.transport).toBeDefined();
+      expect(opts.transport?.target).toContain('bunyan-format');
+    });
+
+    it('passes no options (singleLine defaults to false inside bunyan-format)', () => {
+      const opts = createNodeTransportOptions(false);
+      expect(opts.transport?.options).toBeUndefined();
+    });
+  });
+
+  describe('production mode — raw JSON', () => {
+    it('returns no transport when FORMAT_NODE_LOG is "false"', () => {
+      process.env.FORMAT_NODE_LOG = 'false';
+      const opts = createNodeTransportOptions(true);
+      expect(opts.transport).toBeUndefined();
+    });
+
+    it('returns no transport when FORMAT_NODE_LOG is "0"', () => {
+      process.env.FORMAT_NODE_LOG = '0';
+      const opts = createNodeTransportOptions(true);
+      expect(opts.transport).toBeUndefined();
+    });
+  });
+
+  describe('production mode — formatted (pino-pretty)', () => {
+    it('returns pino-pretty transport when FORMAT_NODE_LOG is unset', () => {
+      delete process.env.FORMAT_NODE_LOG;
+      const opts = createNodeTransportOptions(true);
+      expect(opts.transport).toBeDefined();
+      expect(opts.transport?.target).toBe('pino-pretty');
+    });
+
+    it('returns pino-pretty transport when FORMAT_NODE_LOG is "true"', () => {
+      process.env.FORMAT_NODE_LOG = 'true';
+      const opts = createNodeTransportOptions(true);
+      expect(opts.transport).toBeDefined();
+      expect(opts.transport?.target).toBe('pino-pretty');
+    });
+
+    it('returns pino-pretty transport when FORMAT_NODE_LOG is "1"', () => {
+      process.env.FORMAT_NODE_LOG = '1';
+      const opts = createNodeTransportOptions(true);
+      expect(opts.transport).toBeDefined();
+      expect(opts.transport?.target).toBe('pino-pretty');
+    });
+
+    it('returns singleLine: true for concise one-liner output', () => {
+      delete process.env.FORMAT_NODE_LOG;
+      const opts = createNodeTransportOptions(true);
+      const popts = opts.transport?.options as Record<string, unknown>;
+      expect(popts?.singleLine).toBe(true);
+    });
+  });
+});

+ 76 - 0
packages/logger/src/transport-factory.ts

@@ -0,0 +1,76 @@
+import path from 'node:path';
+import { fileURLToPath } from 'node:url';
+import type { LoggerOptions, TransportSingleOptions } from 'pino';
+
+interface NodeTransportOptions {
+  transport?: TransportSingleOptions;
+}
+
+/**
+ * Returns whether FORMAT_NODE_LOG env var indicates formatted output.
+ * Formatted is the default (returns true when unset or truthy).
+ * Returns false only when explicitly set to 'false' or '0'.
+ */
+function isFormattedOutputEnabled(): boolean {
+  const val = process.env.FORMAT_NODE_LOG;
+  if (val === undefined || val === null) return true;
+  return val !== 'false' && val !== '0';
+}
+
+/**
+ * Create pino transport/options for Node.js environment.
+ * Development: bunyan-format custom transport with human-readable output.
+ * Production: raw JSON by default; standard pino-pretty when FORMAT_NODE_LOG is truthy.
+ */
+export function createNodeTransportOptions(
+  isProduction: boolean,
+): NodeTransportOptions {
+  if (!isProduction) {
+    // Development: use bunyan-format custom transport (dev only)
+    // Use path.join to resolve sibling module — avoids Vite's `new URL(…, import.meta.url)` asset transform
+    const thisDir = path.dirname(fileURLToPath(import.meta.url));
+    const bunyanFormatPath = path.join(thisDir, 'dev', 'bunyan-format.js');
+    return {
+      transport: {
+        target: bunyanFormatPath,
+      },
+    };
+  }
+
+  // Production: raw JSON unless FORMAT_NODE_LOG enables formatting
+  if (!isFormattedOutputEnabled()) {
+    return {};
+  }
+
+  return {
+    transport: {
+      target: 'pino-pretty',
+      options: {
+        translateTime: 'SYS:standard',
+        ignore: 'pid,hostname',
+        singleLine: true,
+      },
+    },
+  };
+}
+
+/**
+ * Create pino browser options.
+ * Development: uses the resolved namespace level.
+ * Production: defaults to 'error' level to minimize console noise.
+ */
+export function createBrowserOptions(
+  isProduction: boolean,
+): Partial<LoggerOptions> {
+  const browserOptions: Partial<LoggerOptions> = {
+    browser: {
+      asObject: false,
+    },
+  };
+
+  if (isProduction) {
+    return { ...browserOptions, level: 'error' };
+  }
+
+  return browserOptions;
+}

+ 22 - 0
packages/logger/src/types.ts

@@ -0,0 +1,22 @@
+import type { Logger as PinoLogger } from 'pino';
+
/**
 * Maps namespace patterns (with glob support) to log level strings.
 * Must include a 'default' key as the fallback level; 'default' is a
 * reserved key, not a pattern.
 * Example: { 'growi:service:*': 'debug', 'default': 'info' }
 */
export type LoggerConfig = {
  default: string;
  // All other keys are namespace patterns mapped to level strings
  [namespacePattern: string]: string;
};

/**
 * Options passed to initializeLoggerFactory().
 */
export interface LoggerFactoryOptions {
  config: LoggerConfig;
}

// Re-export pino Logger type as Logger<string> so consumers can type-annotate without importing
// pino directly, and so the type is compatible with pino-http's logger option.
export type Logger = PinoLogger<string>;

+ 11 - 0
packages/logger/tsconfig.json

@@ -0,0 +1,11 @@
+{
+  "$schema": "http://json.schemastore.org/tsconfig",
+  "extends": "../../tsconfig.base.json",
+  "compilerOptions": {
+    "paths": {
+      "~/*": ["./src/*"]
+    },
+    "types": ["vitest/globals"]
+  },
+  "include": ["src"]
+}

+ 37 - 0
packages/logger/vite.config.ts

@@ -0,0 +1,37 @@
+import path from 'node:path';
+import glob from 'glob';
+import { nodeExternals } from 'rollup-plugin-node-externals';
+import { defineConfig } from 'vite';
+import dts from 'vite-plugin-dts';
+
// Library build configuration: emits ES + CJS per source module.
export default defineConfig({
  plugins: [
    // Generate .d.ts declaration files alongside the JS output
    dts({
      copyDtsFiles: true,
    }),
    {
      // Keep all dependencies (including devDeps) external — this is a library
      ...nodeExternals({
        devDeps: true,
        builtinsPrefix: 'ignore',
      }),
      enforce: 'pre',
    },
  ],
  build: {
    outDir: 'dist',
    sourcemap: true,
    lib: {
      // One entry point per source module, excluding test files
      entry: glob.sync(path.resolve(__dirname, 'src/**/*.ts'), {
        ignore: '**/*.spec.ts',
      }),
      name: 'logger',
      formats: ['es', 'cjs'],
    },
    rollupOptions: {
      output: {
        // Preserve the src/ module structure so consumers can deep-import
        preserveModules: true,
        preserveModulesRoot: 'src',
      },
    },
  },
});

+ 11 - 0
packages/logger/vitest.config.ts

@@ -0,0 +1,11 @@
+import tsconfigPaths from 'vite-tsconfig-paths';
+import { defineConfig } from 'vitest/config';
+
export default defineConfig({
  // Resolve tsconfig `paths` aliases (e.g. `~/*`) inside tests
  plugins: [tsconfigPaths()],
  test: {
    environment: 'node',
    clearMocks: true,
    // Expose describe/it/expect as globals (matches tsconfig "types": ["vitest/globals"])
    globals: true,
  },
});

+ 1 - 3
packages/remark-attachment-refs/package.json

@@ -45,19 +45,17 @@
   },
   "dependencies": {
     "@growi/core": "workspace:^",
+    "@growi/logger": "workspace:^",
     "@growi/remark-growi-directive": "workspace:^",
     "@growi/ui": "workspace:^",
     "axios": "^1.11.0",
-    "bunyan": "^1.8.15",
     "express": "^4.20.0",
     "hast-util-select": "^6.0.2",
     "mongoose": "^6.13.6",
     "swr": "^2.3.2",
-    "universal-bunyan": "^0.9.2",
     "xss": "^1.0.15"
   },
   "devDependencies": {
-    "@types/bunyan": "^1.8.11",
     "@types/hast": "^3.0.4",
     "@types/react": "^18.2.14",
     "@types/react-dom": "^18.2.6",

+ 2 - 3
packages/remark-attachment-refs/src/client/services/renderer/refs.ts

@@ -1,4 +1,5 @@
 import { pathUtils } from '@growi/core/dist/utils';
+import { loggerFactory } from '@growi/logger';
 import type {
   LeafGrowiPluginDirective,
   TextGrowiPluginDirective,
@@ -10,8 +11,6 @@ import { selectAll } from 'hast-util-select';
 import type { Plugin } from 'unified';
 import { visit } from 'unist-util-visit';
 
-import loggerFactory from '../../../utils/logger';
-
 const logger = loggerFactory(
   'growi:remark-attachment-refs:services:renderer:refs',
 );
@@ -104,7 +103,7 @@ export const remarkPlugin: Plugin = () => (tree) => {
         return;
       }
 
-      logger.debug('a node detected', attributes);
+      logger.debug({ attributes }, 'a node detected');
 
       // kebab case to camel case
       attributes.maxWidth = attributes['max-width'];

+ 1 - 2
packages/remark-attachment-refs/src/server/routes/refs.ts

@@ -3,14 +3,13 @@ import { SCOPE } from '@growi/core/dist/interfaces';
 import type { AccessTokenParser } from '@growi/core/dist/interfaces/server';
 import { serializeAttachmentSecurely } from '@growi/core/dist/models/serializers';
 import { OptionParser } from '@growi/core/dist/remark-plugins';
+import { loggerFactory } from '@growi/logger';
 import type { Request } from 'express';
 import { Router } from 'express';
 import type { HydratedDocument, Model } from 'mongoose';
 import mongoose, { model, Types } from 'mongoose';
 import { FilterXSS } from 'xss';
 
-import loggerFactory from '../../utils/logger';
-
 const logger = loggerFactory('growi:remark-attachment-refs:routes:refs');
 
 function generateRegexp(expression: string): RegExp {

Alguns ficheiros não foram mostrados porque muitos ficheiros mudaram neste diff