diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..182afc8 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,72 @@ +# Dependencies +node_modules/ +.pnpm-store/ + +# Build output (we rebuild in Docker) +dist/ +coverage/ + +# Environment files - NEVER include in images +.env +.env.* +!.env.example +.env.clients.json + +# Secrets and keys +*.pem +*.key +*.p12 +*.pfx +secrets.json +credentials.json +*-secrets.json +*-credentials.json + +# Editor and IDE +.vscode/ +.idea/ +*.swp +*.swo + +# macOS +.DS_Store + +# Git +.git/ +.gitignore + +# Documentation (not needed in production image) +*.md +!README.md +docs/ + +# Development files +.claude/ +CLAUDE.md +GEMINI.md + +# Logs +*.log +logs/ + +# Test files +**/*.test.ts +**/*.spec.ts +__tests__/ +test/ +tests/ + +# CI/CD configuration (not needed in image) +.github/ + +# Docker files (prevent recursive copying) +Dockerfile* +docker-compose*.yml +.dockerignore + +# Certificates (should be mounted, not baked in) +certs/ + +# Local development +caddyfile +out/ diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..63cdc18 --- /dev/null +++ b/.env.example @@ -0,0 +1,54 @@ +# Core Exchange Node Example - Environment Configuration +# ============================================================================= +# Copy this file to .env in the project root to configure the application. 
+# +# IMPORTANT SECURITY NOTES: +# - Never commit .env files to version control +# - Use different values for each environment (dev, staging, production) +# - Generate production secrets with: node scripts/secrets.js all +# - Store production secrets in a secure secret manager +# ============================================================================= + +# ===== SERVICE URLS ===== +# These URLs must match your Caddy/proxy configuration +OP_ISSUER=https://id.localtest.me +APP_HOST=https://app.localtest.me +APP_BASE_URL=https://app.localtest.me +API_HOST=https://api.localtest.me +API_BASE_URL=https://api.localtest.me + +# ===== SERVICE PORTS ===== +OP_PORT=3001 +APP_PORT=3004 +API_PORT=3003 + +# ===== OAUTH CLIENT CONFIGURATION ===== +# For development: Use these placeholder values +# For production: Generate with `node scripts/secrets.js client` +CLIENT_ID=dev-rp-CHANGE-FOR-PRODUCTION +CLIENT_SECRET=dev-secret-CHANGE-FOR-PRODUCTION +REDIRECT_URI=https://app.localtest.me/callback + +# ===== APPLICATION SECRETS ===== +# For development: Use this placeholder value +# For production: Generate with `node scripts/secrets.js secrets` +COOKIE_SECRET=dev-cookie-secret-CHANGE-FOR-PRODUCTION + +# ===== API CONFIGURATION ===== +API_AUDIENCE=api://my-api + +# ===== JWKS (Token Signing Keys) ===== +# For development: Leave commented (uses ephemeral keys - tokens invalidate on restart) +# For production: Generate with `node scripts/secrets.js jwks` +# JWKS='{"keys":[...]}' + +# ===== MULTIPLE OAUTH CLIENTS (Optional) ===== +# To register multiple OAuth clients, either: +# 1. Copy apps/auth/.env.clients.example.json to .env.clients.json +# 2. 
Or set this environment variable as a JSON array +# OIDC_CLIENTS=[{"client_id":"...","client_secret":"...","redirect_uris":["..."]}] + +# ===== LOGGING ===== +# Options: trace, debug, info, warn, error, fatal +# Use 'debug' for detailed OAuth flow logging during development +LOG_LEVEL=info diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..bd25bac --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,59 @@ +# Dependabot configuration for automated dependency updates +# https://docs.github.com/en/code-security/dependabot/dependabot-version-updates + +version: 2 +updates: + # npm dependencies + - package-ecosystem: "npm" + directory: "/" + schedule: + interval: "weekly" + day: "monday" + time: "09:00" + timezone: "America/Los_Angeles" + open-pull-requests-limit: 10 + commit-message: + prefix: "deps" + labels: + - "dependencies" + - "automated" + # Group minor and patch updates together + groups: + production-dependencies: + patterns: + - "*" + exclude-patterns: + - "@types/*" + - "typescript" + - "eslint*" + - "@eslint/*" + - "@stylistic/*" + - "@typescript-eslint/*" + update-types: + - "minor" + - "patch" + dev-dependencies: + patterns: + - "@types/*" + - "typescript" + - "eslint*" + - "@eslint/*" + - "@stylistic/*" + - "@typescript-eslint/*" + update-types: + - "minor" + - "patch" + + # GitHub Actions + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" + day: "monday" + time: "09:00" + timezone: "America/Los_Angeles" + commit-message: + prefix: "ci" + labels: + - "ci" + - "automated" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..57d467d --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,94 @@ +name: CI + +on: + pull_request: + branches: [main] + push: + branches: [main] + +# Cancel in-progress runs for the same branch +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + lint: 
+ name: Lint + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + run_install: false + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '22.x' + cache: 'pnpm' + cache-dependency-path: pnpm-lock.yaml + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Run ESLint + run: pnpm lint + + build: + name: Build + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + run_install: false + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '22.x' + cache: 'pnpm' + cache-dependency-path: pnpm-lock.yaml + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Build all packages + run: pnpm build + + security: + name: Security Audit + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + run_install: false + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '22.x' + cache: 'pnpm' + cache-dependency-path: pnpm-lock.yaml + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Run security audit + run: pnpm audit --audit-level=high + continue-on-error: true # Don't fail the build, but report issues + + - name: Check for known vulnerabilities + run: | + echo "## Security Audit Results" >> $GITHUB_STEP_SUMMARY + pnpm audit --audit-level=moderate 2>&1 | head -100 >> $GITHUB_STEP_SUMMARY || true diff --git a/.github/workflows/security.yml b/.github/workflows/security.yml new file mode 100644 index 0000000..fd1429c --- /dev/null +++ b/.github/workflows/security.yml @@ -0,0 +1,132 @@ +name: Security Scan + +on: + schedule: + # Run weekly on Monday at 9 AM UTC + - cron: '0 9 * * 1' + push: + branches: [main] + paths: + - 'pnpm-lock.yaml' + - '**/package.json' + 
workflow_dispatch: + +permissions: + contents: read + security-events: write + +jobs: + dependency-audit: + name: Dependency Audit + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + run_install: false + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '22.x' + cache: 'pnpm' + cache-dependency-path: pnpm-lock.yaml + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Run npm audit + run: | + echo "## Dependency Audit Report" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "### High and Critical Vulnerabilities" >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + pnpm audit --audit-level=high 2>&1 | tee audit-high.txt || true + cat audit-high.txt >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + + # Emit a warning annotation if high or critical vulnerabilities are found (does not fail the job) + if grep -q "found [1-9]" audit-high.txt; then + echo "::warning::Security vulnerabilities found. Review the audit report." 
+ fi + + - name: Check for outdated packages + run: | + echo "" >> $GITHUB_STEP_SUMMARY + echo "### Outdated Packages" >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + pnpm outdated 2>&1 | head -50 >> $GITHUB_STEP_SUMMARY || true + echo '```' >> $GITHUB_STEP_SUMMARY + + codeql: + name: CodeQL Analysis + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Initialize CodeQL + uses: github/codeql-action/init@v3 + with: + languages: javascript-typescript + queries: security-and-quality + + - name: Autobuild + uses: github/codeql-action/autobuild@v3 + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v3 + with: + category: "/language:javascript-typescript" + + docker-scan: + name: Docker Image Scan + runs-on: ubuntu-latest + strategy: + matrix: + service: [auth, api, app] + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Build image for scanning + uses: docker/build-push-action@v5 + with: + context: . 
+ file: apps/${{ matrix.service }}/Dockerfile + push: false + load: true + tags: core-exchange-${{ matrix.service }}:scan + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Run Trivy vulnerability scanner + uses: aquasecurity/trivy-action@master + with: + image-ref: 'core-exchange-${{ matrix.service }}:scan' + format: 'sarif' + output: 'trivy-results-${{ matrix.service }}.sarif' + severity: 'CRITICAL,HIGH' + + - name: Upload Trivy scan results + uses: github/codeql-action/upload-sarif@v3 + if: always() + with: + sarif_file: 'trivy-results-${{ matrix.service }}.sarif' + + - name: Trivy summary + uses: aquasecurity/trivy-action@master + with: + image-ref: 'core-exchange-${{ matrix.service }}:scan' + format: 'table' + severity: 'CRITICAL,HIGH,MEDIUM' diff --git a/.gitignore b/.gitignore index 85c3239..e9961aa 100644 --- a/.gitignore +++ b/.gitignore @@ -6,16 +6,38 @@ node_modules/ dist/ coverage/ -# env +# env - NEVER commit actual environment files .env +.env.local .env.prod +.env.production +.env.staging +.env.development +.env.*.local .env.clients.json apps/**/.env +apps/**/.env.local apps/**/.env.prod +apps/**/.env.production +apps/**/.env.staging packages/**/.env +packages/**/.env.local packages/**/.env.prod .config +# secrets and keys - NEVER commit these +*.pem +*.key +*.p12 +*.pfx +*.jks +*.keystore +secrets.json +credentials.json +service-account*.json +*-credentials.json +*-secrets.json + # editor .vscode/ .idea/ @@ -33,5 +55,9 @@ certs/* # claude .claude/ +# docker - exclude production compose (contains env-specific config) +docker-compose.prod.yml +caddyfile.prod + # misc apps/**/corex.yaml diff --git a/CLAUDE.md b/CLAUDE.md index aa2cac9..aaa4e5e 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -61,7 +61,7 @@ The current `caddyfile` configuration routes traffic as follows: If you prefer not to use sudo, you can modify the `caddyfile` to use high ports: -``` +```bash :8443 { tls internal reverse_proxy localhost:3001 @@ -249,6 +249,206 @@ The 
authorization server uses JWKS (JSON Web Key Set) to sign JWT tokens: **Configuration location:** `apps/auth/src/index.ts` (lines 68-89) loads JWKS from environment and logs warnings if not set +## Sensitive Data Handling + +This project implements security best practices for handling sensitive configuration and credentials. + +### Environment Variables + +All sensitive configuration is managed through environment variables: + +| Variable | Purpose | Security Level | +| -------- | ------- | -------------- | +| `CLIENT_SECRET` | OAuth client authentication | High - Never commit | +| `COOKIE_SECRET` | Session cookie signing | High - Never commit | +| `JWKS` | Token signing keys (contains private key) | Critical - Never commit | +| `OIDC_CLIENTS` | Multiple client configurations | High - Never commit | + +### Template Configuration Files + +The project provides `.env.example` templates at multiple levels: + +- **Root level**: `.env.example` - Complete configuration template +- **Auth server**: `apps/auth/.env.example` - Authorization server configuration +- **API server**: `apps/api/.env.example` - Resource server configuration +- **Client app**: `apps/app/.env.example` - Relying party configuration +- **Multiple clients**: `apps/auth/.env.clients.example.json` - Multi-client template + +To set up a new environment: + +```bash +# Copy the root template +cp .env.example .env + +# Or copy individual app templates +cp apps/auth/.env.example apps/auth/.env +cp apps/api/.env.example apps/api/.env +cp apps/app/.env.example apps/app/.env +``` + +### Generating Production Secrets + +Use the built-in secrets CLI to generate cryptographically secure values: + +```bash +# Generate all secrets at once +node scripts/secrets.js all + +# Or generate individual components +node scripts/secrets.js client # OAuth client credentials +node scripts/secrets.js secrets # Application secrets (COOKIE_SECRET) +node scripts/secrets.js jwks # Token signing keys +``` + +### Gitignore Protection + 
+The `.gitignore` is configured to prevent accidental commits of sensitive files: + +- `.env`, `.env.local`, `.env.prod`, `.env.production`, `.env.staging` +- `.env.clients.json` (actual client configurations) +- `*.pem`, `*.key`, `*.p12`, `*.pfx` (certificate/key files) +- `secrets.json`, `credentials.json`, `*-secrets.json` +- `service-account*.json` (cloud provider credentials) + +### Security Checklist + +Before deploying to production: + +1. **Generate new secrets**: Run `node scripts/secrets.js all` and use the output +2. **Never use dev values**: Replace all `*-CHANGE-FOR-PRODUCTION` placeholders +3. **Use a secret manager**: Store secrets in AWS Secrets Manager, HashiCorp Vault, or similar +4. **Rotate regularly**: Establish a secret rotation schedule +5. **Audit access**: Limit who can access production secrets +6. **Monitor for leaks**: Use tools like git-secrets or truffleHog to scan for exposed credentials + +### Demo Credentials + +The following credentials are intentionally hardcoded for **development/demo purposes only**: + +- Test user: `user@example.test` / `passw0rd!` +- Blocked user: `blocked@example.test` / `passw0rd!` + +These are stored in-memory in `apps/auth/src/index.ts` and should be replaced with a proper user database and password hashing (bcrypt/argon2) for production use. + +## CI/CD and Build Processes + +This project includes automated CI/CD pipelines and containerization support for secure, repeatable deployments. 
+ +### GitHub Actions Workflows + +| Workflow | Trigger | Purpose | +| -------- | ------- | ------- | +| `ci.yml` | PRs, push to main | Lint, build, security audit | +| `security.yml` | Weekly, dependency changes | CodeQL analysis, Docker image scanning | +| `deploy-*.yml` | Push to paths | Deploy individual services to VM | + +#### CI Workflow (`ci.yml`) + +Runs on every pull request and push to main: + +```bash +# Jobs run in parallel: +- Lint # ESLint validation +- Build # TypeScript compilation + CSS build +- Security # npm audit for vulnerabilities +``` + +#### Security Workflow (`security.yml`) + +Comprehensive security scanning: + +- **Dependency Audit**: Weekly npm audit for known vulnerabilities +- **CodeQL Analysis**: Static analysis for security issues +- **Docker Image Scan**: Trivy scanner for container vulnerabilities + +### Dependabot + +Automated dependency updates via `.github/dependabot.yml`: + +- **npm packages**: Weekly updates, grouped by type (production vs dev) +- **GitHub Actions**: Weekly updates for workflow actions + +### Docker Support + +Each service has a production-ready Dockerfile with multi-stage builds: + +```bash +# Build individual images +docker build -f apps/auth/Dockerfile -t core-exchange-auth . +docker build -f apps/api/Dockerfile -t core-exchange-api . +docker build -f apps/app/Dockerfile -t core-exchange-app . 
+ +# Or use docker-compose +docker compose up --build +``` + +**Dockerfile features**: + +- Multi-stage builds for minimal image size +- Non-root user for security +- Health checks for container orchestration +- Production-only dependencies + +### Docker Compose + +Two compose configurations are provided: + +| File | Purpose | +| ---- | ------- | +| `docker-compose.yml` | Local development with Docker | +| `docker-compose.prod.example.yml` | Production template (copy and customize) | + +```bash +# Development +docker compose up --build + +# Production (after customizing) +cp docker-compose.prod.example.yml docker-compose.prod.yml +# Edit docker-compose.prod.yml with production values +docker compose -f docker-compose.prod.yml up -d +``` + +### Build Commands + +```bash +# Install dependencies (frozen lockfile for reproducibility) +pnpm install --frozen-lockfile + +# Build all services +pnpm build + +# Build individual services +pnpm --filter @apps/shared build +pnpm --filter @apps/auth build +pnpm --filter @apps/api build +pnpm --filter @apps/app build + +# Run linting +pnpm lint +pnpm lint:fix +``` + +### Deployment Security Checklist + +Before deploying to production: + +1. **Use frozen lockfile**: Always `pnpm install --frozen-lockfile` +2. **Pin Node.js version**: Use exact version (22.x) in CI and Dockerfiles +3. **Run security audit**: `pnpm audit --audit-level=high` +4. **Build verification**: Ensure `pnpm build` succeeds before deployment +5. **Use secrets management**: Never commit secrets; use environment variables or secret managers +6. **Enable branch protection**: Require PR reviews and passing CI checks +7. 
**Scan Docker images**: Use Trivy or similar before pushing to registry + +### Branch Protection (Recommended) + +Configure these settings on your main branch: + +- Require pull request reviews before merging +- Require status checks to pass (CI workflow) +- Require branches to be up to date before merging +- Do not allow bypassing the above settings + ## Testing the Flow 1. Visit `https://id.localtest.me/.well-known/openid-configuration` to verify the Auth server is running diff --git a/README.md b/README.md index e3b0c62..8da27d0 100644 --- a/README.md +++ b/README.md @@ -471,6 +471,84 @@ Check out `apps/auth/.env.clients.example.json` for a complete example, then cop If you change `OP_ISSUER` or ports, remember to update the client registration (especially redirect URIs) and restart everything. +## Security + +This reference implementation includes security best practices that you should carry forward into your own implementations. See [SECURITY.md](SECURITY.md) for our security policy and vulnerability reporting guidelines. + +### Security Features + +| Feature | Implementation | +| ------- | -------------- | +| **Input Validation** | Zod schemas validate all external inputs with allow-list approach | +| **Security Headers** | Helmet.js sets secure HTTP headers (CSP, HSTS, X-Frame-Options, etc.) 
| +| **HTTPS Required** | All services communicate over TLS via Caddy | +| **Secure Cookies** | Tokens stored in HTTP-only, secure, same-site cookies | +| **Password Security** | Timing-safe comparison prevents timing attacks | +| **JWT Validation** | Access tokens validated against JWKS with proper audience/issuer checks | + +### Dependency Security + +We use automated tools to keep dependencies secure: + +- **Dependabot**: Weekly automated PRs for dependency updates +- **npm audit**: Security vulnerability scanning in CI +- **CodeQL**: Static analysis for security issues +- **Trivy**: Container image vulnerability scanning + +### What You Should Add for Production + +This demo intentionally omits some production requirements: + +- **Rate limiting** - Prevent brute force and DoS attacks +- **Audit logging** - Track authentication events and API access +- **Account lockout** - Lock accounts after repeated failed login attempts +- **Password hashing** - Replace demo passwords with bcrypt/Argon2 hashed passwords +- **Persistent storage** - Replace in-memory stores with a database + +## Docker and CI/CD + +This project includes containerization and CI/CD support for secure, repeatable deployments. + +### Docker + +Each service has a production-ready Dockerfile with multi-stage builds: + +```bash +# Build and run all services +docker compose up --build + +# Or build individual images +docker build -f apps/auth/Dockerfile -t core-exchange-auth . +docker build -f apps/api/Dockerfile -t core-exchange-api . +docker build -f apps/app/Dockerfile -t core-exchange-app . +``` + +**Dockerfile features:** + +- Multi-stage builds for minimal image size +- Non-root user for security +- Health checks for orchestration +- Production dependencies only + +See `docker-compose.yml` for local development and `docker-compose.prod.example.yml` as a production template. 
+ +### GitHub Actions + +| Workflow | Purpose | +| -------- | ------- | +| `ci.yml` | Lint, build, and security audit on PRs | +| `security.yml` | Weekly CodeQL and container scanning | +| `deploy-*.yml` | Deploy services to infrastructure | + +### Recommended Branch Protection + +Configure these settings on your main branch: + +- Require pull request reviews before merging +- Require status checks to pass (CI workflow) +- Require branches to be up to date +- Enable Dependabot security alerts + ## JWT Access Tokens with Resource Indicators (RFC 8707) We use **Resource Indicators for OAuth 2.0 (RFC 8707)** to issue **JWT access tokens** instead of opaque tokens. This matters if your API needs to validate tokens locally without a callback to the auth server. diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000..f74b4af --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,116 @@ +# Security Policy + +## Supported Versions + +This is a reference implementation intended for educational and development purposes. We recommend always using the latest version. + +| Version | Supported | +| -------- | ------------------ | +| latest | :white_check_mark: | +| < latest | :x: | + +## Reporting a Vulnerability + +We take security seriously. If you discover a security vulnerability in this project, please report it responsibly. + +### How to Report + +1. **Do NOT open a public GitHub issue** for security vulnerabilities +2. Email security concerns to the repository maintainers +3. 
Include as much detail as possible: + - Description of the vulnerability + - Steps to reproduce + - Potential impact + - Suggested fix (if any) + +### What to Expect + +- **Acknowledgment**: We will acknowledge receipt within 48 hours +- **Assessment**: We will assess the vulnerability and determine its severity +- **Resolution**: We aim to address critical vulnerabilities within 7 days +- **Disclosure**: We will coordinate disclosure timing with you + +### Scope + +This security policy applies to: + +- The core application code in this repository +- Configuration examples and templates +- Documentation that could lead to security issues if followed incorrectly + +Out of scope: + +- Third-party dependencies (report to their maintainers) +- Issues in development/demo credentials (these are intentionally simple) +- Theoretical vulnerabilities without proof of concept + +## Security Features + +This reference implementation includes several security features: + +### Authentication & Authorization + +- OpenID Connect with PKCE +- JWT access tokens with proper validation +- Secure token storage in HTTP-only cookies +- Timing-safe password comparison + +### Input Validation + +- Zod schema validation for all external inputs +- Allow-list approach for API endpoints +- Bounds checking on pagination parameters +- Path parameter sanitization + +### Infrastructure Security + +- Helmet.js for security headers +- HTTPS required (via Caddy) +- Non-root Docker containers +- Frozen lockfiles for reproducible builds + +### CI/CD Security + +- Automated dependency updates (Dependabot) +- Security scanning (npm audit, CodeQL, Trivy) +- Branch protection recommendations + +## Security Best Practices for Implementers + +If you're using this as a reference for your own implementation: + +1. **Never use development credentials in production** + - Generate new secrets: `node scripts/secrets.js all` + - Store secrets in a proper secret manager + +2. 
**Replace the in-memory user store** + - Use a proper database + - Hash passwords with bcrypt or Argon2 + - Implement account lockout after failed attempts + +3. **Enable all security features** + - Configure branch protection + - Enable Dependabot alerts + - Set up security scanning workflows + +4. **Keep dependencies updated** + - Review Dependabot PRs promptly + - Run `pnpm audit` regularly + - Subscribe to security advisories for key dependencies + +5. **Implement additional production controls** + - Rate limiting + - Audit logging + - Intrusion detection + - Regular security assessments + +## Known Limitations + +This is a reference implementation with intentional simplifications: + +- **In-memory storage**: Sessions and grants don't persist across restarts +- **Demo credentials**: Hardcoded test users for demonstration +- **No rate limiting**: Should be added for production use +- **Simplified error handling**: Production should have more granular error responses + +These are documented to help implementers understand what needs to be enhanced for production use. 
diff --git a/apps/api/Dockerfile b/apps/api/Dockerfile new file mode 100644 index 0000000..9120de1 --- /dev/null +++ b/apps/api/Dockerfile @@ -0,0 +1,74 @@ +# Resource Server (API) Dockerfile +# Multi-stage build for optimal image size and security + +# ============================================================================= +# Stage 1: Build +# ============================================================================= +FROM node:22-alpine AS builder + +# Install pnpm +RUN corepack enable && corepack prepare pnpm@latest --activate + +WORKDIR /app + +# Copy workspace configuration +COPY pnpm-lock.yaml pnpm-workspace.yaml package.json ./ + +# Copy app and shared package files +COPY apps/api/package.json ./apps/api/ +COPY apps/shared/package.json ./apps/shared/ + +# Install dependencies +RUN pnpm install --frozen-lockfile + +# Copy source code +COPY apps/shared ./apps/shared +COPY apps/api ./apps/api +COPY tsconfig.base.json ./ + +# Build shared library first, then api +RUN pnpm --filter @apps/shared build +RUN pnpm --filter @apps/api build + +# ============================================================================= +# Stage 2: Production +# ============================================================================= +FROM node:22-alpine AS production + +# Install pnpm for deploy command +RUN corepack enable && corepack prepare pnpm@latest --activate + +# Create non-root user for security +RUN addgroup -g 1001 -S nodejs && \ + adduser -S nodejs -u 1001 + +WORKDIR /app + +# Copy workspace files +COPY pnpm-lock.yaml pnpm-workspace.yaml package.json ./ +COPY apps/api/package.json ./apps/api/ +COPY apps/shared/package.json ./apps/shared/ + +# Install production dependencies only +RUN pnpm install --frozen-lockfile --prod + +# Copy built artifacts from builder stage +COPY --from=builder /app/apps/shared/dist ./apps/shared/dist +COPY --from=builder /app/apps/api/dist ./apps/api/dist + +# Switch to non-root user +USER nodejs + +# Expose port +EXPOSE 3003 + +# 
Set environment defaults +ENV NODE_ENV=production +ENV API_PORT=3003 + +# Health check +HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \ + CMD wget --no-verbose --tries=1 --spider http://localhost:3003/health || exit 1 + +# Start the application +CMD ["node", "apps/api/dist/index.js"] diff --git a/apps/api/src/routes/accounts.ts b/apps/api/src/routes/accounts.ts index cfa2ff3..6972ee6 100644 --- a/apps/api/src/routes/accounts.ts +++ b/apps/api/src/routes/accounts.ts @@ -1,7 +1,16 @@ import express, { Request, Response } from "express"; import { getAccounts, getAccountById, getAccountContactById, getAccountStatements, getAccountStatementById, getAccountTransactions, getPaymentNetworks, getAssetTransferNetworks } from "../data/accountsRepository.js"; -import { isValidDate } from "../utils/validation.js"; import pino from "pino"; +import { + paginationSchema, + dateRangePaginationSchema, + accountIdSchema, + statementIdSchema, + sanitizeForLogging, + formatZodError, + type PaginationParams, + type DateRangePaginationParams +} from "@apps/shared/validation"; const logger = pino( { transport: { @@ -14,28 +23,53 @@ const logger = pino( { const router = express.Router(); -interface AccountsQueryParams { - offset?: string; - limit?: string; +/** + * Validate and parse pagination query parameters. + * Returns validated params with bounds checking applied. + */ +function validatePagination( query: Record ): PaginationParams { + const result = paginationSchema.safeParse( query ); + if ( !result.success ) { + // Return defaults if validation fails + return { offset: 0, limit: 100 }; + } + return result.data; } -interface AccountStatementsQueryParams { - offset?: string; - limit?: string; - startTime?: string; - endTime?: string; +/** + * Validate and parse date range with pagination query parameters. + * Returns a failure result with error details if validation fails. 
+ */ +function validateDateRangePagination( query: Record ): { success: true; data: DateRangePaginationParams } | { success: false; error: string } { + const result = dateRangePaginationSchema.safeParse( query ); + if ( !result.success ) { + return { success: false, error: formatZodError( result.error ) }; + } + return { success: true, data: result.data }; +} -interface AccountTransactionsQueryParams { - offset?: string; - limit?: string; - startTime?: string; - endTime?: string; +/** + * Validate account ID path parameter. + * Returns a failure result with error details if validation fails. + */ +function validateAccountId( accountId: string ): { success: true; data: string } | { success: false; error: string } { + const result = accountIdSchema.safeParse( accountId ); + if ( !result.success ) { + return { success: false, error: formatZodError( result.error ) }; + } + return { success: true, data: result.data }; +} -interface PaymentNetworksQueryParams { - offset?: string; - limit?: string; +/** + * Validate statement ID path parameter. + * Returns a failure result with error details if validation fails. 
+ */ +function validateStatementId( statementId: string ): { success: true; data: string } | { success: false; error: string } { + const result = statementIdSchema.safeParse( statementId ); + if ( !result.success ) { + return { success: false, error: formatZodError( result.error ) }; + } + return { success: true, data: result.data }; } // Shared helper to validate account existence and send appropriate HTTP responses @@ -56,10 +90,9 @@ async function verifyAccount( accountId: string, res: Response, notFoundCode = 7 } // GET /accounts with pagination support -router.get( "/accounts", async ( req: Request<{}, {}, {}, AccountsQueryParams>, res: Response ) => { - // Extract pagination parameters from query string with defaults - const offset = parseInt( req.query.offset as string ) || 0; - const limit = parseInt( req.query.limit as string ) || 100; +router.get( "/accounts", async ( req: Request, res: Response ) => { + // Validate and extract pagination parameters with bounds checking + const { offset, limit } = validatePagination( req.query ); try { // Get accounts using the repository @@ -83,7 +116,12 @@ router.get( "/accounts", async ( req: Request<{}, {}, {}, AccountsQueryParams>, } ); router.get( "/accounts/:accountId", async ( req: Request<{ accountId: string }>, res: Response ) => { - const { accountId } = req.params; + // Validate accountId path parameter + const accountIdResult = validateAccountId( req.params.accountId ); + if ( !accountIdResult.success ) { + return res.status( 400 ).json( { error: "Validation failed", details: accountIdResult.error } ); + } + const accountId = accountIdResult.data; try { const account = await getAccountById( accountId ); @@ -93,14 +131,19 @@ router.get( "/accounts/:accountId", async ( req: Request<{ accountId: string }>, } res.json( account ); - } catch ( error ) { - logger.error( error, "Error retrieving account" ); + } catch { + logger.error( { accountId: sanitizeForLogging( accountId ) }, "Error retrieving account" ); 
res.status( 500 ).json( { error: "Internal server error" } ); } } ); router.get( "/accounts/:accountId/contact", async ( req: Request<{ accountId: string }>, res: Response ) => { - const { accountId } = req.params; + // Validate accountId path parameter + const accountIdResult = validateAccountId( req.params.accountId ); + if ( !accountIdResult.success ) { + return res.status( 400 ).json( { error: "Validation failed", details: accountIdResult.error } ); + } + const accountId = accountIdResult.data; const account = await verifyAccount( accountId, res, 701 ); if ( !account ) return; @@ -113,34 +156,33 @@ router.get( "/accounts/:accountId/contact", async ( req: Request<{ accountId: st } res.json( contact ); - } catch ( error ) { - logger.error( error, "Error retrieving account contact" ); + } catch { + logger.error( { accountId: sanitizeForLogging( accountId ) }, "Error retrieving account contact" ); res.status( 500 ).json( { error: "Internal server error" } ); } } ); // GET /accounts/:accountId/statements with pagination support -router.get( "/accounts/:accountId/statements", async ( req: Request<{ accountId: string }, {}, {}, AccountStatementsQueryParams>, res: Response ) => { - // Extract params and pagination from query string with defaults - const { accountId } = req.params; - const offset = parseInt( req.query.offset as string ) || 0; - const limit = parseInt( req.query.limit as string ) || 100; - const startTime = req.query.startTime || ""; - const endTime = req.query.endTime || ""; - - const account = await verifyAccount( accountId, res, 701 ); - if ( !account ) return; - - if ( !isValidDate( startTime ) || !isValidDate( endTime ) ) { - return res.status( 400 ).json( { error: "Invalid date format for startTime or endTime" } ); +router.get( "/accounts/:accountId/statements", async ( req: Request<{ accountId: string }>, res: Response ) => { + // Validate accountId path parameter + const accountIdResult = validateAccountId( req.params.accountId ); + if ( 
!accountIdResult.success ) { + return res.status( 400 ).json( { error: "Validation failed", details: accountIdResult.error } ); } + const accountId = accountIdResult.data; - if ( startTime && endTime && new Date( startTime ) > new Date( endTime ) ) { - return res.status( 400 ).json( { error: "startTime must be before or equal to endTime" } ); + // Validate query parameters including date range and pagination + const queryResult = validateDateRangePagination( req.query ); + if ( !queryResult.success ) { + return res.status( 400 ).json( { error: "Validation failed", details: queryResult.error } ); } + const { offset, limit, startTime, endTime } = queryResult.data; + + const account = await verifyAccount( accountId, res, 701 ); + if ( !account ) return; try { - const result = await getAccountStatements( accountId, offset, limit, startTime, endTime ); + const result = await getAccountStatements( accountId, offset, limit, startTime || "", endTime || "" ); // Calculate pagination metadata const hasMore = offset + limit < result.total; @@ -153,15 +195,27 @@ router.get( "/accounts/:accountId/statements", async ( req: Request<{ accountId: }; res.json( response ); - } catch ( error ) { - logger.error( error, "Error retrieving accounts" ); + } catch { + logger.error( { accountId: sanitizeForLogging( accountId ) }, "Error retrieving statements" ); res.status( 500 ).json( { error: "Internal server error" } ); } } ); // GET /accounts/:accountId/statements/:statementId - simulate returning a PDF router.get( "/accounts/:accountId/statements/:statementId", async ( req: Request<{ accountId: string; statementId: string }>, res: Response ) => { - const { accountId, statementId } = req.params; + // Validate accountId path parameter + const accountIdResult = validateAccountId( req.params.accountId ); + if ( !accountIdResult.success ) { + return res.status( 400 ).json( { error: "Validation failed", details: accountIdResult.error } ); + } + const accountId = accountIdResult.data; + + // 
Validate statementId path parameter + const statementIdResult = validateStatementId( req.params.statementId ); + if ( !statementIdResult.success ) { + return res.status( 400 ).json( { error: "Validation failed", details: statementIdResult.error } ); + } + const statementId = statementIdResult.data; try { const account = await verifyAccount( accountId, res, 701 ); @@ -180,50 +234,56 @@ router.get( "/accounts/:accountId/statements/:statementId", async ( req: Request res.setHeader( "Content-Disposition", `inline; filename=statement-${ statementId }.pdf` ); res.setHeader( "Content-Length", buffer.length.toString() ); return res.status( 200 ).send( buffer ); - } catch ( error ) { - logger.error( error, "Error retrieving statement PDF" ); + } catch { + logger.error( { accountId: sanitizeForLogging( accountId ), statementId: sanitizeForLogging( statementId ) }, "Error retrieving statement PDF" ); return res.status( 500 ).json( { error: "Internal server error" } ); } } ); // GET /accounts/:accountId/transactions with pagination support -router.get( "/accounts/:accountId/transactions", async ( req: Request<{ accountId: string }, {}, {}, AccountTransactionsQueryParams>, res: Response ) => { - const { accountId } = req.params; - const offset = parseInt( req.query.offset as string ) || 0; - const limit = parseInt( req.query.limit as string ) || 100; - const startTime = req.query.startTime || ""; - const endTime = req.query.endTime || ""; - - const account = await verifyAccount( accountId, res, 701 ); - if ( !account ) return; - - if ( !isValidDate( startTime ) || !isValidDate( endTime ) ) { - return res.status( 400 ).json( { error: "Invalid date format for startTime or endTime" } ); +router.get( "/accounts/:accountId/transactions", async ( req: Request<{ accountId: string }>, res: Response ) => { + // Validate accountId path parameter + const accountIdResult = validateAccountId( req.params.accountId ); + if ( !accountIdResult.success ) { + return res.status( 400 ).json( { 
error: "Validation failed", details: accountIdResult.error } ); } + const accountId = accountIdResult.data; - if ( startTime && endTime && new Date( startTime ) > new Date( endTime ) ) { - return res.status( 400 ).json( { error: "startTime must be before or equal to endTime" } ); + // Validate query parameters including date range and pagination + const queryResult = validateDateRangePagination( req.query ); + if ( !queryResult.success ) { + return res.status( 400 ).json( { error: "Validation failed", details: queryResult.error } ); } + const { offset, limit, startTime, endTime } = queryResult.data; + + const account = await verifyAccount( accountId, res, 701 ); + if ( !account ) return; try { - const result = await getAccountTransactions( accountId, offset, limit, startTime, endTime ); + const result = await getAccountTransactions( accountId, offset, limit, startTime || "", endTime || "" ); const hasMore = offset + limit < result.total; const page = hasMore ? { nextOffset: String( offset + limit ) } : {}; return res.json( { page, transactions: result.transactions } ); - } catch ( error ) { - logger.error( error, "Error retrieving transactions" ); + } catch { + logger.error( { accountId: sanitizeForLogging( accountId ) }, "Error retrieving transactions" ); return res.status( 500 ).json( { error: "Internal server error" } ); } } ); // GET /accounts/:accountId/payment-networks with pagination support -router.get( "/accounts/:accountId/payment-networks", async ( req: Request<{ accountId: string }, {}, {}, PaymentNetworksQueryParams>, res: Response ) => { - const { accountId } = req.params; - const offset = parseInt( req.query.offset as string ) || 0; - const limit = parseInt( req.query.limit as string ) || 100; +router.get( "/accounts/:accountId/payment-networks", async ( req: Request<{ accountId: string }>, res: Response ) => { + // Validate accountId path parameter + const accountIdResult = validateAccountId( req.params.accountId ); + if ( !accountIdResult.success ) 
{ + return res.status( 400 ).json( { error: "Validation failed", details: accountIdResult.error } ); + } + const accountId = accountIdResult.data; + + // Validate pagination parameters with bounds checking + const { offset, limit } = validatePagination( req.query ); const account = await verifyAccount( accountId, res, 701 ); if ( !account ) return; @@ -243,17 +303,23 @@ router.get( "/accounts/:accountId/payment-networks", async ( req: Request<{ acco }; res.json( response ); - } catch ( error ) { - logger.error( error, "Error retrieving accounts" ); + } catch { + logger.error( { accountId: sanitizeForLogging( accountId ) }, "Error retrieving payment networks" ); res.status( 500 ).json( { error: "Internal server error" } ); } } ); // GET /accounts/:accountId/asset-transfer-networks with pagination support -router.get( "/accounts/:accountId/asset-transfer-networks", async ( req: Request<{ accountId: string }, {}, {}, PaymentNetworksQueryParams>, res: Response ) => { - const { accountId } = req.params; - const offset = parseInt( req.query.offset as string ) || 0; - const limit = parseInt( req.query.limit as string ) || 100; +router.get( "/accounts/:accountId/asset-transfer-networks", async ( req: Request<{ accountId: string }>, res: Response ) => { + // Validate accountId path parameter + const accountIdResult = validateAccountId( req.params.accountId ); + if ( !accountIdResult.success ) { + return res.status( 400 ).json( { error: "Validation failed", details: accountIdResult.error } ); + } + const accountId = accountIdResult.data; + + // Validate pagination parameters with bounds checking + const { offset, limit } = validatePagination( req.query ); const account = await verifyAccount( accountId, res, 701 ); if ( !account ) return; @@ -266,8 +332,8 @@ router.get( "/accounts/:accountId/asset-transfer-networks", async ( req: Request page, assetTransferNetworks: result.assetTransferNetworks } ); - } catch ( error ) { - logger.error( error, "Error retrieving asset transfer 
networks" ); + } catch { + logger.error( { accountId: sanitizeForLogging( accountId ) }, "Error retrieving asset transfer networks" ); return res.status( 500 ).json( { error: "Internal server error" } ); } } ); diff --git a/apps/app/Dockerfile b/apps/app/Dockerfile new file mode 100644 index 0000000..b76f3c9 --- /dev/null +++ b/apps/app/Dockerfile @@ -0,0 +1,76 @@ +# Client Application (Relying Party) Dockerfile +# Multi-stage build for optimal image size and security + +# ============================================================================= +# Stage 1: Build +# ============================================================================= +FROM node:22-alpine AS builder + +# Install pnpm +RUN corepack enable && corepack prepare pnpm@latest --activate + +WORKDIR /app + +# Copy workspace configuration +COPY pnpm-lock.yaml pnpm-workspace.yaml package.json ./ + +# Copy app and shared package files +COPY apps/app/package.json ./apps/app/ +COPY apps/shared/package.json ./apps/shared/ + +# Install dependencies +RUN pnpm install --frozen-lockfile + +# Copy source code +COPY apps/shared ./apps/shared +COPY apps/app ./apps/app +COPY tsconfig.base.json ./ + +# Build shared library first, then app +RUN pnpm --filter @apps/shared build +RUN pnpm --filter @apps/app build + +# ============================================================================= +# Stage 2: Production +# ============================================================================= +FROM node:22-alpine AS production + +# Install pnpm for deploy command +RUN corepack enable && corepack prepare pnpm@latest --activate + +# Create non-root user for security +RUN addgroup -g 1001 -S nodejs && \ + adduser -S nodejs -u 1001 + +WORKDIR /app + +# Copy workspace files +COPY pnpm-lock.yaml pnpm-workspace.yaml package.json ./ +COPY apps/app/package.json ./apps/app/ +COPY apps/shared/package.json ./apps/shared/ + +# Install production dependencies only +RUN pnpm install --frozen-lockfile --prod + +# Copy built 
artifacts from builder stage +COPY --from=builder /app/apps/shared/dist ./apps/shared/dist +COPY --from=builder /app/apps/app/dist ./apps/app/dist +COPY --from=builder /app/apps/app/public ./apps/app/public +COPY --from=builder /app/apps/app/views ./apps/app/views + +# Switch to non-root user +USER nodejs + +# Expose port +EXPOSE 3004 + +# Set environment defaults +ENV NODE_ENV=production +ENV APP_PORT=3004 + +# Health check +HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \ + CMD wget --no-verbose --tries=1 --spider http://localhost:3004/ || exit 1 + +# Start the application +CMD ["node", "apps/app/dist/index.js"] diff --git a/apps/app/public/styles.css b/apps/app/public/styles.css index 05107ae..dbb2e83 100644 --- a/apps/app/public/styles.css +++ b/apps/app/public/styles.css @@ -1,4 +1,4 @@ -/*! tailwindcss v4.1.15 | MIT License | https://tailwindcss.com */ +/*! tailwindcss v4.1.17 | MIT License | https://tailwindcss.com */ @layer properties; @layer theme, base, components, utilities; @layer theme { @@ -169,9 +169,7 @@ ::placeholder { color: currentcolor; @supports (color: color-mix(in lab, red, red)) { - & { - color: color-mix(in oklab, currentcolor 50%, transparent); - } + color: color-mix(in oklab, currentcolor 50%, transparent); } } } diff --git a/apps/app/src/index.ts b/apps/app/src/index.ts index 466ce2b..ead6cce 100644 --- a/apps/app/src/index.ts +++ b/apps/app/src/index.ts @@ -14,6 +14,14 @@ import { setupBasicExpress, setupEJSTemplates } from "@apps/shared"; +import { + apiCallSchema, + tokenSetSchema, + safeJsonParse, + escapeHtml, + sanitizeForLogging, + formatZodError +} from "@apps/shared/validation"; // Polyfill for crypto global in Node.js if ( !globalThis.crypto ) { @@ -73,6 +81,44 @@ let config: client.Configuration | undefined; let configInitPromise: Promise | null = null; let jwks: ReturnType | undefined; +/** + * Safely parse and validate the tokens cookie. + * Returns null if parsing fails or validation fails. 
+ */ +function parseTokensCookie( cookieValue: string | undefined ): TokenSet | null { + if ( !cookieValue ) return null; + + const result = safeJsonParse( cookieValue, tokenSetSchema ); + if ( !result.success ) { + logger.warn( { error: result.error }, "Invalid tokens cookie format" ); + return null; + } + return result.data as TokenSet; +} + +/** + * Safely parse the OIDC state cookie. + * Returns empty object if parsing fails. + */ +function parseOidcCookie( cookieValue: string | undefined ): OidcState { + if ( !cookieValue ) return {} as OidcState; + + try { + const parsed = JSON.parse( cookieValue ); + // Basic validation for expected fields + if ( typeof parsed !== "object" || parsed === null ) { + return {} as OidcState; + } + return { + state: typeof parsed.state === "string" ? parsed.state : "", + code_verifier: typeof parsed.code_verifier === "string" ? parsed.code_verifier : "" + }; + } catch { + logger.warn( "Invalid OIDC state cookie format" ); + return {} as OidcState; + } +} + async function delay( ms: number ) { await new Promise( ( resolve ) => setTimeout( resolve, ms ) ); } @@ -121,10 +167,7 @@ async function ensureConfig(): Promise { // Discovery is performed lazily on demand by routes via ensureConfig() app.get( "/", async ( req: Request, res: Response ) => { - const tokensCookie = ( req as CookieRequest ).cookies["tokens"]; - const tokens: TokenSet | null = tokensCookie - ? JSON.parse( tokensCookie ) - : null; + const tokens = parseTokensCookie( ( req as CookieRequest ).cookies["tokens"] ); res.render( "index", { tokens } ); } ); @@ -165,10 +208,8 @@ app.get( "/callback", async ( req: Request, res: Response ) => { // Force HTTPS protocol since we're behind a proxy const currentUrl = new URL( req.originalUrl, `https://${ req.get( "host" ) }` ); - const oidcCookie = ( req as CookieRequest ).cookies["oidc"]; - const cookieVal: OidcState = oidcCookie - ? 
JSON.parse( oidcCookie ) - : {} as OidcState; + // Safely parse OIDC state cookie + const cookieVal = parseOidcCookie( ( req as CookieRequest ).cookies["oidc"] ); logger.debug( { currentUrl: currentUrl.href }, "Callback - Current URL" ); logger.debug( { redirectUri: REDIRECT_URI }, "Callback - Expected Redirect URI" ); @@ -179,8 +220,10 @@ app.get( "/callback", async ( req: Request, res: Response ) => { // Check for OAuth errors (e.g., user canceled consent) const error = currentUrl.searchParams.get( "error" ); if ( error ) { - const errorDescription = currentUrl.searchParams.get( "error_description" ) || "The authorization request was not completed."; - logger.info( { error, errorDescription }, "Callback - OAuth error received" ); + // Escape error description to prevent XSS attacks + const rawErrorDescription = currentUrl.searchParams.get( "error_description" ) || "The authorization request was not completed."; + const errorDescription = escapeHtml( rawErrorDescription.slice( 0, 500 ) ); // Limit length and escape HTML + logger.info( { error: sanitizeForLogging( error ), errorDescription: sanitizeForLogging( rawErrorDescription ) }, "Callback - OAuth error received" ); // Clear the OIDC state cookie res.clearCookie( "oidc" ); @@ -295,8 +338,7 @@ app.get( "/callback", async ( req: Request, res: Response ) => { // Refresh token endpoint - manually trigger a token refresh app.post( "/refresh", async ( req: Request, res: Response ) => { - const tokensCookie = ( req as CookieRequest ).cookies["tokens"]; - const tokens: TokenSet | null = tokensCookie ? JSON.parse( tokensCookie ) : null; + const tokens = parseTokensCookie( ( req as CookieRequest ).cookies["tokens"] ); logger.debug( { hasTokens: !!tokens, @@ -358,10 +400,7 @@ app.post( "/refresh", async ( req: Request, res: Response ) => { } ); app.get( "/token", async ( req: Request, res: Response ) => { - const tokensCookie = ( req as CookieRequest ).cookies["tokens"]; - const tokens: TokenSet | null = tokensCookie - ? 
JSON.parse( tokensCookie ) - : null; + const tokens = parseTokensCookie( ( req as CookieRequest ).cookies["tokens"] ); if ( !tokens?.access_token || !tokens?.id_token ) return res.redirect( "/login" ); try { @@ -483,30 +522,38 @@ app.get( "/token", async ( req: Request, res: Response ) => { } ); app.get( "/api-explorer", async ( req: Request, res: Response ) => { - const tokensCookie = ( req as CookieRequest ).cookies["tokens"]; - const tokens: TokenSet | null = tokensCookie - ? JSON.parse( tokensCookie ) - : null; + const tokens = parseTokensCookie( ( req as CookieRequest ).cookies["tokens"] ); if ( !tokens?.access_token ) return res.redirect( "/login" ); res.render( "api-explorer", { tokens } ); } ); app.post( "/api-call", express.json(), async ( req: Request, res: Response ) => { - const tokensCookie = ( req as CookieRequest ).cookies["tokens"]; - const tokens: TokenSet | null = tokensCookie - ? JSON.parse( tokensCookie ) - : null; + const tokens = parseTokensCookie( ( req as CookieRequest ).cookies["tokens"] ); if ( !tokens?.access_token ) return res.status( 401 ).json( { error: "No access token" } ); - const { endpoint, method = "GET" } = req.body; - if ( !endpoint ) { - return res.status( 400 ).json( { error: "Endpoint is required" } ); + // Validate API call request against allow-list of endpoints and methods + const validationResult = apiCallSchema.safeParse( req.body ); + if ( !validationResult.success ) { + logger.warn( { + error: formatZodError( validationResult.error ), + rawEndpoint: sanitizeForLogging( String( req.body?.endpoint || "" ) ), + rawMethod: sanitizeForLogging( String( req.body?.method || "" ) ) + }, "POST /api-call - Validation failed" ); + return res.status( 400 ).json( { + error: "Validation failed", + details: formatZodError( validationResult.error ) + } ); } + const { endpoint, method } = validationResult.data; + try { const accessToken = tokens.access_token as string; - const apiResponse = await fetch( `${ API_BASE_URL }${ endpoint 
}`, { + // Clean the endpoint by removing any query strings (handled by validation schema) + const cleanEndpoint = endpoint.split( "?" )[0].split( "#" )[0]; + + const apiResponse = await fetch( `${ API_BASE_URL }${ cleanEndpoint }`, { method, headers: { Authorization: `Bearer ${ accessToken }`, @@ -540,10 +587,7 @@ app.post( "/api-call", express.json(), async ( req: Request, res: Response ) => } ); app.get( "/debug/tokens", async ( req: Request, res: Response ) => { - const tokensCookie = ( req as CookieRequest ).cookies["tokens"]; - const tokens: TokenSet | null = tokensCookie - ? JSON.parse( tokensCookie ) - : null; + const tokens = parseTokensCookie( ( req as CookieRequest ).cookies["tokens"] ); if ( !tokens?.access_token ) { return res.redirect( "/login" ); @@ -574,10 +618,7 @@ app.get( "/debug/tokens", async ( req: Request, res: Response ) => { app.get( "/logout", async ( req: Request, res: Response ) => { logger.info( "Logout route called" ); const config = await ensureConfig(); - const tokensCookie = ( req as CookieRequest ).cookies["tokens"]; - const tokens: TokenSet | null = tokensCookie - ? 
JSON.parse( tokensCookie ) - : null; + const tokens = parseTokensCookie( ( req as CookieRequest ).cookies["tokens"] ); logger.debug( { tokensPresent: !!tokens }, "Logout - tokens present" ); logger.debug( { idTokenPresent: !!tokens?.id_token }, "Logout - id_token present" ); diff --git a/apps/auth/Dockerfile b/apps/auth/Dockerfile new file mode 100644 index 0000000..c03a4b6 --- /dev/null +++ b/apps/auth/Dockerfile @@ -0,0 +1,76 @@ +# Authorization Server (OpenID Provider) Dockerfile +# Multi-stage build for optimal image size and security + +# ============================================================================= +# Stage 1: Build +# ============================================================================= +FROM node:22-alpine AS builder + +# Install pnpm +RUN corepack enable && corepack prepare pnpm@latest --activate + +WORKDIR /app + +# Copy workspace configuration +COPY pnpm-lock.yaml pnpm-workspace.yaml package.json ./ + +# Copy app and shared package files +COPY apps/auth/package.json ./apps/auth/ +COPY apps/shared/package.json ./apps/shared/ + +# Install dependencies +RUN pnpm install --frozen-lockfile + +# Copy source code +COPY apps/shared ./apps/shared +COPY apps/auth ./apps/auth +COPY tsconfig.base.json ./ + +# Build shared library first, then auth +RUN pnpm --filter @apps/shared build +RUN pnpm --filter @apps/auth build + +# ============================================================================= +# Stage 2: Production +# ============================================================================= +FROM node:22-alpine AS production + +# Install pnpm for deploy command +RUN corepack enable && corepack prepare pnpm@latest --activate + +# Create non-root user for security +RUN addgroup -g 1001 -S nodejs && \ + adduser -S nodejs -u 1001 + +WORKDIR /app + +# Copy workspace files +COPY pnpm-lock.yaml pnpm-workspace.yaml package.json ./ +COPY apps/auth/package.json ./apps/auth/ +COPY apps/shared/package.json ./apps/shared/ + +# Install 
production dependencies only +RUN pnpm install --frozen-lockfile --prod + +# Copy built artifacts from builder stage +COPY --from=builder /app/apps/shared/dist ./apps/shared/dist +COPY --from=builder /app/apps/auth/dist ./apps/auth/dist +COPY --from=builder /app/apps/auth/public ./apps/auth/public +COPY --from=builder /app/apps/auth/views ./apps/auth/views + +# Switch to non-root user +USER nodejs + +# Expose port +EXPOSE 3001 + +# Set environment defaults +ENV NODE_ENV=production +ENV OP_PORT=3001 + +# Health check +HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \ + CMD wget --no-verbose --tries=1 --spider http://localhost:3001/.well-known/openid-configuration || exit 1 + +# Start the application +CMD ["node", "apps/auth/dist/index.js"] diff --git a/apps/auth/public/styles.css b/apps/auth/public/styles.css index c065951..6d920d7 100644 --- a/apps/auth/public/styles.css +++ b/apps/auth/public/styles.css @@ -1,4 +1,4 @@ -/*! tailwindcss v4.1.15 | MIT License | https://tailwindcss.com */ +/*! 
tailwindcss v4.1.17 | MIT License | https://tailwindcss.com */ @layer properties; @layer theme, base, components, utilities; @layer theme { @@ -149,9 +149,7 @@ ::placeholder { color: currentcolor; @supports (color: color-mix(in lab, red, red)) { - & { - color: color-mix(in oklab, currentcolor 50%, transparent); - } + color: color-mix(in oklab, currentcolor 50%, transparent); } } } diff --git a/apps/auth/src/index.ts b/apps/auth/src/index.ts index e09780c..3aa2b45 100644 --- a/apps/auth/src/index.ts +++ b/apps/auth/src/index.ts @@ -12,6 +12,17 @@ import { setupBasicExpress, setupEJSTemplates } from "@apps/shared"; +import { + loginSchema, + interactionUidSchema, + oidcClientsSchema, + jwksSchema, + safeJsonParse, + sanitizeForLogging, + formatZodError, + type OIDCClientConfig as BaseOIDCClientConfig +} from "@apps/shared/validation"; +import { timingSafeEqual } from "crypto"; // Create logger for OP service // Debug logging can be enabled by setting LOG_LEVEL=debug in your .env file @@ -32,25 +43,47 @@ const ISSUER = getRequiredEnv( "OP_ISSUER", "https://id.localtest.me" ); const PORT = getRequiredEnvNumber( "OP_PORT", 3001 ); const API_AUDIENCE = getRequiredEnv( "API_AUDIENCE", "api://my-api" ); -// Load clients from environment variable, file, or defaults +// Load clients from environment variable, file, or defaults with schema validation function loadOIDCClients() { + let source: string; + // 1. 
Try OIDC_CLIENTS env var (JSON string) if ( process.env.OIDC_CLIENTS ) { - logger.info( "Loading OIDC clients from OIDC_CLIENTS environment variable" ); - return JSON.parse( process.env.OIDC_CLIENTS ); + source = "OIDC_CLIENTS environment variable"; + logger.info( `Loading OIDC clients from ${ source }` ); + const parseResult = safeJsonParse( process.env.OIDC_CLIENTS, oidcClientsSchema ); + if ( !parseResult.success ) { + logger.error( { error: parseResult.error }, `Invalid OIDC clients configuration from ${ source }` ); + throw new Error( `Invalid OIDC clients configuration: ${ parseResult.error }` ); + } + return parseResult.data; } // 2. Try .env.clients.json file in app directory (for easier multi-client config) const clientsFilePath = resolve( new URL( "../", import.meta.url ).pathname, ".env.clients.json" ); if ( existsSync( clientsFilePath ) ) { - logger.info( "Loading OIDC clients from apps/auth/.env.clients.json" ); - const fileContent = readFileSync( clientsFilePath, "utf-8" ); - return JSON.parse( fileContent ); + source = "apps/auth/.env.clients.json"; + logger.info( `Loading OIDC clients from ${ source }` ); + try { + const fileContent = readFileSync( clientsFilePath, "utf-8" ); + const parseResult = safeJsonParse( fileContent, oidcClientsSchema ); + if ( !parseResult.success ) { + logger.error( { error: parseResult.error }, `Invalid OIDC clients configuration from ${ source }` ); + throw new Error( `Invalid OIDC clients configuration: ${ parseResult.error }` ); + } + return parseResult.data; + } catch ( error ) { + if ( error instanceof Error && error.message.includes( "Invalid OIDC clients" ) ) { + throw error; + } + logger.error( { error }, `Failed to read OIDC clients file from ${ source }` ); + throw new Error( `Failed to read OIDC clients file: ${ error }` ); + } } - // 3. Fall back to single client from env vars + // 3. 
Fall back to single client from env vars (no validation needed - simple defaults) logger.info( "Loading single OIDC client from CLIENT_ID/CLIENT_SECRET env vars" ); - return [ + const rawClients = [ { client_id: getRequiredEnv( "CLIENT_ID", "dev-rp" ), client_secret: getRequiredEnv( "CLIENT_SECRET", "dev-secret" ), @@ -61,12 +94,20 @@ function loadOIDCClients() { token_endpoint_auth_method: "client_secret_basic" } ]; + + // Validate even the default configuration + const parseResult = oidcClientsSchema.safeParse( rawClients ); + if ( !parseResult.success ) { + logger.error( { error: formatZodError( parseResult.error ) }, "Invalid default OIDC client configuration" ); + throw new Error( `Invalid default OIDC client configuration: ${ formatZodError( parseResult.error ) }` ); + } + return parseResult.data; } const OIDC_CLIENTS = loadOIDCClients(); logger.info( `Loaded ${ OIDC_CLIENTS.length } OIDC client(s)` ); -// Load JWKS (JSON Web Key Set) for token signing +// Load JWKS (JSON Web Key Set) for token signing with schema validation // If not provided, oidc-provider will generate ephemeral keys with kid "keystore-CHANGE-ME" function loadJWKS() { const jwksEnv = process.env.JWKS; @@ -77,27 +118,20 @@ function loadJWKS() { return undefined; } - try { - const jwks = JSON.parse( jwksEnv ); - logger.info( `Loaded JWKS with ${ jwks.keys?.length || 0 } key(s)` ); - return jwks; - } catch ( error ) { - logger.error( { error }, "Failed to parse JWKS environment variable" ); - throw new Error( "Invalid JWKS configuration" ); + const parseResult = safeJsonParse( jwksEnv, jwksSchema ); + if ( !parseResult.success ) { + logger.error( { error: parseResult.error }, "Invalid JWKS configuration" ); + throw new Error( `Invalid JWKS configuration: ${ parseResult.error }` ); } + + logger.info( `Loaded JWKS with ${ parseResult.data.keys.length } key(s)` ); + return parseResult.data; } const JWKS = loadJWKS(); -// Define client type with optional force_refresh_token flag -interface 
OIDCClientConfig { - client_id: string; - client_secret: string; - redirect_uris: string[]; - post_logout_redirect_uris: string[]; - grant_types: string[]; - response_types: string[]; - token_endpoint_auth_method: string; +// Extend the validated client config type with optional force_refresh_token flag +interface OIDCClientConfig extends BaseOIDCClientConfig { force_refresh_token?: boolean; } @@ -158,6 +192,37 @@ const USERS = new Map< ] ] ); +/** + * Timing-safe password comparison to prevent timing attacks. + * Always compares full strings even if they differ in length. + */ +function secureComparePasswords( provided: string, stored: string ): boolean { + // Pad to same length to prevent length-based timing leaks + const maxLength = Math.max( provided.length, stored.length ); + const paddedProvided = provided.padEnd( maxLength, "\0" ); + const paddedStored = stored.padEnd( maxLength, "\0" ); + + try { + return timingSafeEqual( + Buffer.from( paddedProvided, "utf8" ), + Buffer.from( paddedStored, "utf8" ) + ) && provided.length === stored.length; + } catch { + return false; + } +} + +/** + * Validate interaction UID path parameter. 
+ */ +function validateInteractionUid( uid: string ): { success: true; data: string } | { success: false; error: string } { + const result = interactionUidSchema.safeParse( uid ); + if ( !result.success ) { + return { success: false, error: formatZodError( result.error ) }; + } + return { success: true, data: result.data }; +} + // eslint-disable-next-line @typescript-eslint/no-explicit-any const configuration: any = { clients: SANITIZED_CLIENTS, @@ -377,7 +442,13 @@ async function main() { // Interactions (login + consent) in-process for simplicity app.get( "/interaction/:uid", async ( req: Request, res: Response ) => { try { - const { uid } = req.params; + // Validate interaction UID path parameter + const uidResult = validateInteractionUid( req.params.uid ); + if ( !uidResult.success ) { + logger.warn( { rawUid: sanitizeForLogging( req.params.uid ), error: uidResult.error }, "GET /interaction/:uid - Invalid UID format" ); + return res.status( 400 ).send( "Invalid interaction identifier" ); + } + const uid = uidResult.data; logger.debug( { uid, query: req.query, cookies: req.cookies }, "GET /interaction/:uid - Request received" ); const details = await provider.interactionDetails( req, res ); @@ -430,19 +501,47 @@ async function main() { express.urlencoded( { extended: false } ), async ( req: Request, res: Response ) => { try { - const { uid } = req.params; - const email = String( req.body?.email || "" ); - const password = String( req.body?.password || "" ); + // Validate interaction UID path parameter + const uidResult = validateInteractionUid( req.params.uid ); + if ( !uidResult.success ) { + logger.warn( { rawUid: sanitizeForLogging( req.params.uid ), error: uidResult.error }, "POST /interaction/:uid/login - Invalid UID format" ); + return res.status( 400 ).send( "Invalid interaction identifier" ); + } + const uid = uidResult.data; + + // Validate login input using schema with length and format checks + const loginResult = loginSchema.safeParse( req.body ); + 
if ( !loginResult.success ) { + logger.debug( { uid, error: formatZodError( loginResult.error ) }, "POST /interaction/:uid/login - Input validation failed" ); + + // Get interaction details to extract scopes for re-rendering + const details = await provider.interactionDetails( req, res ); + const requestedScopes = String( details.params.scope || "" ) + .split( " " ) + .filter( Boolean ); + + // Re-render login form with validation error + return res.render( "interaction", { + uid, + prompt: "login", + scopes: requestedScopes, + error: "Invalid email or password format.", + email: String( req.body?.email || "" ).slice( 0, 254 ) // Preserve truncated email + } ); + } + + const { email, password } = loginResult.data; logger.debug( { uid, - email, + email: sanitizeForLogging( email ), passwordProvided: !!password }, "POST /interaction/:uid/login - Login attempt" ); const user = USERS.get( email ); - if ( !user || user.password != password ) { - logger.debug( { uid, email, userFound: !!user }, "POST /interaction/:uid/login - Authentication failed" ); + // Use timing-safe comparison to prevent timing attacks + if ( !user || !secureComparePasswords( password, user.password ) ) { + logger.debug( { uid, email: sanitizeForLogging( email ), userFound: !!user }, "POST /interaction/:uid/login - Authentication failed" ); // Get interaction details to extract scopes for re-rendering const details = await provider.interactionDetails( req, res ); @@ -534,7 +633,13 @@ async function main() { express.urlencoded( { extended: false } ), async ( req: Request, res: Response ) => { try { - const { uid } = req.params; + // Validate interaction UID path parameter + const uidResult = validateInteractionUid( req.params.uid ); + if ( !uidResult.success ) { + logger.warn( { rawUid: sanitizeForLogging( req.params.uid ), error: uidResult.error }, "POST /interaction/:uid/confirm - Invalid UID format" ); + return res.status( 400 ).send( "Invalid interaction identifier" ); + } + const uid = 
uidResult.data; logger.debug( { uid }, "POST /interaction/:uid/confirm - Consent confirmation received" ); const details = await provider.interactionDetails( req, res ); @@ -644,7 +749,13 @@ async function main() { "/interaction/:uid/cancel", express.urlencoded( { extended: false } ), async ( req: Request, res: Response ) => { - const { uid } = req.params; + // Validate interaction UID path parameter + const uidResult = validateInteractionUid( req.params.uid ); + if ( !uidResult.success ) { + logger.warn( { rawUid: sanitizeForLogging( req.params.uid ), error: uidResult.error }, "POST /interaction/:uid/cancel - Invalid UID format" ); + return res.status( 400 ).send( "Invalid interaction identifier" ); + } + const uid = uidResult.data; logger.debug( { uid }, "POST /interaction/:uid/cancel - Consent cancelled" ); const details = await provider.interactionDetails( req, res ); diff --git a/apps/shared/package.json b/apps/shared/package.json index 6fe2202..46db93c 100644 --- a/apps/shared/package.json +++ b/apps/shared/package.json @@ -26,6 +26,10 @@ "./middleware": { "import": "./dist/middleware.js", "types": "./dist/middleware.d.ts" + }, + "./validation": { + "import": "./dist/validation.js", + "types": "./dist/validation.d.ts" } }, "scripts": { @@ -44,7 +48,8 @@ "dependencies": { "helmet": "^8.1.0", "pino": "^9.14.0", - "pino-pretty": "^13.1.3" + "pino-pretty": "^13.1.3", + "zod": "^4.3.5" }, "devDependencies": { "@types/express": "^5.0.6", diff --git a/apps/shared/src/index.ts b/apps/shared/src/index.ts index ac276ae..79f7fd4 100644 --- a/apps/shared/src/index.ts +++ b/apps/shared/src/index.ts @@ -11,6 +11,7 @@ export * from "./security.js"; export * from "./environment.js"; export * from "./logging.js"; export * from "./middleware.js"; +export * from "./validation.js"; // Version and metadata export const version = "0.1.0"; diff --git a/apps/shared/src/validation.ts b/apps/shared/src/validation.ts new file mode 100644 index 0000000..a229259 --- /dev/null +++ 
b/apps/shared/src/validation.ts @@ -0,0 +1,520 @@ +/** + * @apps/shared - Input Validation Utilities + * + * Implements thorough input validation using positive (allow-list) validation + * approaches for expected format, length, and type per IOH-IV-01 requirements. + */ + +import { z } from "zod"; +import type { Request, Response, NextFunction } from "express"; + +// ============================================================================= +// VALIDATION CONSTANTS +// ============================================================================= + +/** Maximum allowed pagination offset to prevent DoS via large offsets */ +export const MAX_PAGINATION_OFFSET = 100000; + +/** Maximum allowed pagination limit to prevent memory exhaustion */ +export const MAX_PAGINATION_LIMIT = 1000; + +/** Default pagination limit when not specified */ +export const DEFAULT_PAGINATION_LIMIT = 100; + +/** Maximum length for account IDs */ +export const MAX_ACCOUNT_ID_LENGTH = 50; + +/** Maximum length for statement IDs */ +export const MAX_STATEMENT_ID_LENGTH = 50; + +/** Maximum length for interaction UIDs */ +export const MAX_INTERACTION_UID_LENGTH = 100; + +/** Maximum length for email addresses */ +export const MAX_EMAIL_LENGTH = 254; + +/** Maximum length for passwords */ +export const MAX_PASSWORD_LENGTH = 128; + +/** Minimum length for passwords */ +export const MIN_PASSWORD_LENGTH = 1; + +/** Maximum reasonable date range in years for queries */ +export const MAX_DATE_RANGE_YEARS = 10; + +// ============================================================================= +// PAGINATION SCHEMAS +// ============================================================================= + +/** + * Schema for pagination query parameters with bounds checking. + * Uses positive validation with explicit min/max constraints. 
+ */ +export const paginationSchema = z.object( { + offset: z + .string() + .optional() + .transform( ( val ) => { + if ( !val ) return 0; + const num = parseInt( val, 10 ); + if ( isNaN( num ) || num < 0 ) return 0; + return Math.min( num, MAX_PAGINATION_OFFSET ); + } ), + limit: z + .string() + .optional() + .transform( ( val ) => { + if ( !val ) return DEFAULT_PAGINATION_LIMIT; + const num = parseInt( val, 10 ); + if ( isNaN( num ) || num < 1 ) return DEFAULT_PAGINATION_LIMIT; + return Math.min( num, MAX_PAGINATION_LIMIT ); + } ) +} ); + +export type PaginationParams = z.infer<typeof paginationSchema>; + +// ============================================================================= +// ACCOUNT ID SCHEMAS +// ============================================================================= + +/** + * Allow-list pattern for account IDs. + * Accepts: UUID format OR account-{number} format + */ +const UUID_PATTERN = /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i; +const ACCOUNT_ID_PATTERN = /^account-[0-9]+$/; + +/** + * Schema for validating account IDs using allow-list approach. + * Accepts UUID or account-{number} format with length constraints. + */ +export const accountIdSchema = z + .string() + .min( 1, "Account ID is required" ) + .max( MAX_ACCOUNT_ID_LENGTH, `Account ID must not exceed ${ MAX_ACCOUNT_ID_LENGTH } characters` ) + .refine( + ( val ) => UUID_PATTERN.test( val ) || ACCOUNT_ID_PATTERN.test( val ), + { message: "Account ID must be a valid UUID or match pattern 'account-{number}'" } + ); + +/** + * Schema for validating statement IDs using allow-list approach. 
+ */ +export const statementIdSchema = z + .string() + .min( 1, "Statement ID is required" ) + .max( MAX_STATEMENT_ID_LENGTH, `Statement ID must not exceed ${ MAX_STATEMENT_ID_LENGTH } characters` ) + .refine( + ( val ) => UUID_PATTERN.test( val ) || /^stmt-[0-9]+$/.test( val ), + { message: "Statement ID must be a valid UUID or match pattern 'stmt-{number}'" } + ); + +/** + * Schema for validating interaction UIDs (from oidc-provider). + * Allow-list: alphanumeric with hyphens and underscores. + */ +export const interactionUidSchema = z + .string() + .min( 1, "Interaction UID is required" ) + .max( MAX_INTERACTION_UID_LENGTH, `Interaction UID must not exceed ${ MAX_INTERACTION_UID_LENGTH } characters` ) + .regex( /^[a-zA-Z0-9_-]+$/, "Interaction UID contains invalid characters" ); + +// ============================================================================= +// DATE SCHEMAS +// ============================================================================= + +/** + * Schema for ISO 8601 date strings with reasonable range validation. + */ +export const dateStringSchema = z + .string() + .refine( + ( val ) => { + const date = new Date( val ); + return !isNaN( date.getTime() ); + }, + { message: "Invalid date format. Use ISO 8601 format (e.g., 2024-01-15T00:00:00Z)" } + ) + .refine( + ( val ) => { + const date = new Date( val ); + const now = new Date(); + const minDate = new Date( now.getFullYear() - MAX_DATE_RANGE_YEARS, 0, 1 ); + const maxDate = new Date( now.getFullYear() + 1, 11, 31 ); + return date >= minDate && date <= maxDate; + }, + { message: `Date must be within a reasonable range (within ${ MAX_DATE_RANGE_YEARS } years)` } + ); + +/** + * Schema for optional date range parameters. 
+ */ +export const dateRangeSchema = z.object( { + startTime: z.string().optional(), + endTime: z.string().optional() +} ).refine( + ( data ) => { + if ( !data.startTime && !data.endTime ) return true; + if ( data.startTime && !isValidDateString( data.startTime ) ) return false; + if ( data.endTime && !isValidDateString( data.endTime ) ) return false; + if ( data.startTime && data.endTime ) { + return new Date( data.startTime ) <= new Date( data.endTime ); + } + return true; + }, + { message: "Invalid date range: startTime must be before or equal to endTime" } +); + +/** Helper to validate date strings */ +function isValidDateString( val: string ): boolean { + const date = new Date( val ); + if ( isNaN( date.getTime() ) ) return false; + const now = new Date(); + const minDate = new Date( now.getFullYear() - MAX_DATE_RANGE_YEARS, 0, 1 ); + const maxDate = new Date( now.getFullYear() + 1, 11, 31 ); + return date >= minDate && date <= maxDate; +} + +// ============================================================================= +// AUTHENTICATION SCHEMAS +// ============================================================================= + +/** + * Schema for email validation using allow-list approach. + * Based on RFC 5322 simplified pattern with length constraints. + */ +export const emailSchema = z + .string() + .min( 1, "Email is required" ) + .max( MAX_EMAIL_LENGTH, `Email must not exceed ${ MAX_EMAIL_LENGTH } characters` ) + .email( "Invalid email format" ) + .transform( ( val ) => val.toLowerCase().trim() ); + +/** + * Schema for password validation with length constraints. + * Note: Actual password strength validation should be done at registration. + */ +export const passwordSchema = z + .string() + .min( MIN_PASSWORD_LENGTH, `Password must be at least ${ MIN_PASSWORD_LENGTH } character` ) + .max( MAX_PASSWORD_LENGTH, `Password must not exceed ${ MAX_PASSWORD_LENGTH } characters` ); + +/** + * Schema for login request body. 
+ */ +export const loginSchema = z.object( { + email: emailSchema, + password: passwordSchema +} ); + +export type LoginInput = z.infer<typeof loginSchema>; + +// ============================================================================= +// API ENDPOINT ALLOW-LIST +// ============================================================================= + +/** + * Allow-list of valid API endpoints for the API Explorer. + * Uses pattern matching to support parameterized routes. + */ +export const ALLOWED_API_ENDPOINTS: RegExp[] = [ + /^\/api\/fdx\/v6\/customers\/current$/, + /^\/api\/fdx\/v6\/accounts$/, + /^\/api\/fdx\/v6\/accounts\/[a-zA-Z0-9_-]+$/, + /^\/api\/fdx\/v6\/accounts\/[a-zA-Z0-9_-]+\/contact$/, + /^\/api\/fdx\/v6\/accounts\/[a-zA-Z0-9_-]+\/statements$/, + /^\/api\/fdx\/v6\/accounts\/[a-zA-Z0-9_-]+\/statements\/[a-zA-Z0-9_-]+$/, + /^\/api\/fdx\/v6\/accounts\/[a-zA-Z0-9_-]+\/transactions$/, + /^\/api\/fdx\/v6\/accounts\/[a-zA-Z0-9_-]+\/payment-networks$/, + /^\/api\/fdx\/v6\/accounts\/[a-zA-Z0-9_-]+\/asset-transfer-networks$/ +]; + +/** + * Allow-list of valid HTTP methods for the API Explorer. + */ +export const ALLOWED_HTTP_METHODS = [ "GET", "HEAD", "OPTIONS" ] as const; + +export type AllowedHttpMethod = typeof ALLOWED_HTTP_METHODS[number]; + +/** + * Schema for API call requests from the API Explorer. + */ +export const apiCallSchema = z.object( { + endpoint: z + .string() + .min( 1, "Endpoint is required" ) + .max( 500, "Endpoint too long" ) + .refine( + ( val ) => { + // Normalize the endpoint (remove query strings and fragments) + const cleanEndpoint = val.split( "?" 
)[0].split( "#" )[0]; + return ALLOWED_API_ENDPOINTS.some( ( pattern ) => pattern.test( cleanEndpoint ) ); + }, + { message: "Endpoint not in allow-list of valid API endpoints" } + ), + method: z + .string() + .optional() + .transform( ( val ) => val?.toUpperCase() || "GET" ) + .refine( + ( val ): val is AllowedHttpMethod => ALLOWED_HTTP_METHODS.includes( val as AllowedHttpMethod ), + { message: `HTTP method must be one of: ${ ALLOWED_HTTP_METHODS.join( ", " ) }` } + ) +} ); + +export type ApiCallInput = z.infer<typeof apiCallSchema>; + +// ============================================================================= +// CONFIGURATION SCHEMAS +// ============================================================================= + +/** + * Schema for OIDC client configuration with allow-list validation. + */ +export const oidcClientSchema = z.object( { + client_id: z.string().min( 1 ).max( 100 ), + client_secret: z.string().min( 1 ).max( 500 ), + redirect_uris: z.array( z.string().url() ).min( 1 ), + post_logout_redirect_uris: z.array( z.string().url() ).optional(), + grant_types: z.array( + z.enum( [ "authorization_code", "refresh_token", "client_credentials" ] ) + ), + response_types: z.array( z.enum( [ "code", "token", "id_token" ] ) ), + token_endpoint_auth_method: z.enum( [ + "client_secret_basic", + "client_secret_post", + "none" + ] ).optional() +} ); + +export const oidcClientsSchema = z.array( oidcClientSchema ); + +export type OIDCClientConfig = z.infer<typeof oidcClientSchema>; + +/** + * Schema for JWKS (JSON Web Key Set) configuration. + * Validates structure according to RFC 7517. 
+ */ +export const jwkSchema = z.object( { + kty: z.enum( [ "RSA", "EC", "OKP", "oct" ] ), + use: z.enum( [ "sig", "enc" ] ).optional(), + key_ops: z.array( z.string() ).optional(), + alg: z.string().optional(), + kid: z.string().optional(), + // RSA specific + n: z.string().optional(), + e: z.string().optional(), + d: z.string().optional(), + p: z.string().optional(), + q: z.string().optional(), + dp: z.string().optional(), + dq: z.string().optional(), + qi: z.string().optional(), + // EC specific + crv: z.string().optional(), + x: z.string().optional(), + y: z.string().optional() +} ); + +export const jwksSchema = z.object( { + keys: z.array( jwkSchema ).min( 1, "JWKS must contain at least one key" ) +} ); + +export type JWKSConfig = z.infer<typeof jwksSchema>; + +// ============================================================================= +// JSON PARSING UTILITIES +// ============================================================================= + +/** + * Safely parse JSON with validation schema. + * Returns result object with success flag, data, or error. + */ +export function safeJsonParse<T>( + jsonString: string, + schema: z.ZodType<T> +): { success: true; data: T } | { success: false; error: string } { + try { + const parsed = JSON.parse( jsonString ); + const result = schema.safeParse( parsed ); + if ( result.success ) { + return { success: true, data: result.data }; + } + return { success: false, error: formatZodError( result.error ) }; + } catch ( e ) { + return { + success: false, + error: e instanceof Error ? `JSON parse error: ${ e.message }` : "Invalid JSON" + }; + } +} + +/** + * Format Zod validation errors into a readable string. + * Compatible with Zod v4 which uses `issues` instead of `errors`. + */ +export function formatZodError( error: z.ZodError ): string { + return error.issues + .map( ( issue ) => `${ issue.path.join( "." 
) }: ${ issue.message }` ) + .join( "; " ); +} + +// ============================================================================= +// EXPRESS MIDDLEWARE FACTORIES +// ============================================================================= + +/** + * Create Express middleware for validating request body against a schema. + */ +export function validateBody<T>( schema: z.ZodType<T> ) { + return ( req: Request, res: Response, next: NextFunction ): void => { + const result = schema.safeParse( req.body ); + if ( !result.success ) { + res.status( 400 ).json( { + error: "Validation failed", + details: formatZodError( result.error ) + } ); + return; + } + req.body = result.data; + next(); + }; +} + +/** + * Create Express middleware for validating request query parameters. + */ +export function validateQuery<T>( schema: z.ZodType<T> ) { + return ( req: Request, res: Response, next: NextFunction ): void => { + const result = schema.safeParse( req.query ); + if ( !result.success ) { + res.status( 400 ).json( { + error: "Validation failed", + details: formatZodError( result.error ) + } ); + return; + } + ( req as Request & { validatedQuery: T } ).validatedQuery = result.data; + next(); + }; +} + +/** + * Create Express middleware for validating request path parameters. + */ +export function validateParams<T>( schema: z.ZodType<T> ) { + return ( req: Request, res: Response, next: NextFunction ): void => { + const result = schema.safeParse( req.params ); + if ( !result.success ) { + res.status( 400 ).json( { + error: "Validation failed", + details: formatZodError( result.error ) + } ); + return; + } + ( req as Request & { validatedParams: T } ).validatedParams = result.data; + next(); + }; +} + +// ============================================================================= +// SANITIZATION UTILITIES +// ============================================================================= + +/** + * Sanitize string for safe logging (remove newlines, control characters). 
+ */ +export function sanitizeForLogging( input: string, maxLength = 200 ): string { + return input + // eslint-disable-next-line no-control-regex + .replace( /[\x00-\x1F\x7F]/g, "" ) // Remove control characters + .replace( /[\r\n]/g, " " ) // Replace newlines with spaces + .slice( 0, maxLength ); +} + +/** + * Escape HTML entities to prevent XSS in error messages. + */ +export function escapeHtml( input: string ): string { + const htmlEntities: Record<string, string> = { + "&": "&amp;", + "<": "&lt;", + ">": "&gt;", + "\"": "&quot;", + "'": "&#x27;", + "/": "&#x2F;" + }; + return input.replace( /[&<>"'/]/g, ( char ) => htmlEntities[char] || char ); +} + +// ============================================================================= +// TOKEN SCHEMAS +// ============================================================================= + +/** + * Schema for token cookie content (after JSON parsing). + */ +export const tokenSetSchema = z.object( { + access_token: z.string().min( 1 ), + token_type: z.string().optional(), + id_token: z.string().optional(), + refresh_token: z.string().optional(), + expires_at: z.number().optional(), + scope: z.string().optional() +} ); + +export type TokenSetInput = z.infer<typeof tokenSetSchema>; + +// ============================================================================= +// COMBINED SCHEMAS FOR ROUTES +// ============================================================================= + +/** + * Schema for account endpoints with pagination. + */ +export const accountsQuerySchema = paginationSchema; + +/** + * Schema for statements/transactions endpoints with date range and pagination. 
+ */ +export const dateRangePaginationSchema = z.object( { + offset: paginationSchema.shape.offset, + limit: paginationSchema.shape.limit, + startTime: z.string().optional(), + endTime: z.string().optional() +} ).refine( + ( data ) => { + if ( !data.startTime && !data.endTime ) return true; + if ( data.startTime && !isValidDateString( data.startTime ) ) return false; + if ( data.endTime && !isValidDateString( data.endTime ) ) return false; + if ( data.startTime && data.endTime ) { + return new Date( data.startTime ) <= new Date( data.endTime ); + } + return true; + }, + { message: "Invalid date range: dates must be valid and startTime must be before or equal to endTime" } +); + +export type DateRangePaginationParams = z.infer<typeof dateRangePaginationSchema>; + +/** + * Schema for account path parameter. + */ +export const accountParamsSchema = z.object( { + accountId: accountIdSchema +} ); + +/** + * Schema for statement path parameters. + */ +export const statementParamsSchema = z.object( { + accountId: accountIdSchema, + statementId: statementIdSchema +} ); + +/** + * Schema for interaction path parameter. + */ +export const interactionParamsSchema = z.object( { + uid: interactionUidSchema +} ); diff --git a/docker-compose.prod.example.yml b/docker-compose.prod.example.yml new file mode 100644 index 0000000..aeaa149 --- /dev/null +++ b/docker-compose.prod.example.yml @@ -0,0 +1,181 @@ +# Docker Compose Production Example +# ============================================================================= +# This is an EXAMPLE configuration for production deployments. +# Copy this file and modify it according to your infrastructure requirements. +# +# IMPORTANT: This file is provided as a reference only. You should: +# 1. Use proper secret management (not environment variables in compose files) +# 2. Configure proper networking for your infrastructure +# 3. Set up proper logging and monitoring +# 4. Use a container registry for your images +# 5. 
Consider using Kubernetes or similar for production orchestration +# +# Usage: +# cp docker-compose.prod.example.yml docker-compose.prod.yml +# # Edit docker-compose.prod.yml with your production values +# docker compose -f docker-compose.prod.yml up -d +# ============================================================================= + +services: + auth: + image: ${REGISTRY:-ghcr.io/your-org}/core-exchange-auth:${VERSION:-latest} + # Or build from source: + # build: + # context: . + # dockerfile: apps/auth/Dockerfile + container_name: core-exchange-auth + ports: + - "3001:3001" + environment: + - NODE_ENV=production + - OP_ISSUER=${OP_ISSUER} + - OP_PORT=3001 + - CLIENT_ID=${CLIENT_ID} + - CLIENT_SECRET=${CLIENT_SECRET} + - REDIRECT_URI=${REDIRECT_URI} + - API_AUDIENCE=${API_AUDIENCE} + - JWKS=${JWKS} + - LOG_LEVEL=info + # For production, consider using Docker secrets or external secret management + # secrets: + # - client_secret + # - jwks + healthcheck: + test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:3001/.well-known/openid-configuration"] + interval: 30s + timeout: 3s + retries: 3 + start_period: 10s + restart: always + deploy: + resources: + limits: + cpus: '1' + memory: 512M + reservations: + cpus: '0.25' + memory: 128M + logging: + driver: "json-file" + options: + max-size: "10m" + max-file: "3" + + api: + image: ${REGISTRY:-ghcr.io/your-org}/core-exchange-api:${VERSION:-latest} + container_name: core-exchange-api + ports: + - "3003:3003" + environment: + - NODE_ENV=production + - OP_ISSUER=${OP_ISSUER} + - API_HOST=${API_HOST} + - API_PORT=3003 + - API_AUDIENCE=${API_AUDIENCE} + - LOG_LEVEL=info + depends_on: + auth: + condition: service_healthy + healthcheck: + test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:3003/health"] + interval: 30s + timeout: 3s + retries: 3 + start_period: 10s + restart: always + deploy: + resources: + limits: + cpus: '1' + memory: 512M + reservations: + cpus: '0.25' 
+ memory: 128M + logging: + driver: "json-file" + options: + max-size: "10m" + max-file: "3" + + app: + image: ${REGISTRY:-ghcr.io/your-org}/core-exchange-app:${VERSION:-latest} + container_name: core-exchange-app + ports: + - "3004:3004" + environment: + - NODE_ENV=production + - OP_ISSUER=${OP_ISSUER} + - APP_HOST=${APP_HOST} + - APP_PORT=3004 + - CLIENT_ID=${CLIENT_ID} + - CLIENT_SECRET=${CLIENT_SECRET} + - REDIRECT_URI=${REDIRECT_URI} + - API_BASE_URL=${API_BASE_URL} + - API_AUDIENCE=${API_AUDIENCE} + - COOKIE_SECRET=${COOKIE_SECRET} + - LOG_LEVEL=info + depends_on: + auth: + condition: service_healthy + api: + condition: service_healthy + healthcheck: + test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:3004/"] + interval: 30s + timeout: 3s + retries: 3 + start_period: 10s + restart: always + deploy: + resources: + limits: + cpus: '1' + memory: 512M + reservations: + cpus: '0.25' + memory: 128M + logging: + driver: "json-file" + options: + max-size: "10m" + max-file: "3" + + # Reverse proxy for HTTPS termination + caddy: + image: caddy:2-alpine + container_name: core-exchange-caddy + ports: + - "80:80" + - "443:443" + volumes: + - ./caddyfile.prod:/etc/caddy/Caddyfile:ro + - caddy_data:/data + - caddy_config:/config + depends_on: + - auth + - api + - app + restart: always + deploy: + resources: + limits: + cpus: '0.5' + memory: 128M + logging: + driver: "json-file" + options: + max-size: "10m" + max-file: "3" + +volumes: + caddy_data: + caddy_config: + +# Example Docker secrets configuration (uncomment if using secrets) +# secrets: +# client_secret: +# external: true +# cookie_secret: +# external: true +# jwks: +# external: true diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..87d9416 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,118 @@ +# Docker Compose for Local Development +# ============================================================================= +# This configuration builds and 
runs all services locally using Docker. +# +# Usage: +# docker compose up --build # Build and start all services +# docker compose up -d # Start in detached mode +# docker compose logs -f # Follow logs +# docker compose down # Stop and remove containers +# +# Note: For local HTTPS, you'll still need Caddy running on the host. +# This compose file exposes services on their default ports. +# ============================================================================= + +services: + auth: + build: + context: . + dockerfile: apps/auth/Dockerfile + container_name: core-exchange-auth + ports: + - "3001:3001" + environment: + - NODE_ENV=production + - OP_ISSUER=${OP_ISSUER:-https://id.localtest.me} + - OP_PORT=3001 + - CLIENT_ID=${CLIENT_ID:-dev-rp} + - CLIENT_SECRET=${CLIENT_SECRET:-dev-secret-CHANGE-FOR-PRODUCTION} + - REDIRECT_URI=${REDIRECT_URI:-https://app.localtest.me/callback} + - API_AUDIENCE=${API_AUDIENCE:-api://my-api} + - LOG_LEVEL=${LOG_LEVEL:-info} + healthcheck: + test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:3001/.well-known/openid-configuration"] + interval: 30s + timeout: 3s + retries: 3 + start_period: 10s + restart: unless-stopped + + api: + build: + context: . + dockerfile: apps/api/Dockerfile + container_name: core-exchange-api + ports: + - "3003:3003" + environment: + - NODE_ENV=production + - OP_ISSUER=${OP_ISSUER:-https://id.localtest.me} + - API_HOST=${API_HOST:-https://api.localtest.me} + - API_PORT=3003 + - API_AUDIENCE=${API_AUDIENCE:-api://my-api} + - LOG_LEVEL=${LOG_LEVEL:-info} + depends_on: + auth: + condition: service_healthy + healthcheck: + test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:3003/health"] + interval: 30s + timeout: 3s + retries: 3 + start_period: 10s + restart: unless-stopped + + app: + build: + context: . 
+ dockerfile: apps/app/Dockerfile + container_name: core-exchange-app + ports: + - "3004:3004" + environment: + - NODE_ENV=production + - OP_ISSUER=${OP_ISSUER:-https://id.localtest.me} + - APP_HOST=${APP_HOST:-https://app.localtest.me} + - APP_PORT=3004 + - CLIENT_ID=${CLIENT_ID:-dev-rp} + - CLIENT_SECRET=${CLIENT_SECRET:-dev-secret-CHANGE-FOR-PRODUCTION} + - REDIRECT_URI=${REDIRECT_URI:-https://app.localtest.me/callback} + - API_BASE_URL=${API_BASE_URL:-https://api.localtest.me} + - API_AUDIENCE=${API_AUDIENCE:-api://my-api} + - COOKIE_SECRET=${COOKIE_SECRET:-dev-cookie-secret-CHANGE-FOR-PRODUCTION} + - LOG_LEVEL=${LOG_LEVEL:-info} + depends_on: + auth: + condition: service_healthy + api: + condition: service_healthy + healthcheck: + test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:3004/"] + interval: 30s + timeout: 3s + retries: 3 + start_period: 10s + restart: unless-stopped + +# Optional: Add a Caddy service for HTTPS termination +# Uncomment the following to include Caddy in the compose stack +# +# caddy: +# image: caddy:2-alpine +# container_name: core-exchange-caddy +# ports: +# - "80:80" +# - "443:443" +# volumes: +# - ./caddyfile:/etc/caddy/Caddyfile:ro +# - caddy_data:/data +# - caddy_config:/config +# depends_on: +# - auth +# - api +# - app +# restart: unless-stopped +# +# volumes: +# caddy_data: +# caddy_config: diff --git a/package.json b/package.json index dba7d90..9d01e84 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "name": "core-exchange-node-example", "private": true, "version": "0.1.1", - "packageManager": "pnpm@10.26.0+sha512.3b3f6c725ebe712506c0ab1ad4133cf86b1f4b687effce62a9b38b4d72e3954242e643190fc51fa1642949c735f403debd44f5cb0edd657abe63a8b6a7e1e402", + "packageManager": "pnpm@10.28.0+sha512.05df71d1421f21399e053fde567cea34d446fa02c76571441bfc1c7956e98e363088982d940465fd34480d4d90a0668bc12362f8aa88000a64e83d0b0e47be48", "type": "module", "author": "David Neal (https://reverentgeek.com)", 
"contributors": [], @@ -25,12 +25,12 @@ }, "devDependencies": { "@eslint/js": "^9.39.2", - "@stylistic/eslint-plugin": "^5.6.1", - "@typescript-eslint/eslint-plugin": "^8.50.0", - "@typescript-eslint/parser": "^8.50.0", + "@stylistic/eslint-plugin": "^5.7.0", + "@typescript-eslint/eslint-plugin": "^8.53.0", + "@typescript-eslint/parser": "^8.53.0", "concurrently": "^9.2.1", "eslint": "^9.39.2", - "globals": "^16.5.0", + "globals": "^17.0.0", "typescript": "^5.9.3" }, "dependencies": { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 6b8728d..efc58d6 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -16,14 +16,14 @@ importers: specifier: ^9.39.2 version: 9.39.2 '@stylistic/eslint-plugin': - specifier: ^5.6.1 - version: 5.6.1(eslint@9.39.2(jiti@2.6.1)) + specifier: ^5.7.0 + version: 5.7.0(eslint@9.39.2(jiti@2.6.1)) '@typescript-eslint/eslint-plugin': - specifier: ^8.50.0 - version: 8.50.0(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) + specifier: ^8.53.0 + version: 8.53.0(@typescript-eslint/parser@8.53.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) '@typescript-eslint/parser': - specifier: ^8.50.0 - version: 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) + specifier: ^8.53.0 + version: 8.53.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) concurrently: specifier: ^9.2.1 version: 9.2.1 @@ -31,8 +31,8 @@ importers: specifier: ^9.39.2 version: 9.39.2(jiti@2.6.1) globals: - specifier: ^16.5.0 - version: 16.5.0 + specifier: ^17.0.0 + version: 17.0.0 typescript: specifier: ^5.9.3 version: 5.9.3 @@ -195,6 +195,9 @@ importers: pino-pretty: specifier: ^13.1.3 version: 13.1.3 + zod: + specifier: ^4.3.5 + version: 4.3.5 devDependencies: '@types/express': specifier: ^5.0.6 @@ -312,8 +315,8 @@ packages: resolution: {integrity: sha512-d5X6RMYv6taIymSk8JBP+nxv8DQAMY6A51GPgusqLdK9wBz5wWIXy1KjTck6HnjE9hqJzJRdk+1p/t5soSbCtw==} engines: {node: '>=18'} - 
'@eslint-community/eslint-utils@4.9.0': - resolution: {integrity: sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g==} + '@eslint-community/eslint-utils@4.9.1': + resolution: {integrity: sha512-phrYmNiYppR7znFEdqgfWHXR6NCkZEK7hwWDHZUjit/2/U0r6XvkDl0SYnoM51Hq7FhCGdLDT6zxCCOY1hexsQ==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} peerDependencies: eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 @@ -449,8 +452,8 @@ packages: '@pinojs/redact@0.4.0': resolution: {integrity: sha512-k2ENnmBugE/rzQfEcdWHcCY+/FM3VLzH9cYEsbdsoqrvzAKRhUZeRNhAZvB8OitQJ1TBed3yqWtdjzS6wJKBwg==} - '@stylistic/eslint-plugin@5.6.1': - resolution: {integrity: sha512-JCs+MqoXfXrRPGbGmho/zGS/jMcn3ieKl/A8YImqib76C8kjgZwq5uUFzc30lJkMvcchuRn6/v8IApLxli3Jyw==} + '@stylistic/eslint-plugin@5.7.0': + resolution: {integrity: sha512-PsSugIf9ip1H/mWKj4bi/BlEoerxXAda9ByRFsYuwsmr6af9NxJL0AaiNXs8Le7R21QR5KMiD/KdxZZ71LjAxQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: '>=9.0.0' @@ -565,63 +568,63 @@ packages: '@types/serve-static@2.2.0': resolution: {integrity: sha512-8mam4H1NHLtu7nmtalF7eyBH14QyOASmcxHhSfEoRyr0nP/YdoesEtU+uSRvMe96TW/HPTtkoKqQLl53N7UXMQ==} - '@typescript-eslint/eslint-plugin@8.50.0': - resolution: {integrity: sha512-O7QnmOXYKVtPrfYzMolrCTfkezCJS9+ljLdKW/+DCvRsc3UAz+sbH6Xcsv7p30+0OwUbeWfUDAQE0vpabZ3QLg==} + '@typescript-eslint/eslint-plugin@8.53.0': + resolution: {integrity: sha512-eEXsVvLPu8Z4PkFibtuFJLJOTAV/nPdgtSjkGoPpddpFk3/ym2oy97jynY6ic2m6+nc5M8SE1e9v/mHKsulcJg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: - '@typescript-eslint/parser': ^8.50.0 + '@typescript-eslint/parser': ^8.53.0 eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/parser@8.50.0': - resolution: {integrity: sha512-6/cmF2piao+f6wSxUsJLZjck7OQsYyRtcOZS02k7XINSNlz93v6emM8WutDQSXnroG2xwYlEVHJI+cPA7CPM3Q==} + '@typescript-eslint/parser@8.53.0': + resolution: {integrity: 
sha512-npiaib8XzbjtzS2N4HlqPvlpxpmZ14FjSJrteZpPxGUaYPlvhzlzUZ4mZyABo0EFrOWnvyd0Xxroq//hKhtAWg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/project-service@8.50.0': - resolution: {integrity: sha512-Cg/nQcL1BcoTijEWyx4mkVC56r8dj44bFDvBdygifuS20f3OZCHmFbjF34DPSi07kwlFvqfv/xOLnJ5DquxSGQ==} + '@typescript-eslint/project-service@8.53.0': + resolution: {integrity: sha512-Bl6Gdr7NqkqIP5yP9z1JU///Nmes4Eose6L1HwpuVHwScgDPPuEWbUVhvlZmb8hy0vX9syLk5EGNL700WcBlbg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/scope-manager@8.50.0': - resolution: {integrity: sha512-xCwfuCZjhIqy7+HKxBLrDVT5q/iq7XBVBXLn57RTIIpelLtEIZHXAF/Upa3+gaCpeV1NNS5Z9A+ID6jn50VD4A==} + '@typescript-eslint/scope-manager@8.53.0': + resolution: {integrity: sha512-kWNj3l01eOGSdVBnfAF2K1BTh06WS0Yet6JUgb9Cmkqaz3Jlu0fdVUjj9UI8gPidBWSMqDIglmEXifSgDT/D0g==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/tsconfig-utils@8.50.0': - resolution: {integrity: sha512-vxd3G/ybKTSlm31MOA96gqvrRGv9RJ7LGtZCn2Vrc5htA0zCDvcMqUkifcjrWNNKXHUU3WCkYOzzVSFBd0wa2w==} + '@typescript-eslint/tsconfig-utils@8.53.0': + resolution: {integrity: sha512-K6Sc0R5GIG6dNoPdOooQ+KtvT5KCKAvTcY8h2rIuul19vxH5OTQk7ArKkd4yTzkw66WnNY0kPPzzcmWA+XRmiA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/type-utils@8.50.0': - resolution: {integrity: sha512-7OciHT2lKCewR0mFoBrvZJ4AXTMe/sYOe87289WAViOocEmDjjv8MvIOT2XESuKj9jp8u3SZYUSh89QA4S1kQw==} + '@typescript-eslint/type-utils@8.53.0': + resolution: {integrity: sha512-BBAUhlx7g4SmcLhn8cnbxoxtmS7hcq39xKCgiutL3oNx1TaIp+cny51s8ewnKMpVUKQUGb41RAUWZ9kxYdovuw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/types@8.50.0': - resolution: {integrity: 
sha512-iX1mgmGrXdANhhITbpp2QQM2fGehBse9LbTf0sidWK6yg/NE+uhV5dfU1g6EYPlcReYmkE9QLPq/2irKAmtS9w==} + '@typescript-eslint/types@8.53.0': + resolution: {integrity: sha512-Bmh9KX31Vlxa13+PqPvt4RzKRN1XORYSLlAE+sO1i28NkisGbTtSLFVB3l7PWdHtR3E0mVMuC7JilWJ99m2HxQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/typescript-estree@8.50.0': - resolution: {integrity: sha512-W7SVAGBR/IX7zm1t70Yujpbk+zdPq/u4soeFSknWFdXIFuWsBGBOUu/Tn/I6KHSKvSh91OiMuaSnYp3mtPt5IQ==} + '@typescript-eslint/typescript-estree@8.53.0': + resolution: {integrity: sha512-pw0c0Gdo7Z4xOG987u3nJ8akL9093yEEKv8QTJ+Bhkghj1xyj8cgPaavlr9rq8h7+s6plUJ4QJYw2gCZodqmGw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/utils@8.50.0': - resolution: {integrity: sha512-87KgUXET09CRjGCi2Ejxy3PULXna63/bMYv72tCAlDJC3Yqwln0HiFJ3VJMst2+mEtNtZu5oFvX4qJGjKsnAgg==} + '@typescript-eslint/utils@8.53.0': + resolution: {integrity: sha512-XDY4mXTez3Z1iRDI5mbRhH4DFSt46oaIFsLg+Zn97+sYrXACziXSQcSelMybnVZ5pa1P6xYkPr5cMJyunM1ZDA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/visitor-keys@8.50.0': - resolution: {integrity: sha512-Xzmnb58+Db78gT/CCj/PVCvK+zxbnsw6F+O1oheYszJbBSdEjVhQi3C/Xttzxgi/GLmpvOggRs1RFpiJ8+c34Q==} + '@typescript-eslint/visitor-keys@8.53.0': + resolution: {integrity: sha512-LZ2NqIHFhvFwxG0qZeLL9DvdNAHPGCY5dIRwBhyYeU+LfLhcStE1ImjsuTG/WaVh3XysGaeLW8Rqq7cGkPCFvw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} accepts@1.3.8: @@ -869,6 +872,10 @@ packages: resolution: {integrity: sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + eslint-visitor-keys@5.0.0: + resolution: {integrity: sha512-A0XeIi7CXU7nPlfHS9loMYEKxUaONu/hTEzHTGba9Huu94Cq1hPivf+DE5erJozZOky0LfvXAyrV/tcswpLI0Q==} + engines: {node: ^20.19.0 || ^22.13.0 || >=24} + 
eslint@9.39.2: resolution: {integrity: sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -883,8 +890,12 @@ packages: resolution: {integrity: sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - esquery@1.6.0: - resolution: {integrity: sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==} + espree@11.0.0: + resolution: {integrity: sha512-+gMeWRrIh/NsG+3NaLeWHuyeyk70p2tbvZIWBYcqQ4/7Xvars6GYTZNhF1sIeLcc6Wb11He5ffz3hsHyXFrw5A==} + engines: {node: ^20.19.0 || ^22.13.0 || >=24} + + esquery@1.7.0: + resolution: {integrity: sha512-Ap6G0WQwcU/LHsvLwON1fAQX9Zp0A2Y6Y/cJBl9r/JbW90Zyg4/zbG6zzKa2OTALELarYHmKu0GhpM5EO+7T0g==} engines: {node: '>=0.10'} esrecurse@4.3.0: @@ -1003,8 +1014,8 @@ packages: resolution: {integrity: sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==} engines: {node: '>=18'} - globals@16.5.0: - resolution: {integrity: sha512-c/c15i26VrJ4IRt5Z89DnIzCGDn9EcebibhAOjw5ibqEHsE1wLUgkPn9RDmNcUKyU87GeaL633nyJ+pplFR2ZQ==} + globals@17.0.0: + resolution: {integrity: sha512-gv5BeD2EssA793rlFWVPMMCqefTlpusw6/2TbAVMy0FzcG8wKJn4O+NqJ4+XWmmwrayJgw5TzrmWjFgmz1XPqw==} engines: {node: '>=18'} gopd@1.2.0: @@ -1544,8 +1555,8 @@ packages: resolution: {integrity: sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==} hasBin: true - ts-api-utils@2.1.0: - resolution: {integrity: sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==} + ts-api-utils@2.4.0: + resolution: {integrity: sha512-3TaVTaAv2gTiMB35i3FiGJaRfwb3Pyn/j3m/bfAvGe8FB7CF6u+LMYqYlDh7reQf7UNvoTvdfAqHGmPGOSsPmA==} engines: {node: '>=18.12'} peerDependencies: typescript: '>=4.8.4' @@ -1625,6 +1636,9 @@ packages: resolution: {integrity: 
sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} engines: {node: '>=10'} + zod@4.3.5: + resolution: {integrity: sha512-k7Nwx6vuWx1IJ9Bjuf4Zt1PEllcwe7cls3VNzm4CQ1/hgtFUK2bRNG3rvnpPUhFjmqJKAKtjV576KnUkHocg/g==} + snapshots: '@esbuild/aix-ppc64@0.27.1': @@ -1705,7 +1719,7 @@ snapshots: '@esbuild/win32-x64@0.27.1': optional: true - '@eslint-community/eslint-utils@4.9.0(eslint@9.39.2(jiti@2.6.1))': + '@eslint-community/eslint-utils@4.9.1(eslint@9.39.2(jiti@2.6.1))': dependencies: eslint: 9.39.2(jiti@2.6.1) eslint-visitor-keys: 3.4.3 @@ -1856,13 +1870,13 @@ snapshots: '@pinojs/redact@0.4.0': {} - '@stylistic/eslint-plugin@5.6.1(eslint@9.39.2(jiti@2.6.1))': + '@stylistic/eslint-plugin@5.7.0(eslint@9.39.2(jiti@2.6.1))': dependencies: - '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@2.6.1)) - '@typescript-eslint/types': 8.50.0 + '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.2(jiti@2.6.1)) + '@typescript-eslint/types': 8.53.0 eslint: 9.39.2(jiti@2.6.1) - eslint-visitor-keys: 4.2.1 - espree: 10.4.0 + eslint-visitor-keys: 5.0.0 + espree: 11.0.0 estraverse: 5.3.0 picomatch: 4.0.3 @@ -1988,95 +2002,95 @@ snapshots: '@types/http-errors': 2.0.5 '@types/node': 24.10.2 - '@typescript-eslint/eslint-plugin@8.50.0(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)': + '@typescript-eslint/eslint-plugin@8.53.0(@typescript-eslint/parser@8.53.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)': dependencies: '@eslint-community/regexpp': 4.12.2 - '@typescript-eslint/parser': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) - '@typescript-eslint/scope-manager': 8.50.0 - '@typescript-eslint/type-utils': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) - '@typescript-eslint/utils': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) - '@typescript-eslint/visitor-keys': 8.50.0 + '@typescript-eslint/parser': 
8.53.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/scope-manager': 8.53.0 + '@typescript-eslint/type-utils': 8.53.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/utils': 8.53.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 8.53.0 eslint: 9.39.2(jiti@2.6.1) ignore: 7.0.5 natural-compare: 1.4.0 - ts-api-utils: 2.1.0(typescript@5.9.3) + ts-api-utils: 2.4.0(typescript@5.9.3) typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)': + '@typescript-eslint/parser@8.53.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)': dependencies: - '@typescript-eslint/scope-manager': 8.50.0 - '@typescript-eslint/types': 8.50.0 - '@typescript-eslint/typescript-estree': 8.50.0(typescript@5.9.3) - '@typescript-eslint/visitor-keys': 8.50.0 + '@typescript-eslint/scope-manager': 8.53.0 + '@typescript-eslint/types': 8.53.0 + '@typescript-eslint/typescript-estree': 8.53.0(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 8.53.0 debug: 4.4.3 eslint: 9.39.2(jiti@2.6.1) typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/project-service@8.50.0(typescript@5.9.3)': + '@typescript-eslint/project-service@8.53.0(typescript@5.9.3)': dependencies: - '@typescript-eslint/tsconfig-utils': 8.50.0(typescript@5.9.3) - '@typescript-eslint/types': 8.50.0 + '@typescript-eslint/tsconfig-utils': 8.53.0(typescript@5.9.3) + '@typescript-eslint/types': 8.53.0 debug: 4.4.3 typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/scope-manager@8.50.0': + '@typescript-eslint/scope-manager@8.53.0': dependencies: - '@typescript-eslint/types': 8.50.0 - '@typescript-eslint/visitor-keys': 8.50.0 + '@typescript-eslint/types': 8.53.0 + '@typescript-eslint/visitor-keys': 8.53.0 - '@typescript-eslint/tsconfig-utils@8.50.0(typescript@5.9.3)': + 
'@typescript-eslint/tsconfig-utils@8.53.0(typescript@5.9.3)': dependencies: typescript: 5.9.3 - '@typescript-eslint/type-utils@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)': + '@typescript-eslint/type-utils@8.53.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)': dependencies: - '@typescript-eslint/types': 8.50.0 - '@typescript-eslint/typescript-estree': 8.50.0(typescript@5.9.3) - '@typescript-eslint/utils': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/types': 8.53.0 + '@typescript-eslint/typescript-estree': 8.53.0(typescript@5.9.3) + '@typescript-eslint/utils': 8.53.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) debug: 4.4.3 eslint: 9.39.2(jiti@2.6.1) - ts-api-utils: 2.1.0(typescript@5.9.3) + ts-api-utils: 2.4.0(typescript@5.9.3) typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/types@8.50.0': {} + '@typescript-eslint/types@8.53.0': {} - '@typescript-eslint/typescript-estree@8.50.0(typescript@5.9.3)': + '@typescript-eslint/typescript-estree@8.53.0(typescript@5.9.3)': dependencies: - '@typescript-eslint/project-service': 8.50.0(typescript@5.9.3) - '@typescript-eslint/tsconfig-utils': 8.50.0(typescript@5.9.3) - '@typescript-eslint/types': 8.50.0 - '@typescript-eslint/visitor-keys': 8.50.0 + '@typescript-eslint/project-service': 8.53.0(typescript@5.9.3) + '@typescript-eslint/tsconfig-utils': 8.53.0(typescript@5.9.3) + '@typescript-eslint/types': 8.53.0 + '@typescript-eslint/visitor-keys': 8.53.0 debug: 4.4.3 minimatch: 9.0.5 semver: 7.7.3 tinyglobby: 0.2.15 - ts-api-utils: 2.1.0(typescript@5.9.3) + ts-api-utils: 2.4.0(typescript@5.9.3) typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/utils@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)': + '@typescript-eslint/utils@8.53.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)': dependencies: - '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@2.6.1)) - '@typescript-eslint/scope-manager': 8.50.0 - 
'@typescript-eslint/types': 8.50.0 - '@typescript-eslint/typescript-estree': 8.50.0(typescript@5.9.3) + '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.2(jiti@2.6.1)) + '@typescript-eslint/scope-manager': 8.53.0 + '@typescript-eslint/types': 8.53.0 + '@typescript-eslint/typescript-estree': 8.53.0(typescript@5.9.3) eslint: 9.39.2(jiti@2.6.1) typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/visitor-keys@8.50.0': + '@typescript-eslint/visitor-keys@8.53.0': dependencies: - '@typescript-eslint/types': 8.50.0 + '@typescript-eslint/types': 8.53.0 eslint-visitor-keys: 4.2.1 accepts@1.3.8: @@ -2318,9 +2332,11 @@ snapshots: eslint-visitor-keys@4.2.1: {} + eslint-visitor-keys@5.0.0: {} + eslint@9.39.2(jiti@2.6.1): dependencies: - '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@2.6.1)) + '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.2(jiti@2.6.1)) '@eslint-community/regexpp': 4.12.2 '@eslint/config-array': 0.21.1 '@eslint/config-helpers': 0.4.2 @@ -2340,7 +2356,7 @@ snapshots: eslint-scope: 8.4.0 eslint-visitor-keys: 4.2.1 espree: 10.4.0 - esquery: 1.6.0 + esquery: 1.7.0 esutils: 2.0.3 fast-deep-equal: 3.1.3 file-entry-cache: 8.0.0 @@ -2365,7 +2381,13 @@ snapshots: acorn-jsx: 5.3.2(acorn@8.15.0) eslint-visitor-keys: 4.2.1 - esquery@1.6.0: + espree@11.0.0: + dependencies: + acorn: 8.15.0 + acorn-jsx: 5.3.2(acorn@8.15.0) + eslint-visitor-keys: 5.0.0 + + esquery@1.7.0: dependencies: estraverse: 5.3.0 @@ -2504,7 +2526,7 @@ snapshots: globals@14.0.0: {} - globals@16.5.0: {} + globals@17.0.0: {} gopd@1.2.0: {} @@ -3038,7 +3060,7 @@ snapshots: tree-kill@1.2.2: {} - ts-api-utils@2.1.0(typescript@5.9.3): + ts-api-utils@2.4.0(typescript@5.9.3): dependencies: typescript: 5.9.3 @@ -3106,3 +3128,5 @@ snapshots: yargs-parser: 21.1.1 yocto-queue@0.1.0: {} + + zod@4.3.5: {}