diff --git a/.env.example b/.env.example index 3d62dc2..2b28f64 100644 --- a/.env.example +++ b/.env.example @@ -17,10 +17,9 @@ DATABASE_URL=postgresql://postgres:devpassword@localhost:5432/typelets_local # For production (example): # DATABASE_URL=postgresql://username:password@hostname:5432/database?sslmode=require -# Database Connection Pooling (OPTIONAL) -# DB_POOL_MAX=20 # Maximum connections in pool (default: 20) -# DB_IDLE_TIMEOUT=20 # Close idle connections after N seconds (default: 20) -# DB_CONNECT_TIMEOUT=10 # Connection timeout in seconds (default: 10) +# Valkey Cache (OPTIONAL - for performance optimization) +# VALKEY_HOST=your-cluster.serverless.use1.cache.amazonaws.com +# VALKEY_PORT=6379 # Clerk Authentication (REQUIRED) # Get your secret key from: https://dashboard.clerk.com/ @@ -31,6 +30,7 @@ CLERK_SECRET_KEY=sk_test_your_actual_clerk_secret_key_from_dashboard # CORS Origins (REQUIRED) # Comma-separated list of allowed origins (no spaces after commas) # Include all frontend domains that will access this API +# If not set, all cross-origin requests will be blocked CORS_ORIGINS=http://localhost:5173,http://localhost:3000 # Production example: # CORS_ORIGINS=https://app.typelets.com,https://typelets.com @@ -66,10 +66,10 @@ MESSAGE_AUTH_SECRET=your-very-secure-random-string-here-min-32-chars # HTTP_FILE_RATE_LIMIT_MAX=100 # Max file operations per window # WebSocket Rate Limiting -WS_RATE_LIMIT_WINDOW_MS=60000 # Time window: 1 minute (in milliseconds) -WS_RATE_LIMIT_MAX_MESSAGES=300 # Max messages per window per connection -WS_MAX_CONNECTIONS_PER_USER=20 # Max concurrent connections per user -WS_AUTH_TIMEOUT_MS=30000 # Authentication timeout: 30 seconds +WS_RATE_LIMIT_WINDOW_MS=60000 # Time window: 1 minute (in milliseconds) +WS_RATE_LIMIT_MAX_MESSAGES=300 # Max messages per window per connection +WS_MAX_CONNECTIONS_PER_USER=20 # Max concurrent connections per user +WS_AUTH_TIMEOUT_MS=30000 # Authentication timeout: 30 seconds # Production Rate Limiting 
Recommendations: # - HTTP: 500-1000 requests per 15 minutes per user @@ -82,7 +82,7 @@ WS_AUTH_TIMEOUT_MS=30000 # Authentication timeout: 30 seconds # ================================================================ # File Size Limits -MAX_FILE_SIZE_MB=50 # Maximum size per file (default: 50MB) +MAX_FILE_SIZE_MB=50 # Maximum size per file (default: 50MB) # MAX_NOTE_SIZE_MB=1024 # Maximum total attachments per note (default: 1GB) # Allowed File Types (handled in code, documented here) @@ -107,8 +107,8 @@ MAX_FILE_SIZE_MB=50 # Maximum size per file (default: 50MB) # ================================================================ # Free Tier Limits -FREE_TIER_STORAGE_GB=1 # Storage limit for free users (default: 1GB) -FREE_TIER_NOTE_LIMIT=100 # Note count limit for free users (default: 100) +FREE_TIER_STORAGE_GB=1 # Storage limit for free users (default: 1GB) +FREE_TIER_NOTE_LIMIT=100 # Note count limit for free users (default: 100) # Usage tracking for billing analytics # These limits trigger billing events logged to console @@ -118,6 +118,12 @@ FREE_TIER_NOTE_LIMIT=100 # Note count limit for free users (default # LOGGING & MONITORING # ================================================================ +# New Relic APM (OPTIONAL - for production monitoring) +# Get your license key from: https://one.newrelic.com +# NEW_RELIC_APP_NAME=Typelets API +# NEW_RELIC_LICENSE_KEY=your_license_key_here +# NEW_RELIC_LOG_LEVEL=warn # Options: error, warn, info, debug, trace (default: warn in dev, info in prod) + # Debug Logging # DEBUG=false # Set to true for verbose logging (not recommended in production) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0a0a1d0..60dff72 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -9,29 +9,29 @@ on: jobs: test: runs-on: ubuntu-latest - + strategy: matrix: node-version: [20.x, 22.x] - + steps: - uses: actions/checkout@v4 - + - name: Use Node.js ${{ matrix.node-version }} uses: actions/setup-node@v4 
with: node-version: ${{ matrix.node-version }} - + - name: Setup pnpm uses: pnpm/action-setup@v4 with: version: 9.15.0 - + - name: Install dependencies run: pnpm install - + - name: Run linting run: pnpm run lint - + - name: Run build - run: NODE_OPTIONS="--max-old-space-size=8192" pnpm run build \ No newline at end of file + run: NODE_OPTIONS="--max-old-space-size=8192" pnpm run build diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 810dd25..1d372a9 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -6,7 +6,7 @@ on: pull_request: branches: [main] schedule: - - cron: '0 0 * * 0' + - cron: "0 0 * * 0" jobs: analyze: @@ -20,7 +20,7 @@ jobs: strategy: fail-fast: false matrix: - language: ['typescript'] + language: ["typescript"] steps: - name: Checkout repository @@ -34,8 +34,8 @@ jobs: - name: Setup Node.js uses: actions/setup-node@v4 with: - node-version: '20' - + node-version: "20" + - name: Setup pnpm uses: pnpm/action-setup@v4 with: @@ -48,4 +48,4 @@ jobs: run: NODE_OPTIONS="--max-old-space-size=8192" pnpm run build - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3 \ No newline at end of file + uses: github/codeql-action/analyze@v3 diff --git a/.github/workflows/new-relic-change-tracking.yml b/.github/workflows/new-relic-change-tracking.yml new file mode 100644 index 0000000..c3bee8e --- /dev/null +++ b/.github/workflows/new-relic-change-tracking.yml @@ -0,0 +1,21 @@ +name: Change Tracking Marker +on: + release: + types: [published] + +jobs: + newrelic: + runs-on: ubuntu-latest + name: New Relic + steps: + # This step builds a var with the release tag value to use later + - name: Set Release Version from Tag + run: echo "RELEASE_VERSION=${{ github.ref_name }}" >> $GITHUB_ENV + # This step creates a new Change Tracking Marker + - name: New Relic Application Deployment Marker + uses: newrelic/deployment-marker-action@v2.3.0 + with: + apiKey: ${{ secrets.NEW_RELIC_API_KEY }} + guid: ${{ 
secrets.NEW_RELIC_DEPLOYMENT_ENTITY_GUID }} + version: "${{ env.RELEASE_VERSION }}" + user: "${{ github.actor }}" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index bcf2999..a0c3a85 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -7,34 +7,34 @@ on: jobs: release: runs-on: ubuntu-latest - + permissions: contents: write issues: write pull-requests: write - + steps: - uses: actions/checkout@v4 with: fetch-depth: 0 - + - name: Setup Node.js uses: actions/setup-node@v4 with: - node-version: '20' - + node-version: "20" + - name: Setup pnpm uses: pnpm/action-setup@v4 with: version: 9.15.0 - + - name: Install dependencies run: pnpm install - + - name: Build run: NODE_OPTIONS="--max-old-space-size=8192" pnpm run build - + - name: Release env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: npx semantic-release \ No newline at end of file + run: npx semantic-release diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 0000000..85ead18 --- /dev/null +++ b/.prettierignore @@ -0,0 +1,38 @@ +# Dependencies +node_modules/ +pnpm-lock.yaml +package-lock.json +yarn.lock + +# Build outputs +dist/ +build/ +.next/ +out/ + +# Cache +.cache/ +.turbo/ + +# Logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# Environment files +.env +.env.local +.env.*.local + +# Coverage +coverage/ +.nyc_output/ + +# Misc +.DS_Store +*.pem + +# Config files +*.config.js +*.config.mjs diff --git a/.prettierrc b/.prettierrc new file mode 100644 index 0000000..cb2c293 --- /dev/null +++ b/.prettierrc @@ -0,0 +1,10 @@ +{ + "semi": true, + "trailingComma": "es5", + "singleQuote": false, + "printWidth": 100, + "tabWidth": 2, + "useTabs": false, + "arrowParens": "always", + "endOfLine": "auto" +} diff --git a/.releaserc.json b/.releaserc.json index 4960463..73fde8d 100644 --- a/.releaserc.json +++ b/.releaserc.json @@ -19,12 +19,7 @@ [ "@semantic-release/git", { - "assets": [ - "CHANGELOG.md", - "package.json", - 
"package-lock.json", - "src/version.ts" - ], + "assets": ["CHANGELOG.md", "package.json", "package-lock.json", "src/version.ts"], "message": "chore(release): ${nextRelease.version} [skip ci]\n\n${nextRelease.notes}" } ], diff --git a/CHANGELOG.md b/CHANGELOG.md index 5fc4271..6860d19 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,80 +14,69 @@ ## [1.3.1](https://github.com/typelets/typelets-api/compare/v1.3.0...v1.3.1) (2025-09-25) - ### Bug Fixes -* enforce encrypted data validation to prevent plaintext exposure ([0cc6f77](https://github.com/typelets/typelets-api/commit/0cc6f77fcbf9def845e1fae0b871c27f7b1fad95)) +- enforce encrypted data validation to prevent plaintext exposure ([0cc6f77](https://github.com/typelets/typelets-api/commit/0cc6f77fcbf9def845e1fae0b871c27f7b1fad95)) # [1.3.0](https://github.com/typelets/typelets-api/compare/v1.2.0...v1.3.0) (2025-09-23) - ### Features -* add comprehensive Sentry monitoring and fix 429 rate limiting error ([6ba5744](https://github.com/typelets/typelets-api/commit/6ba5744022d075216e8053b6c2127cbb38a4824e)) +- add comprehensive Sentry monitoring and fix 429 rate limiting error ([6ba5744](https://github.com/typelets/typelets-api/commit/6ba5744022d075216e8053b6c2127cbb38a4824e)) # [1.2.0](https://github.com/typelets/typelets-api/compare/v1.1.1...v1.2.0) (2025-09-20) - ### Features -* **ci:** add Husky pre-commit hooks to prevent CI failures ([346bc2b](https://github.com/typelets/typelets-api/commit/346bc2bcd087d5000b7cc21032561c60baf43dda)) -* **ci:** add Husky pre-commit hooks to prevent CI failures ([e2b1017](https://github.com/typelets/typelets-api/commit/e2b1017d1bbb133dd1067ef9234fb463ad89f15d)) -* **code:** add secure API proxy for code execution ([8d599b5](https://github.com/typelets/typelets-api/commit/8d599b5c6c72e3ae871a9cf71b9304fb8541828e)) +- **ci:** add Husky pre-commit hooks to prevent CI failures ([346bc2b](https://github.com/typelets/typelets-api/commit/346bc2bcd087d5000b7cc21032561c60baf43dda)) +- 
**ci:** add Husky pre-commit hooks to prevent CI failures ([e2b1017](https://github.com/typelets/typelets-api/commit/e2b1017d1bbb133dd1067ef9234fb463ad89f15d)) +- **code:** add secure API proxy for code execution ([8d599b5](https://github.com/typelets/typelets-api/commit/8d599b5c6c72e3ae871a9cf71b9304fb8541828e)) ## [1.1.1](https://github.com/typelets/typelets-api/compare/v1.1.0...v1.1.1) (2025-09-16) - ### Bug Fixes -* make note title field optional for encrypted notes ([ec19a48](https://github.com/typelets/typelets-api/commit/ec19a48954a10eb2a2531c3195fc5fe6b3430d70)) +- make note title field optional for encrypted notes ([ec19a48](https://github.com/typelets/typelets-api/commit/ec19a48954a10eb2a2531c3195fc5fe6b3430d70)) # [1.1.0](https://github.com/typelets/typelets-api/compare/v1.0.4...v1.1.0) (2025-09-15) - ### Features -* **websocket:** implement real-time sync with HMAC authentication and fix folder moves ([8de85b7](https://github.com/typelets/typelets-api/commit/8de85b7eae38b9af76154e40cdeff53d771f6e92)) +- **websocket:** implement real-time sync with HMAC authentication and fix folder moves ([8de85b7](https://github.com/typelets/typelets-api/commit/8de85b7eae38b9af76154e40cdeff53d771f6e92)) ## [1.0.4](https://github.com/typelets/typelets-api/compare/v1.0.3...v1.0.4) (2025-09-10) - ### Bug Fixes -* bundle API with esbuild to resolve directory import errors ([4644c0e](https://github.com/typelets/typelets-api/commit/4644c0e3d2de2eb5796abab36b931615dc81eead)) +- bundle API with esbuild to resolve directory import errors ([4644c0e](https://github.com/typelets/typelets-api/commit/4644c0e3d2de2eb5796abab36b931615dc81eead)) ## [1.0.3](https://github.com/typelets/typelets-api/compare/v1.0.2...v1.0.3) (2025-09-10) - ### Bug Fixes -* include root-level TypeScript files in esbuild output ([cf9bb4f](https://github.com/typelets/typelets-api/commit/cf9bb4fda0fa19925122b816d0375c88c4f39e05)) +- include root-level TypeScript files in esbuild output 
([cf9bb4f](https://github.com/typelets/typelets-api/commit/cf9bb4fda0fa19925122b816d0375c88c4f39e05)) ## [1.0.2](https://github.com/typelets/typelets-api/compare/v1.0.1...v1.0.2) (2025-09-10) - ### Bug Fixes -* replace tsc with esbuild to resolve build hanging issue ([235fce7](https://github.com/typelets/typelets-api/commit/235fce77cdde4e2287fe8b25acc7bcb96deb6ff8)) +- replace tsc with esbuild to resolve build hanging issue ([235fce7](https://github.com/typelets/typelets-api/commit/235fce77cdde4e2287fe8b25acc7bcb96deb6ff8)) ## [1.0.1](https://github.com/typelets/typelets-api/compare/v1.0.0...v1.0.1) (2025-09-10) - ### Bug Fixes -* update Dockerfile to use pnpm instead of npm ([13e9639](https://github.com/typelets/typelets-api/commit/13e963965c7e5fa0e060ba8a0d8995eee761620b)) +- update Dockerfile to use pnpm instead of npm ([13e9639](https://github.com/typelets/typelets-api/commit/13e963965c7e5fa0e060ba8a0d8995eee761620b)) # 1.0.0 (2025-09-09) - ### Bug Fixes -* remove ES module configuration to fix semantic-release scripts ([f869d14](https://github.com/typelets/typelets-api/commit/f869d14cf42b35d119d11e3e25daff98060b7129)) - +- remove ES module configuration to fix semantic-release scripts ([f869d14](https://github.com/typelets/typelets-api/commit/f869d14cf42b35d119d11e3e25daff98060b7129)) ### Features -* initial open source release of Typelets API ([66a3d30](https://github.com/typelets/typelets-api/commit/66a3d30dcbc0a33c4118c6948d9537e885298039)) +- initial open source release of Typelets API ([66a3d30](https://github.com/typelets/typelets-api/commit/66a3d30dcbc0a33c4118c6948d9537e885298039)) # Changelog diff --git a/README.md b/README.md index c6eefd3..da2e615 100644 --- a/README.md +++ b/README.md @@ -21,18 +21,23 @@ The backend API for the [Typelets Application](https://github.com/typelets/typel - ๐Ÿ”„ **Real-time Sync** via WebSockets for multi-device support - โšก **Fast & Type-Safe** with TypeScript and Hono - ๐Ÿ˜ **PostgreSQL** with Drizzle ORM +- ๐Ÿš€ 
**Valkey/Redis Caching** for high-performance data access with cluster support +- ๐Ÿ“Š **New Relic APM** integration for monitoring, metrics, and error tracking - ๐Ÿ’ป **Code Execution** via secure Judge0 API proxy -- ๐Ÿ“Š **Production Ready** with structured logging and error handling +- ๐Ÿ›ก๏ธ **Comprehensive Rate Limiting** for HTTP, WebSocket, file uploads, and code execution - ๐Ÿฅ **Health Checks** with detailed system status and readiness probes +- ๐Ÿ“ˆ **Structured Logging** with automatic metrics and business event tracking ## Tech Stack - **Runtime**: Node.js 20+ (LTS recommended) - **Framework**: [Hono](https://hono.dev/) - Fast, lightweight web framework - **Database**: PostgreSQL with [Drizzle ORM](https://orm.drizzle.team/) +- **Cache**: Valkey/Redis Cluster for high-performance caching - **Authentication**: [Clerk](https://clerk.com/) - **Validation**: [Zod](https://zod.dev/) -- **Logging**: Structured console logging for development and production +- **Monitoring**: [New Relic APM](https://newrelic.com/) for metrics, logging, and error tracking +- **Logging**: Structured JSON logging with automatic New Relic integration - **TypeScript**: Strict mode enabled for type safety ## Prerequisites @@ -41,6 +46,8 @@ The backend API for the [Typelets Application](https://github.com/typelets/typel - pnpm 9.15.0+ - PostgreSQL database (local installation or Docker) - Clerk account for authentication ([sign up here](https://dashboard.clerk.com)) +- Valkey/Redis cluster for caching (optional - improves performance) +- New Relic account for monitoring (optional - [sign up here](https://newrelic.com/signup)) - Judge0 API key for code execution (optional - [get from RapidAPI](https://rapidapi.com/judge0-official/api/judge0-ce)) ## Local Development Setup @@ -222,12 +229,14 @@ All `/api/*` endpoints require authentication via Bearer token in the Authorizat The API provides real-time synchronization via WebSocket connection at `ws://localhost:3000` (or your 
configured port). **Connection Flow:** + 1. Connect to WebSocket endpoint 2. Send authentication message with Clerk JWT token 3. Join/leave specific notes for real-time updates 4. Receive real-time sync messages for notes and folders **Message Types:** + - `auth` - Authenticate with JWT token - `ping`/`pong` - Heartbeat messages - `join_note`/`leave_note` - Subscribe/unsubscribe from note updates @@ -236,6 +245,7 @@ The API provides real-time synchronization via WebSocket connection at `ws://loc - `folder_created`/`folder_updated`/`folder_deleted` - Folder events **Security Features:** + - JWT authentication required for all operations - Authorization checks ensure users only access their own notes/folders - Rate limiting (configurable, default: 300 messages per minute per connection) @@ -261,29 +271,43 @@ The application uses the following main tables: - **WebSocket Security**: JWT authentication, rate limiting, and connection limits - **Real-time Authorization**: Database-level ownership validation for all WebSocket operations - ## Environment Variables -| Variable | Description | Required | Default | -| ---------------------------- | -------------------------------------------- | -------- | ----------- | -| `DATABASE_URL` | PostgreSQL connection string | Yes | - | -| `CLERK_SECRET_KEY` | Clerk secret key for JWT verification | Yes | - | -| `CORS_ORIGINS` | Comma-separated list of allowed CORS origins | Yes | - | -| `PORT` | Server port | No | 3000 | -| `NODE_ENV` | Environment (development/production) | No | development | -| `MAX_FILE_SIZE_MB` | Maximum size per file in MB | No | 50 | -| `MAX_NOTE_SIZE_MB` | Maximum total attachments per note in MB | No | 1024 (1GB) | -| `FREE_TIER_STORAGE_GB` | Free tier storage limit in GB | No | 1 | -| `FREE_TIER_NOTE_LIMIT` | Free tier note count limit | No | 100 | -| `DEBUG` | Enable debug logging in production | No | false | -| `WS_RATE_LIMIT_WINDOW_MS` | WebSocket rate limit window in milliseconds | No | 60000 (1 min) | 
-| `WS_RATE_LIMIT_MAX_MESSAGES` | Max WebSocket messages per window | No | 300 | -| `WS_MAX_CONNECTIONS_PER_USER`| Max WebSocket connections per user | No | 20 | -| `WS_AUTH_TIMEOUT_MS` | WebSocket authentication timeout in milliseconds | No | 30000 (30 sec) | -| `JUDGE0_API_KEY` | Judge0 API key for code execution | No* | - | -| `LOG_LEVEL` | Logging level (error/warn/info/debug) | No | info (prod), debug (dev) | - -*Required only for code execution features +| Variable | Description | Required | Default | +| ------------------------------ | ------------------------------------------------ | -------- | -------------------------------- | +| `DATABASE_URL` | PostgreSQL connection string | Yes | - | +| `CLERK_SECRET_KEY` | Clerk secret key for JWT verification | Yes | - | +| `CORS_ORIGINS` | Comma-separated list of allowed CORS origins | Yes | - | +| `PORT` | Server port | No | 3000 | +| `NODE_ENV` | Environment (development/production) | No | development | +| **Caching (Optional)** | | | | +| `VALKEY_HOST` | Valkey/Redis cluster hostname | No | - | +| `VALKEY_PORT` | Valkey/Redis cluster port | No | 6379 | +| **Monitoring (Optional)** | | | | +| `NEW_RELIC_APP_NAME` | Application name in New Relic | No | Typelets API | +| `NEW_RELIC_LICENSE_KEY` | New Relic license key | No | - | +| `NEW_RELIC_LOG_LEVEL` | New Relic log level (error/warn/info/debug) | No | warn (dev), info (prod) | +| **Rate Limiting** | | | | +| `HTTP_RATE_LIMIT_WINDOW_MS` | HTTP rate limit window in milliseconds | No | 900000 (15 min) | +| `HTTP_RATE_LIMIT_MAX_REQUESTS` | Max HTTP requests per window | No | 1000 | +| `HTTP_FILE_RATE_LIMIT_MAX` | Max file operations per window | No | 100 | +| `WS_RATE_LIMIT_WINDOW_MS` | WebSocket rate limit window in milliseconds | No | 60000 (1 min) | +| `WS_RATE_LIMIT_MAX_MESSAGES` | Max WebSocket messages per window | No | 300 | +| `WS_MAX_CONNECTIONS_PER_USER` | Max WebSocket connections per user | No | 20 | +| `WS_AUTH_TIMEOUT_MS` | WebSocket authentication 
timeout in milliseconds | No | 30000 (30 sec) | +| `CODE_EXEC_RATE_LIMIT_MAX` | Max code executions per window | No | 100 (dev), 50 (prod) | +| `CODE_EXEC_RATE_WINDOW_MS` | Code execution rate limit window in milliseconds | No | 900000 (15 min) | +| **File & Storage** | | | | +| `MAX_FILE_SIZE_MB` | Maximum size per file in MB | No | 50 | +| `MAX_NOTE_SIZE_MB` | Maximum total attachments per note in MB | No | 1024 (1GB) | +| `FREE_TIER_STORAGE_GB` | Free tier storage limit in GB | No | 1 | +| `FREE_TIER_NOTE_LIMIT` | Free tier note count limit | No | 100 | +| **Code Execution (Optional)** | | | | +| `JUDGE0_API_KEY` | Judge0 API key for code execution | No\* | - | +| `JUDGE0_API_URL` | Judge0 API base URL | No | https://judge0-ce.p.rapidapi.com | +| `JUDGE0_API_HOST` | Judge0 API host header | No | judge0-ce.p.rapidapi.com | + +\*Required only for code execution features ## Development @@ -295,15 +319,20 @@ src/ โ”‚ โ”œโ”€โ”€ index.ts # Database connection โ”‚ โ””โ”€โ”€ schema.ts # Database schema definitions โ”œโ”€โ”€ lib/ +โ”‚ โ”œโ”€โ”€ cache.ts # Valkey/Redis cluster caching layer +โ”‚ โ”œโ”€โ”€ cache-keys.ts # Centralized cache key patterns and TTL values +โ”‚ โ”œโ”€โ”€ logger.ts # Structured logging with New Relic integration โ”‚ โ””โ”€โ”€ validation.ts # Zod validation schemas โ”œโ”€โ”€ middleware/ โ”‚ โ”œโ”€โ”€ auth.ts # Authentication middleware โ”‚ โ”œโ”€โ”€ rate-limit.ts # Rate limiting middleware -โ”‚ โ””โ”€โ”€ security.ts # Security headers middleware +โ”‚ โ”œโ”€โ”€ security.ts # Security headers middleware +โ”‚ โ””โ”€โ”€ usage.ts # Storage and usage limit enforcement โ”œโ”€โ”€ routes/ โ”‚ โ”œโ”€โ”€ code.ts # Code execution routes (Judge0 proxy) โ”‚ โ”œโ”€โ”€ files.ts # File attachment routes -โ”‚ โ”œโ”€โ”€ folders.ts # Folder management routes +โ”‚ โ”œโ”€โ”€ folders.ts # Folder management routes with caching +โ”‚ โ”œโ”€โ”€ metrics.ts # Health checks and system metrics โ”‚ โ”œโ”€โ”€ notes.ts # Note management routes โ”‚ โ””โ”€โ”€ users.ts # User profile 
routes โ”œโ”€โ”€ types/ @@ -320,7 +349,7 @@ src/ โ”‚ โ”‚ โ””โ”€โ”€ rate-limiter.ts # WebSocket rate limiting โ”‚ โ”œโ”€โ”€ types.ts # WebSocket message types โ”‚ โ””โ”€โ”€ index.ts # Main WebSocket server manager -โ””โ”€โ”€ server.ts # Application entry point +โ””โ”€โ”€ server.ts # Application entry point with New Relic initialization ``` ### Type Safety @@ -381,7 +410,6 @@ This application is designed for production deployment using AWS ECS (Elastic Co For production deployment, configure the same environment variables in your ECS task definition that you use locally in `.env`. - ## Contributing We welcome contributions from the community! diff --git a/SECURITY.md b/SECURITY.md index f4ed660..bc2186c 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -11,23 +11,27 @@ We actively maintain security updates for the following versions: ## Security Features ### Authentication & Authorization + - **JWT Authentication**: Secure token-based authentication using Clerk - **User Scoped Access**: All data operations are scoped to authenticated users - **Session Management**: Automatic token validation and user context ### Input Validation & Sanitization + - **Zod Schema Validation**: Comprehensive input validation on all endpoints - **SQL Injection Prevention**: Parameterized queries with Drizzle ORM - **File Upload Security**: Restricted MIME types and filename validation - **Search Input Sanitization**: Escaped special characters to prevent injection ### API Security + - **Rate Limiting**: Configurable rate limits per user/IP (100 req/15min default) - **File Upload Limits**: Stricter limits for file operations (10 req/15min) - **CORS Configuration**: Restricted to specific allowed origins - **Request Body Limits**: Configurable file size limits with 35% buffer ### Security Headers + - **Content Security Policy (CSP)**: Restrictive policy preventing XSS - **X-Frame-Options**: DENY to prevent clickjacking - **X-Content-Type-Options**: nosniff to prevent MIME confusion @@ -36,6 
+40,7 @@ We actively maintain security updates for the following versions: - **Referrer-Policy**: Strict referrer policy ### WebSocket Security + - **Authentication Required**: All WS operations require valid JWT - **Connection Limits**: Maximum 20 connections per user - **Rate Limiting**: 300 messages per minute per connection @@ -44,12 +49,14 @@ We actively maintain security updates for the following versions: - **HMAC Message Authentication**: Optional cryptographic message signing for enhanced security ### Data Protection + - **Client-Side Encryption**: Optional end-to-end encryption for sensitive data - **Database Encryption**: SSL/TLS enforced for database connections - **Environment Variables**: Sensitive configuration externalized - **Error Sanitization**: Stack traces hidden in production ### Infrastructure Security + - **Database Security**: Foreign key constraints and transaction safety - **Logging**: Security events logged with unique error IDs - **Environment Separation**: Development vs production error handling @@ -61,16 +68,19 @@ We take security seriously. If you discover a security vulnerability, please fol ### 1. **DO NOT** create a public GitHub issue for security vulnerabilities ### 2. Report privately via: + - **Email**: security@typelets.com - **GitHub Security Advisories**: Use the private vulnerability reporting feature ### 3. Include in your report: + - Description of the vulnerability - Steps to reproduce the issue - Potential impact assessment - Suggested remediation (if any) ### 4. Response Timeline: + - **Initial Response**: Within 48 hours - **Assessment**: Within 5 business days - **Fix Timeline**: Varies by severity @@ -80,6 +90,7 @@ We take security seriously. If you discover a security vulnerability, please fol - Low: Next minor release ### 5. 
Disclosure Policy: + - We follow coordinated disclosure - Public disclosure after fix is deployed - Credit will be given to reporters (unless requested otherwise) @@ -87,6 +98,7 @@ We take security seriously. If you discover a security vulnerability, please fol ## Security Best Practices for Deployment ### Environment Configuration + ```bash # Required security environment variables CLERK_SECRET_KEY=your_clerk_secret_key @@ -101,6 +113,7 @@ CORS_ORIGINS=https://yourdomain.com,https://app.yourdomain.com ``` ### Production Deployment + - Use HTTPS only in production - Configure proper CORS origins - Set NODE_ENV=production @@ -109,12 +122,14 @@ CORS_ORIGINS=https://yourdomain.com,https://app.yourdomain.com - Regular security updates ### Rate Limiting + - Default: 100 requests per 15 minutes per user/IP - File uploads: 10 operations per 15 minutes - WebSocket: 300 messages per minute per connection - Configurable via environment variables ### File Upload Security + - Allowed MIME types: images, PDFs, text files - Maximum file size: 50MB (configurable) - Filename validation prevents path traversal @@ -123,6 +138,7 @@ CORS_ORIGINS=https://yourdomain.com,https://app.yourdomain.com ## Security Checklist for Contributors ### Code Review Requirements + - [ ] All user inputs validated with Zod schemas - [ ] Database queries use parameterized statements - [ ] No secrets in code or logs @@ -131,6 +147,7 @@ CORS_ORIGINS=https://yourdomain.com,https://app.yourdomain.com - [ ] Rate limiting considered for expensive operations ### Testing Requirements + - [ ] Security tests pass - [ ] Input validation tests included - [ ] Authentication/authorization tests cover edge cases @@ -139,12 +156,14 @@ CORS_ORIGINS=https://yourdomain.com,https://app.yourdomain.com ## Known Security Considerations ### Limitations + - In-memory rate limiting (resets on server restart) - In-memory nonce tracking (resets on server restart) - No distributed session management - Client-side encryption keys not 
managed by server ### Recommendations + - Use Redis for production rate limiting and nonce tracking - Implement session management for multi-server deployments - Consider external key management for enterprise use @@ -153,12 +172,15 @@ CORS_ORIGINS=https://yourdomain.com,https://app.yourdomain.com ## Security Dependencies ### Regular Updates + We monitor and update dependencies for security vulnerabilities: + - Automated dependency scanning - Regular security patches - Major version updates evaluated for security impact ### Critical Dependencies + - `@clerk/backend` - Authentication - `drizzle-orm` - Database ORM - `hono` - Web framework @@ -168,9 +190,10 @@ We monitor and update dependencies for security vulnerabilities: ## Compliance This API implements security controls aligned with: + - OWASP Web Application Security Project guidelines - Modern web security best practices - Input validation and output encoding standards - Secure authentication and session management -For questions about our security practices, please contact security@typelets.com. \ No newline at end of file +For questions about our security practices, please contact security@typelets.com. diff --git a/WEBSOCKET_INTEGRATION.md b/WEBSOCKET_INTEGRATION.md deleted file mode 100644 index f9263ce..0000000 --- a/WEBSOCKET_INTEGRATION.md +++ /dev/null @@ -1,534 +0,0 @@ -# WebSocket Integration - Backend Implementation - -This document describes the backend WebSocket implementation for real-time note synchronization in the Typelets API. - -## Overview - -The WebSocket server enables: -- **Real-time synchronization** across multiple client sessions -- **Authenticated connections** using Clerk JWT tokens -- **Rate limiting and DoS protection** for production security -- **Optional HMAC message authentication** for enhanced security -- **Connection management** with automatic cleanup - -## Architecture - -### Core Components - -1. 
**WebSocket Manager** (`src/websocket/index.ts`) - - Main WebSocket server management - - Message routing and handling - - Client connection lifecycle - -2. **Authentication Handler** (`src/websocket/auth/handler.ts`) - - JWT token validation via Clerk - - Optional HMAC message signing verification - - Session management and timeouts - -3. **Message Handlers** - - **Note Handler** (`src/websocket/handlers/notes.ts`) - Note operations - - **Folder Handler** (`src/websocket/handlers/folders.ts`) - Folder operations - -4. **Middleware** - - **Rate Limiter** (`src/websocket/middleware/rate-limiter.ts`) - - **Connection Manager** (`src/websocket/middleware/connection-manager.ts`) - -5. **Type Definitions** (`src/websocket/types.ts`) - - TypeScript interfaces for all WebSocket messages and connections - -## Message Protocol - -### Client โ†’ Server Messages - -```typescript -// Authentication (required first) -{ - type: "auth", - token: "clerk_jwt_token_here" -} - -// Join/leave specific notes for updates -{ - type: "join_note", - noteId: "uuid" -} - -{ - type: "leave_note", - noteId: "uuid" -} - -// Send note updates -{ - type: "note_update", - noteId: "uuid", - changes: { - title?: "New Title", - content?: "New content", - encryptedTitle?: "encrypted_title_here", - encryptedContent?: "encrypted_content_here", - folderId?: "new_folder_id", - starred?: true, - archived?: false, - deleted?: true, - hidden?: false - } -} - -// Notify of new notes/folders -{ - type: "note_created", - noteData: { id: "uuid", title: "New Note", /* full note object */ } -} - -{ - type: "folder_created", - folderData: { id: "uuid", name: "New Folder", /* full folder object */ } -} - -// Notify of deletions -{ - type: "note_deleted", - noteId: "uuid" -} - -{ - type: "folder_deleted", - folderId: "uuid" -} - -// Heartbeat -{ - type: "ping" -} -``` - -### Server โ†’ Client Messages - -```typescript -// Connection established -{ - type: "connection_established", - message: "Please authenticate to 
continue" -} - -// Authentication responses -{ - type: "auth_success", - message: "Authentication successful", - userId: "user_id", - sessionSecret?: "hex_string" // For HMAC authentication -} - -{ - type: "auth_failed", - message: "Authentication failed", - reason?: "token-expired" | "auth-failed" -} - -// Real-time sync messages -{ - type: "note_sync", - noteId: "uuid", - changes: { title: "Updated Title" }, - updatedNote: { /* complete updated note object */ }, - timestamp: 1234567890, - fromUserId: "user_id" -} - -{ - type: "note_created_sync", - noteData: { /* complete note object */ }, - timestamp: 1234567890, - fromUserId: "user_id" | "server" -} - -{ - type: "note_deleted_sync", - noteId: "uuid", - timestamp: 1234567890, - fromUserId: "user_id" | "server" -} - -// Folder sync messages -{ - type: "folder_created_sync", - folderData: { /* complete folder object */ }, - timestamp: 1234567890, - fromUserId: "user_id" | "server" -} - -{ - type: "folder_updated_sync", - folderId: "uuid", - changes: { name: "Updated Name" }, - updatedFolder: { /* complete folder object */ }, - timestamp: 1234567890, - fromUserId: "user_id" | "server" -} - -{ - type: "folder_deleted_sync", - folderId: "uuid", - timestamp: 1234567890, - fromUserId: "user_id" | "server" -} - -// Operation confirmations -{ - type: "note_update_success", - noteId: "uuid", - updatedNote: { /* complete note object */ }, - timestamp: 1234567890 -} - -{ - type: "note_joined", - noteId: "uuid", - message: "Successfully joined note for real-time sync" -} - -{ - type: "note_left", - noteId: "uuid" -} - -// Heartbeat response -{ - type: "pong", - timestamp: 1234567890 -} - -// Errors -{ - type: "error", - message: "Error description" -} -``` - -## Security Features - -### Authentication - -All WebSocket operations require valid Clerk JWT authentication: - -1. 
**Connection Flow:** - - Client connects to WebSocket endpoint - - Server sends `connection_established` message - - Client must send `auth` message with JWT token within 30 seconds - - Server validates token with Clerk and responds with `auth_success` or `auth_failed` - -2. **JWT Validation:** - - Uses `@clerk/backend` for secure token verification - - Extracts user ID from token for authorization - - Validates token signature and expiration - -### Optional HMAC Message Authentication - -For enhanced security, the server supports HMAC-SHA256 message signing: - -1. **Session Secret Generation:** - ```typescript - // Generated after successful authentication - const sessionSecret = createHash('sha256') - .update(`${jwtToken}:${userId}:${flooredTimestamp}`) - .digest('hex'); - ``` - -2. **Message Signing (Client-side):** - ```typescript - const messageData = JSON.stringify({ payload, timestamp, nonce }); - const signature = hmacSHA256(sessionSecret, messageData).toBase64(); - - // Send signed message - { - payload: originalMessage, - signature: signature, - timestamp: Date.now(), - nonce: randomBase64String - } - ``` - -3. **Message Verification (Server-side):** - - Regenerates session secret using message timestamp - - Verifies HMAC signature matches - - Checks nonce for replay attack prevention - - Validates message age (5-minute window) - -### Rate Limiting & DoS Protection - -1. **Connection Limits:** - - Maximum 20 connections per user (configurable) - - Automatic cleanup of stale connections - -2. **Message Rate Limiting:** - - 300 messages per minute per connection (configurable) - - 1MB maximum message size - - Sliding window rate limiting - -3. 
**Nonce Replay Protection:** - - Tracks used nonces with timestamps - - Automatic cleanup of expired nonces (5-minute windows) - - Memory usage limits with emergency cleanup - -## Configuration - -### Environment Variables - -| Variable | Description | Default | Required | -|----------|-------------|---------|----------| -| `WS_RATE_LIMIT_WINDOW_MS` | Rate limit window in milliseconds | `60000` | No | -| `WS_RATE_LIMIT_MAX_MESSAGES` | Max messages per window | `300` | No | -| `WS_MAX_CONNECTIONS_PER_USER` | Max connections per user | `20` | No | -| `WS_AUTH_TIMEOUT_MS` | Authentication timeout | `30000` | No | -| `CLERK_SECRET_KEY` | Clerk secret for JWT verification | - | Yes | - -### Production Configuration - -```env -# WebSocket Security Settings -WS_RATE_LIMIT_MAX_MESSAGES=300 -WS_MAX_CONNECTIONS_PER_USER=20 -WS_AUTH_TIMEOUT_MS=30000 - -# Authentication -CLERK_SECRET_KEY=sk_live_your_production_key - -# CORS for WebSocket upgrade requests -CORS_ORIGINS=https://app.yourdomain.com,https://yourdomain.com -``` - -## Database Integration - -### Authorization Checks - -All note/folder operations include ownership validation: - -```typescript -// Example from note update handler -const existingNote = await db.query.notes.findFirst({ - where: and(eq(notes.id, noteId), eq(notes.userId, userId)) -}); - -if (!existingNote) { - // Access denied - user doesn't own this note - return sendError("Note not found or access denied"); -} -``` - -### Allowed Field Updates - -Note updates are restricted to safe fields only: - -```typescript -const allowedFields = [ - 'title', 'content', 'encryptedTitle', 'encryptedContent', - 'starred', 'archived', 'deleted', 'hidden', 'folderId' -]; -``` - -Fields like `id`, `userId`, `createdAt` are protected from modification. 
- -## Error Handling - -### Client Errors - -- **Authentication timeout**: Connection closed after 30 seconds without auth -- **Rate limit exceeded**: Temporary message rejection with retry advice -- **Invalid message format**: Error response with format requirements -- **Authorization failed**: Access denied for unauthorized operations - -### Server Errors - -- **Database errors**: Logged server-side, generic error to client -- **JWT validation errors**: Specific error types for debugging -- **Message processing errors**: Detailed logging with correlation IDs - -## Performance Considerations - -### Memory Management - -1. **Connection Tracking:** - - WeakMap references for automatic garbage collection - - Periodic cleanup of stale connections - - Memory usage monitoring and limits - -2. **Nonce Storage:** - - Time-based cleanup every 5 minutes - - Emergency cleanup at 10,000 nonces - - LRU eviction for memory efficiency - -3. **Message Queuing:** - - No persistent message queuing (stateless design) - - Clients responsible for handling missed messages - - Connection status indicators for client awareness - -### Scalability - -Current implementation uses in-memory storage suitable for single-instance deployments. 
For multi-instance scaling: - -- **Recommended**: Redis for shared rate limiting and nonce storage -- **Alternative**: Database-backed connection management -- **Load Balancing**: Sticky sessions or Redis pub/sub for message broadcasting - -## Development - -### Starting the WebSocket Server - -The WebSocket server starts automatically with the main API server: - -```bash -# Development -npm run dev - -# Production -npm run start -``` - -WebSocket endpoint available at: `ws://localhost:3000` (or configured port) - -### Debugging - -Enable detailed logging in development: - -```bash -# Set debug environment -DEBUG=websocket:* npm run dev - -# Or enable in code -console.log('WebSocket debug info:', { - connectionCount: connectionManager.getConnectionStats(), - messageType: message.type, - userId: ws.userId -}); -``` - -### Testing with Multiple Clients - -1. Open multiple browser tabs to your frontend -2. Authenticate each session -3. Join the same note in different tabs -4. Make changes in one tab to see real-time sync in others - -## API Integration - -### Server-Triggered Notifications - -The WebSocket manager provides methods for server-initiated sync: - -```typescript -// From REST API endpoints, trigger WebSocket sync -const wsManager = WebSocketManager.getInstance(); - -// Notify user's devices of note changes -wsManager?.notifyNoteUpdate(userId, noteId, changes, updatedNote); -wsManager?.notifyNoteCreated(userId, noteData); -wsManager?.notifyNoteDeleted(userId, noteId); - -// Notify folder changes -wsManager?.notifyFolderCreated(userId, folderData); -wsManager?.notifyFolderUpdated(userId, folderId, changes, updatedFolder); -wsManager?.notifyFolderDeleted(userId, folderId); -``` - -### Connection Statistics - -Monitor WebSocket health via REST endpoint: - -```http -GET /websocket/status - -Response: -{ - "status": "healthy", - "connections": { - "total": 15, - "authenticated": 12, - "perUser": [ - {"userId": "user_123", "deviceCount": 3} - ] - }, - "uptime": 
"2h 30m" -} -``` - -## Security Best Practices - -### Production Deployment - -1. **Use HTTPS/WSS**: Always use secure WebSocket connections in production -2. **JWT Secret Rotation**: Regularly rotate Clerk secret keys -3. **Rate Limiting**: Configure appropriate limits based on usage patterns -4. **Connection Monitoring**: Track connection patterns for abuse detection -5. **Error Logging**: Log security events without exposing sensitive data - -### Client Implementation - -1. **Token Management**: Handle JWT token refresh gracefully -2. **Reconnection Logic**: Implement exponential backoff for reconnections -3. **Message Validation**: Validate all incoming messages on client-side -4. **Error Handling**: Graceful degradation when WebSocket unavailable - -### Monitoring - -1. **Connection Metrics**: Track connection counts and patterns -2. **Message Metrics**: Monitor message rates and types -3. **Error Rates**: Alert on authentication failures or rate limiting -4. **Performance**: Monitor message latency and processing time - -## Troubleshooting - -### Common Issues - -**WebSocket connections fail:** -- Verify CORS configuration allows WebSocket upgrades -- Check that Clerk secret key is correctly configured -- Ensure no proxy/firewall blocking WebSocket connections - -**Authentication errors:** -- Validate JWT tokens are not expired -- Check Clerk configuration matches frontend -- Verify token is passed correctly in auth message - -**Missing sync messages:** -- Ensure clients properly join notes with `join_note` message -- Check that database updates are triggering WebSocket broadcasts -- Verify no errors in message handlers preventing broadcast - -**Memory usage issues:** -- Monitor nonce storage cleanup frequency -- Check for connection leaks in connection manager -- Review rate limiting settings for efficiency - -### Debug Logging - -Key logging points for troubleshooting: - -```typescript -// Connection events -console.log(`WebSocket connection established for 
user ${userId}`); - -// Authentication events -console.log(`JWT authentication successful for user ${userId}`); - -// Message processing -console.log(`Processing ${message.type} message from user ${userId}`); - -// Broadcasting -console.log(`Broadcasted to ${sentCount} devices for user ${userId}`); -``` - -## Contributing - -When modifying the WebSocket implementation: - -1. **Update type definitions** in `src/websocket/types.ts` -2. **Maintain backward compatibility** with existing message formats -3. **Add security validation** for new message types -4. **Update this documentation** with any protocol changes -5. **Test with multiple clients** to verify real-time sync works -6. **Consider rate limiting impact** of new message types - -## License - -This WebSocket implementation is part of the Typelets API and follows the same MIT license. \ No newline at end of file diff --git a/eslint.config.js b/eslint.config.mjs similarity index 52% rename from eslint.config.js rename to eslint.config.mjs index 7c55cae..9863ee8 100644 --- a/eslint.config.js +++ b/eslint.config.mjs @@ -1,11 +1,13 @@ -// eslint.config.js (replace your old .eslintrc file) import js from "@eslint/js"; import typescript from "@typescript-eslint/eslint-plugin"; import typescriptParser from "@typescript-eslint/parser"; import globals from "globals"; +import prettier from "eslint-plugin-prettier"; +import prettierConfig from "eslint-config-prettier"; export default [ js.configs.recommended, + prettierConfig, { files: ["**/*.{js,mjs,cjs,ts,tsx}"], languageOptions: { @@ -15,8 +17,8 @@ export default [ sourceType: "module", }, globals: { - ...globals.node, // Adds Node.js globals like 'process', 'Buffer', etc. 
- ...globals.es2021, // Adds modern JavaScript globals + ...globals.node, + ...globals.es2021, RequestInit: "readonly", Request: "readonly", Response: "readonly", @@ -27,20 +29,39 @@ export default [ }, plugins: { "@typescript-eslint": typescript, + prettier: prettier, }, rules: { - // Add your custom rules here - "@typescript-eslint/no-unused-vars": ["warn", { - "argsIgnorePattern": "^_", - "varsIgnorePattern": "^_", - "ignoreRestSiblings": true - }], + // TypeScript rules + "@typescript-eslint/no-unused-vars": [ + "warn", + { + argsIgnorePattern: "^_", + varsIgnorePattern: "^_", + ignoreRestSiblings: true, + }, + ], "@typescript-eslint/no-explicit-any": "warn", "no-undef": "error", - "no-unused-vars": "off", // Turn off base rule to avoid conflicts with @typescript-eslint version + "no-unused-vars": "off", + + // Prettier rules + "prettier/prettier": [ + "error", + { + semi: true, + trailingComma: "es5", + singleQuote: false, + printWidth: 100, + tabWidth: 2, + useTabs: false, + arrowParens: "always", + endOfLine: "auto", + }, + ], }, }, { - ignores: ["dist/**", "build/**", "node_modules/**", "*.config.js"], + ignores: ["dist/**", "build/**", "node_modules/**", "*.config.js", "*.config.mjs"], }, ]; diff --git a/newrelic.js b/newrelic.js index 3d98ab6..5944731 100644 --- a/newrelic.js +++ b/newrelic.js @@ -1,33 +1,45 @@ -'use strict' +"use strict"; /** * New Relic agent configuration. */ exports.config = { - app_name: [process.env.NEW_RELIC_APP_NAME || 'Typelets API'], - license_key: process.env.NEW_RELIC_LICENSE_KEY || '', + app_name: [process.env.NEW_RELIC_APP_NAME || "Typelets API"], + license_key: process.env.NEW_RELIC_LICENSE_KEY || "", logging: { - level: process.env.NEW_RELIC_LOG_LEVEL || 'info', - filepath: 'stdout' + level: + process.env.NEW_RELIC_LOG_LEVEL || (process.env.NODE_ENV === "production" ? 
"info" : "warn"), + filepath: "stdout", }, application_logging: { + enabled: true, forwarding: { - enabled: true - } + enabled: true, + max_samples_stored: 10000, + }, + metrics: { + enabled: true, + }, + local_decorating: { + enabled: true, + }, + }, + distributed_tracing: { + enabled: true, }, allow_all_headers: true, attributes: { exclude: [ - 'request.headers.cookie', - 'request.headers.authorization', - 'request.headers.proxyAuthorization', - 'request.headers.setCookie*', - 'request.headers.x*', - 'response.headers.cookie', - 'response.headers.authorization', - 'response.headers.proxyAuthorization', - 'response.headers.setCookie*', - 'response.headers.x*' - ] - } -} + "request.headers.cookie", + "request.headers.authorization", + "request.headers.proxyAuthorization", + "request.headers.setCookie*", + "request.headers.x*", + "response.headers.cookie", + "response.headers.authorization", + "response.headers.proxyAuthorization", + "response.headers.setCookie*", + "response.headers.x*", + ], + }, +}; diff --git a/package.json b/package.json index 9541067..d086dc1 100644 --- a/package.json +++ b/package.json @@ -45,18 +45,21 @@ "dotenv-flow": "^4.1.0", "drizzle-orm": "^0.44.2", "hono": "^4.8.3", + "ioredis": "^5.8.1", + "newrelic": "latest", "postgres": "^3.4.7", "ws": "^8.18.3", - "zod": "^3.25.67", - "newrelic": "latest" + "zod": "^3.25.67" }, "devDependencies": { + "@eslint/js": "^9.37.0", "@semantic-release/changelog": "^6.0.3", "@semantic-release/commit-analyzer": "^13.0.1", "@semantic-release/exec": "^7.1.0", "@semantic-release/git": "^10.0.1", "@semantic-release/github": "^11.0.5", "@semantic-release/release-notes-generator": "^14.1.0", + "@types/newrelic": "^9.14.8", "@types/node": "^24.0.10", "@types/pg": "^8.15.4", "@typescript-eslint/eslint-plugin": "^8.38.0", @@ -64,6 +67,8 @@ "drizzle-kit": "^0.31.4", "esbuild": "^0.25.9", "eslint": "^8.57.1", + "eslint-config-prettier": "^10.1.8", + "eslint-plugin-prettier": "^5.5.4", "globals": "^16.3.0", "husky": 
"^9.1.7", "jest": "^30.0.3", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index bdfeb1e..987c47d 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -32,6 +32,9 @@ importers: hono: specifier: ^4.8.3 version: 4.9.6 + ioredis: + specifier: ^5.8.1 + version: 5.8.1 newrelic: specifier: latest version: 13.4.0 @@ -45,6 +48,9 @@ importers: specifier: ^3.25.67 version: 3.25.76 devDependencies: + '@eslint/js': + specifier: ^9.37.0 + version: 9.37.0 '@semantic-release/changelog': specifier: ^6.0.3 version: 6.0.3(semantic-release@24.2.7(typescript@5.9.2)) @@ -63,6 +69,9 @@ importers: '@semantic-release/release-notes-generator': specifier: ^14.1.0 version: 14.1.0(semantic-release@24.2.7(typescript@5.9.2)) + '@types/newrelic': + specifier: ^9.14.8 + version: 9.14.8 '@types/node': specifier: ^24.0.10 version: 24.3.1 @@ -84,6 +93,12 @@ importers: eslint: specifier: ^8.57.1 version: 8.57.1 + eslint-config-prettier: + specifier: ^10.1.8 + version: 10.1.8(eslint@8.57.1) + eslint-plugin-prettier: + specifier: ^5.5.4 + version: 5.5.4(eslint-config-prettier@10.1.8(eslint@8.57.1))(eslint@8.57.1)(prettier@3.6.2) globals: specifier: ^16.3.0 version: 16.3.0 @@ -633,6 +648,10 @@ packages: resolution: {integrity: sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + '@eslint/js@9.37.0': + resolution: {integrity: sha512-jaS+NJ+hximswBG6pjNX0uEJZkrT0zwpVi3BA3vX22aFGjJjmgSTSmPpZCRKmoBL5VY/M6p0xsSJx7rk7sy5gg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@grpc/grpc-js@1.14.0': resolution: {integrity: sha512-N8Jx6PaYzcTRNzirReJCtADVoq4z7+1KQ4E70jTg/koQiMoUSN1kbNjPOqpPbhMFhfU1/l7ixspPl8dNY+FoUg==} engines: {node: '>=12.10.0'} @@ -672,6 +691,9 @@ packages: resolution: {integrity: sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==} deprecated: Use @eslint/object-schema instead + '@ioredis/commands@1.4.0': + resolution: {integrity: 
sha512-aFT2yemJJo+TZCmieA7qnYGQooOS7QfNmYrzGtsYd3g9j5iDP8AimYYAesf79ohjbLG12XxC4nG5DyEnC88AsQ==} + '@isaacs/cliui@8.0.2': resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} engines: {node: '>=12'} @@ -1117,6 +1139,9 @@ packages: '@types/luxon@3.4.2': resolution: {integrity: sha512-TifLZlFudklWlMBfhubvgqTXRzLDI5pCbGa4P8a3wPyUQSW+1xQ5eDsreP9DWHX3tjq1ke96uYG/nwundroWcA==} + '@types/newrelic@9.14.8': + resolution: {integrity: sha512-rkOTEVR7Lui4TTEykDUxIxCbFkcI/yw3C8URLOWM84zjuHh9W35RAequHTEvGBbbrLCdn43FVTHMLji4uunDWQ==} + '@types/node@24.3.1': resolution: {integrity: sha512-3vXmQDXy+woz+gnrTvuvNrPzekOi+Ds0ReMxw0LzBiK3a+1k0kQn9f2NWk+lgD4rJehFUmYy2gMhJ2ZI+7YP9g==} @@ -1531,6 +1556,10 @@ packages: resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} engines: {node: '>=12'} + cluster-key-slot@1.1.2: + resolution: {integrity: sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==} + engines: {node: '>=0.10.0'} + co@4.6.0: resolution: {integrity: sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==} engines: {iojs: '>= 1.0.0', node: '>= 0.12.0'} @@ -1663,6 +1692,10 @@ packages: resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} engines: {node: '>=0.4.0'} + denque@2.1.0: + resolution: {integrity: sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==} + engines: {node: '>=0.10'} + dequal@2.0.3: resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} engines: {node: '>=6'} @@ -1886,6 +1919,26 @@ packages: resolution: {integrity: sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==} engines: {node: '>=12'} + eslint-config-prettier@10.1.8: + 
resolution: {integrity: sha512-82GZUjRS0p/jganf6q1rEO25VSoHH0hKPCTrgillPjdI/3bgBhAE1QzHrHTizjpRvy6pGAvKjDJtk2pF9NDq8w==} + hasBin: true + peerDependencies: + eslint: '>=7.0.0' + + eslint-plugin-prettier@5.5.4: + resolution: {integrity: sha512-swNtI95SToIz05YINMA6Ox5R057IMAmWZ26GqPxusAp1TZzj+IdY9tXNWWD3vkF/wEqydCONcwjTFpxybBqZsg==} + engines: {node: ^14.18.0 || >=16.0.0} + peerDependencies: + '@types/eslint': '>=8.0.0' + eslint: '>=8.0.0' + eslint-config-prettier: '>= 7.0.0 <10.0.0 || >=10.1.0' + prettier: '>=3.0.0' + peerDependenciesMeta: + '@types/eslint': + optional: true + eslint-config-prettier: + optional: true + eslint-scope@7.2.2: resolution: {integrity: sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} @@ -1959,6 +2012,9 @@ packages: fast-deep-equal@3.1.3: resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} + fast-diff@1.3.0: + resolution: {integrity: sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw==} + fast-glob@3.3.3: resolution: {integrity: sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==} engines: {node: '>=8.6.0'} @@ -2300,6 +2356,10 @@ packages: resolution: {integrity: sha512-2dYz766i9HprMBasCMvHMuazJ7u4WzhJwo5kb3iPSiW/iRYV6uPari3zHoqZlnuaR7V1bEiNMxikhp37rdBXbw==} engines: {node: '>=12'} + ioredis@5.8.1: + resolution: {integrity: sha512-Qho8TgIamqEPdgiMadJwzRMW3TudIg6vpg4YONokGDudy4eqRIJtDbVX72pfLBcWxvbn3qm/40TyGUObdW4tLQ==} + engines: {node: '>=12.22.0'} + is-arrayish@0.2.1: resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==} @@ -2622,9 +2682,15 @@ packages: lodash.capitalize@4.2.1: resolution: {integrity: sha512-kZzYOKspf8XVX5AvmQF94gQW0lejFVgb80G85bU4ZWzoJ6C03PQg3coYAUpSTpQWelrZELd3XWgHzw4Ck5kaIw==} + lodash.defaults@4.2.0: + 
resolution: {integrity: sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==} + lodash.escaperegexp@4.1.2: resolution: {integrity: sha512-TM9YBvyC84ZxE3rgfefxUWiQKLilstD6k7PTGt6wfbtXF8ixIJLOL3VYyV/z+ZiPLsVxAsKAFVwWlWeb2Y8Yyw==} + lodash.isarguments@3.1.0: + resolution: {integrity: sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg==} + lodash.isplainobject@4.0.6: resolution: {integrity: sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==} @@ -3098,6 +3164,10 @@ packages: resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} engines: {node: '>= 0.8.0'} + prettier-linter-helpers@1.0.0: + resolution: {integrity: sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==} + engines: {node: '>=6.0.0'} + prettier@3.6.2: resolution: {integrity: sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==} engines: {node: '>=14'} @@ -3167,6 +3237,14 @@ packages: resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} engines: {node: '>= 6'} + redis-errors@1.2.0: + resolution: {integrity: sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w==} + engines: {node: '>=4'} + + redis-parser@3.0.0: + resolution: {integrity: sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A==} + engines: {node: '>=4'} + registry-auth-token@5.1.0: resolution: {integrity: sha512-GdekYuwLXLxMuFTwAPg5UKGLW/UXzQrZvH/Zj791BQif5T05T0RsaLfHc9q3ZOKi7n+BoprPD9mJ0O0k4xzUlw==} engines: {node: '>=14'} @@ -3315,6 +3393,9 @@ packages: resolution: {integrity: sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==} engines: {node: '>=10'} + 
standard-as-callback@2.1.0: + resolution: {integrity: sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A==} + standardwebhooks@1.0.0: resolution: {integrity: sha512-BbHGOQK9olHPMvQNHWul6MYlrRTAOKn03rOe4A8O3CLWhNf4YHBqq2HJKKC+sfqpxiBY52pNeesD6jIiLDz8jg==} @@ -4104,6 +4185,8 @@ snapshots: '@eslint/js@8.57.1': {} + '@eslint/js@9.37.0': {} + '@grpc/grpc-js@1.14.0': dependencies: '@grpc/proto-loader': 0.8.0 @@ -4144,6 +4227,8 @@ snapshots: '@humanwhocodes/object-schema@2.0.3': {} + '@ioredis/commands@1.4.0': {} + '@isaacs/cliui@8.0.2': dependencies: string-width: 5.1.2 @@ -4803,6 +4888,8 @@ snapshots: '@types/luxon@3.4.2': {} + '@types/newrelic@9.14.8': {} + '@types/node@24.3.1': dependencies: undici-types: 7.10.0 @@ -5241,6 +5328,8 @@ snapshots: strip-ansi: 6.0.1 wrap-ansi: 7.0.0 + cluster-key-slot@1.1.2: {} + co@4.6.0: {} collect-v8-coverage@1.0.2: {} @@ -5349,6 +5438,8 @@ snapshots: delayed-stream@1.0.0: {} + denque@2.1.0: {} + dequal@2.0.3: {} detect-newline@3.1.0: {} @@ -5514,6 +5605,19 @@ snapshots: escape-string-regexp@5.0.0: {} + eslint-config-prettier@10.1.8(eslint@8.57.1): + dependencies: + eslint: 8.57.1 + + eslint-plugin-prettier@5.5.4(eslint-config-prettier@10.1.8(eslint@8.57.1))(eslint@8.57.1)(prettier@3.6.2): + dependencies: + eslint: 8.57.1 + prettier: 3.6.2 + prettier-linter-helpers: 1.0.0 + synckit: 0.11.11 + optionalDependencies: + eslint-config-prettier: 10.1.8(eslint@8.57.1) + eslint-scope@7.2.2: dependencies: esrecurse: 4.3.0 @@ -5644,6 +5748,8 @@ snapshots: fast-deep-equal@3.1.3: {} + fast-diff@1.3.0: {} + fast-glob@3.3.3: dependencies: '@nodelib/fs.stat': 2.0.5 @@ -5983,6 +6089,20 @@ snapshots: from2: 2.3.0 p-is-promise: 3.0.0 + ioredis@5.8.1: + dependencies: + '@ioredis/commands': 1.4.0 + cluster-key-slot: 1.1.2 + debug: 4.4.1 + denque: 2.1.0 + lodash.defaults: 4.2.0 + lodash.isarguments: 3.1.0 + redis-errors: 1.2.0 + redis-parser: 3.0.0 + standard-as-callback: 2.1.0 + transitivePeerDependencies: + - 
supports-color + is-arrayish@0.2.1: {} is-core-module@2.16.1: @@ -6466,8 +6586,12 @@ snapshots: lodash.capitalize@4.2.1: {} + lodash.defaults@4.2.0: {} + lodash.escaperegexp@4.1.2: {} + lodash.isarguments@3.1.0: {} + lodash.isplainobject@4.0.6: {} lodash.isstring@4.0.1: {} @@ -6850,6 +6974,10 @@ snapshots: prelude-ls@1.2.1: {} + prettier-linter-helpers@1.0.0: + dependencies: + fast-diff: 1.3.0 + prettier@3.6.2: {} pretty-bytes@5.6.0: {} @@ -6938,6 +7066,12 @@ snapshots: string_decoder: 1.1.1 util-deprecate: 1.0.2 + redis-errors@1.2.0: {} + + redis-parser@3.0.0: + dependencies: + redis-errors: 1.2.0 + registry-auth-token@5.1.0: dependencies: '@pnpm/npm-conf': 2.3.1 @@ -7095,6 +7229,8 @@ snapshots: dependencies: escape-string-regexp: 2.0.0 + standard-as-callback@2.1.0: {} + standardwebhooks@1.0.0: dependencies: '@stablelib/base64': 1.0.1 diff --git a/src/db/schema.ts b/src/db/schema.ts index 0316fe8..183134a 100644 --- a/src/db/schema.ts +++ b/src/db/schema.ts @@ -1,12 +1,4 @@ -import { - pgTable, - text, - timestamp, - boolean, - uuid, - integer, - index, -} from "drizzle-orm/pg-core"; +import { pgTable, text, timestamp, boolean, uuid, integer, index } from "drizzle-orm/pg-core"; import { relations } from "drizzle-orm"; export const users = pgTable("users", { @@ -72,18 +64,14 @@ export const fileAttachments = pgTable( mimeType: text("mime_type").notNull(), size: integer("size").notNull(), encryptedData: text("encrypted_data").notNull(), // Base64 encrypted content - encryptedTitle: text("encrypted_title") - .default("encrypted_placeholder") - .notNull(), // Encrypted filename with default placeholder + encryptedTitle: text("encrypted_title").default("encrypted_placeholder").notNull(), // Encrypted filename with default placeholder iv: text("iv").notNull(), // Initialization vector for decryption salt: text("salt").notNull(), // Salt used in encryption - uploadedAt: timestamp("uploaded_at", { withTimezone: true }) - .defaultNow() - .notNull(), + uploadedAt: 
timestamp("uploaded_at", { withTimezone: true }).defaultNow().notNull(), }, (table) => ({ noteIdIdx: index("idx_file_attachments_note_id").on(table.noteId), - }), + }) ); export const usersRelations = relations(users, ({ many }) => ({ @@ -119,15 +107,12 @@ export const notesRelations = relations(notes, ({ one, many }) => ({ attachments: many(fileAttachments), })); -export const fileAttachmentsRelations = relations( - fileAttachments, - ({ one }) => ({ - note: one(notes, { - fields: [fileAttachments.noteId], - references: [notes.id], - }), +export const fileAttachmentsRelations = relations(fileAttachments, ({ one }) => ({ + note: one(notes, { + fields: [fileAttachments.noteId], + references: [notes.id], }), -); +})); export type User = typeof users.$inferSelect; export type UserInsert = typeof users.$inferInsert; diff --git a/src/lib/cache-keys.ts b/src/lib/cache-keys.ts new file mode 100644 index 0000000..2877c6f --- /dev/null +++ b/src/lib/cache-keys.ts @@ -0,0 +1,35 @@ +// Cache key generators for consistent naming +export const CacheKeys = { + // User-related + userMetadata: (userId: string) => `user:${userId}:metadata`, + userUsage: (userId: string) => `user:${userId}:usage`, + + // Folder-related + foldersList: (userId: string) => `folders:${userId}`, + folderTree: (userId: string) => `folders:${userId}:tree`, + folderNoteCount: (folderId: string) => `folder:${folderId}:noteCount`, + + // Note-related + notesList: (userId: string, page: number) => `notes:${userId}:page:${page}`, + notesStarred: (userId: string) => `notes:${userId}:starred`, + notesArchived: (userId: string) => `notes:${userId}:archived`, + notesDeleted: (userId: string) => `notes:${userId}:deleted`, + notesDeletedCount: (userId: string) => `notes:${userId}:deletedCount`, + + // Attachment-related + noteAttachments: (noteId: string) => `attachments:note:${noteId}`, +} as const; + +// Cache TTL values (in seconds) +export const CacheTTL = { + userMetadata: 300, // 5 minutes + userUsage: 300, // 
5 minutes + foldersList: 600, // 10 minutes + folderTree: 900, // 15 minutes + folderNoteCount: 600, // 10 minutes + notesList: 120, // 2 minutes + notesStarred: 300, // 5 minutes + notesArchived: 300, // 5 minutes + notesDeleted: 300, // 5 minutes + noteAttachments: 1800, // 30 minutes +} as const; diff --git a/src/lib/cache.ts b/src/lib/cache.ts new file mode 100644 index 0000000..cb84cde --- /dev/null +++ b/src/lib/cache.ts @@ -0,0 +1,172 @@ +import { Cluster, ClusterOptions } from "ioredis"; +import { logger } from "./logger"; + +let client: Cluster | null = null; + +export function getCacheClient(): Cluster | null { + if (!process.env.VALKEY_HOST) { + logger.warn("VALKEY_HOST not configured, caching disabled"); + return null; + } + + if (!client) { + try { + const clusterOptions: ClusterOptions = { + dnsLookup: (address, callback) => callback(null, address), + redisOptions: { + tls: process.env.NODE_ENV === "production" ? {} : undefined, + connectTimeout: 5000, + }, + clusterRetryStrategy: (times) => { + if (times > 3) { + logger.error("Valkey connection failed after 3 retries"); + return null; + } + return Math.min(times * 200, 2000); + }, + }; + + client = new Cluster( + [ + { + host: process.env.VALKEY_HOST, + port: parseInt(process.env.VALKEY_PORT || "6379"), + }, + ], + clusterOptions + ); + + client.on("error", (err) => { + logger.error("Valkey client error", { error: err.message }, err); + }); + + client.on("connect", () => { + logger.info("Connected to Valkey cluster"); + }); + } catch (error) { + logger.error( + "Failed to initialize Valkey client", + { + error: error instanceof Error ? error.message : String(error), + }, + error instanceof Error ? 
error : undefined + ); + client = null; + } + } + + return client; +} + +export async function closeCache(): Promise { + if (client) { + await client.disconnect(); + client = null; + logger.info("Valkey connection closed"); + } +} + +// Cache utilities +export async function getCache(key: string): Promise { + const cache = getCacheClient(); + if (!cache) return null; + + const startTime = Date.now(); + try { + const data = await cache.get(key); + const duration = Date.now() - startTime; + const hit = data !== null; + + // Log cache operation with metrics + logger.cacheOperation("get", key, hit, duration); + + return data ? JSON.parse(data) : null; + } catch (error) { + logger.cacheError("get", key, error instanceof Error ? error : new Error(String(error))); + return null; + } +} + +export async function setCache(key: string, value: unknown, ttlSeconds?: number): Promise { + const cache = getCacheClient(); + if (!cache) return; + + const startTime = Date.now(); + try { + const serialized = JSON.stringify(value); + if (ttlSeconds) { + await cache.setex(key, ttlSeconds, serialized); + } else { + await cache.set(key, serialized); + } + const duration = Date.now() - startTime; + + // Log cache operation with metrics + logger.cacheOperation("set", key, undefined, duration, ttlSeconds); + } catch (error) { + logger.cacheError("set", key, error instanceof Error ? error : new Error(String(error))); + } +} + +export async function deleteCache(...keys: string[]): Promise { + const cache = getCacheClient(); + if (!cache || keys.length === 0) return; + + const startTime = Date.now(); + try { + await cache.del(...keys); + const duration = Date.now() - startTime; + + // Log cache operation with metrics (use first key as representative) + logger.cacheOperation("delete", keys[0], undefined, duration, undefined, keys.length); + } catch (error) { + logger.cacheError( + "delete", + keys.join(", "), + error instanceof Error ? 
error : new Error(String(error)) + ); + } +} + +export async function deleteCachePattern(pattern: string): Promise { + const cache = getCacheClient(); + if (!cache) return; + + try { + const keys: string[] = []; + + // In cluster mode, we need to scan all master nodes + const nodes = cache.nodes("master"); + + for (const node of nodes) { + let cursor = "0"; + do { + // Scan each master node individually + const result = await node.scan(cursor, "MATCH", pattern, "COUNT", 100); + cursor = result[0]; + keys.push(...result[1]); + } while (cursor !== "0"); + } + + if (keys.length > 0) { + // Delete in batches to avoid overwhelming the cluster + const batchSize = 100; + for (let i = 0; i < keys.length; i += batchSize) { + const batch = keys.slice(i, i + batchSize); + await cache.del(...batch); + } + + logger.info(`Deleted cache keys matching pattern`, { + pattern, + keyCount: keys.length, + nodeCount: nodes.length, + }); + } + } catch (error) { + logger.cacheError( + "pattern delete", + pattern, + error instanceof Error ? 
error : new Error(String(error)) + ); + } +} diff --git a/src/lib/logger.ts b/src/lib/logger.ts index f81b8fd..5817649 100644 --- a/src/lib/logger.ts +++ b/src/lib/logger.ts @@ -1,13 +1,17 @@ +import * as newrelic from "newrelic"; + interface LogLevel { level: string; priority: number; } +type LogMetadata = Record; + const LOG_LEVELS: Record = { - error: { level: 'error', priority: 0 }, - warn: { level: 'warn', priority: 1 }, - info: { level: 'info', priority: 2 }, - debug: { level: 'debug', priority: 3 } + error: { level: "error", priority: 0 }, + warn: { level: "warn", priority: 1 }, + info: { level: "info", priority: 2 }, + debug: { level: "debug", priority: 3 }, }; class Logger { @@ -15,14 +19,18 @@ class Logger { private service: string; private version: string; private currentLogLevel: LogLevel; + private newrelicEnabled: boolean; constructor() { - this.environment = process.env.NODE_ENV || 'development'; - this.service = 'typelets-api'; - this.version = process.env.npm_package_version || '1.0.0'; + this.environment = process.env.NODE_ENV || "development"; + this.service = "typelets-api"; + this.version = process.env.npm_package_version || "1.0.0"; + // Check if New Relic is properly initialized + this.newrelicEnabled = typeof newrelic === "object" && !!newrelic; // Set log level based on environment - const logLevelName = process.env.LOG_LEVEL || (this.environment === 'production' ? 'info' : 'debug'); + const logLevelName = + process.env.LOG_LEVEL || (this.environment === "production" ? 
"info" : "debug"); this.currentLogLevel = LOG_LEVELS[logLevelName] || LOG_LEVELS.info; } @@ -30,7 +38,7 @@ class Logger { return level.priority <= this.currentLogLevel.priority; } - private formatLog(level: string, message: string, meta: Record = {}): string { + private formatLog(level: string, message: string, meta: LogMetadata = {}): string { const logEntry = { timestamp: new Date().toISOString(), level, @@ -38,94 +46,234 @@ class Logger { environment: this.environment, version: this.version, message, - ...meta + ...meta, }; return JSON.stringify(logEntry); } - error(message: string, meta: Record = {}): void { + error(message: string, meta: LogMetadata = {}, error?: Error): void { if (this.shouldLog(LOG_LEVELS.error)) { - console.error(this.formatLog('error', message, meta)); + console.error(this.formatLog("error", message, meta)); + + // Send error to New Relic + if (this.newrelicEnabled) { + if (error) { + newrelic.noticeError(error, meta); + } else { + newrelic.noticeError(new Error(message), meta); + } + } } } - warn(message: string, meta: Record = {}): void { + warn(message: string, meta: LogMetadata = {}): void { if (this.shouldLog(LOG_LEVELS.warn)) { - console.warn(this.formatLog('warn', message, meta)); + console.warn(this.formatLog("warn", message, meta)); + + // Log warning as custom event in New Relic + if (this.newrelicEnabled) { + newrelic.recordCustomEvent("ApplicationWarning", { + message, + ...meta, + }); + } } } - info(message: string, meta: Record = {}): void { + info(message: string, meta: LogMetadata = {}): void { if (this.shouldLog(LOG_LEVELS.info)) { - console.log(this.formatLog('info', message, meta)); + console.log(this.formatLog("info", message, meta)); } } - debug(message: string, meta: Record = {}): void { + debug(message: string, meta: LogMetadata = {}): void { if (this.shouldLog(LOG_LEVELS.debug)) { - console.log(this.formatLog('debug', message, meta)); + console.log(this.formatLog("debug", message, meta)); } } // Special methods for 
different types of events - httpRequest(method: string, path: string, statusCode: number, duration: number, userId?: string): void { - this.info('HTTP request completed', { - type: 'http_request', + httpRequest( + method: string, + path: string, + statusCode: number, + duration: number, + userId?: string + ): void { + this.info("HTTP request completed", { + type: "http_request", method, path, statusCode, duration, - userId: userId || 'anonymous' + userId: userId || "anonymous", }); + + // Record metrics to New Relic + if (this.newrelicEnabled) { + // Record response time + this.recordMetric("Custom/HTTP/ResponseTime", duration); + + // Record status code metrics + const statusCategory = Math.floor(statusCode / 100); + this.recordMetric(`Custom/HTTP/Status/${statusCategory}xx`, 1); + + // Record request count by method + this.recordMetric(`Custom/HTTP/Method/${method}`, 1); + } } websocketEvent(eventType: string, userId?: string, connectionCount?: number): void { - this.info('WebSocket event', { - type: 'websocket_event', + const meta: LogMetadata = { + type: "websocket_event", eventType, - userId: userId || 'anonymous', - connectionCount - }); + userId: userId || "anonymous", + }; + + if (connectionCount !== undefined) { + meta.connectionCount = connectionCount; + } + + this.info("WebSocket event", meta); } databaseQuery(operation: string, table: string, duration: number, userId?: string): void { - this.debug('Database query executed', { - type: 'database_query', + this.debug("Database query executed", { + type: "database_query", operation, table, duration, - userId: userId || 'anonymous' + userId: userId || "anonymous", }); } codeExecution(languageId: number, duration: number, success: boolean, userId?: string): void { - this.info('Code execution completed', { - type: 'code_execution', + this.info("Code execution completed", { + type: "code_execution", languageId, duration, success, - userId: userId || 'anonymous' + userId: userId || "anonymous", }); } - 
businessEvent(eventName: string, userId: string, metadata: Record = {}): void { - this.info('Business event', { - type: 'business_event', + businessEvent(eventName: string, userId: string, metadata: LogMetadata = {}): void { + this.info("Business event", { + type: "business_event", eventName, userId, - ...metadata + ...metadata, }); + + // Record business event to New Relic + if (this.newrelicEnabled) { + this.recordCustomEvent("BusinessEvent", { + eventName, + userId, + ...metadata, + }); + this.recordMetric(`Custom/Business/${eventName}`, 1); + } } - securityEvent(eventType: string, severity: 'low' | 'medium' | 'high' | 'critical', details: Record): void { - this.warn('Security event detected', { - type: 'security_event', + securityEvent( + eventType: string, + severity: "low" | "medium" | "high" | "critical", + details: LogMetadata + ): void { + this.warn("Security event detected", { + type: "security_event", eventType, severity, - ...details + ...details, }); + + // Record security event to New Relic + if (this.newrelicEnabled) { + this.recordCustomEvent("SecurityEvent", { + eventType, + severity, + ...details, + }); + this.recordMetric(`Custom/Security/${severity}`, 1); + } + } + + // New Relic specific methods + recordMetric(name: string, value: number): void { + if (this.newrelicEnabled) { + newrelic.recordMetric(name, value); + } + } + + recordCustomEvent(eventType: string, attributes: LogMetadata): void { + if (this.newrelicEnabled) { + newrelic.recordCustomEvent(eventType, attributes); + } + } + + // Cache-specific logging methods + cacheOperation( + operation: "get" | "set" | "delete", + key: string, + hit?: boolean, + duration?: number, + ttl?: number, + keyCount?: number + ): void { + const meta: LogMetadata = { + type: "cache_operation", + operation, + key, + }; + + if (hit !== undefined) meta.hit = hit; + if (duration !== undefined) meta.duration = duration; + if (ttl !== undefined) meta.ttl = ttl; + if (keyCount !== undefined) meta.keyCount = 
keyCount; + + // Log at debug level + this.debug(`Cache ${operation}${hit !== undefined ? (hit ? " HIT" : " MISS") : ""}`, meta); + + // Send metrics to New Relic + if (this.newrelicEnabled) { + if (operation === "get" && hit !== undefined) { + this.recordMetric(`Custom/Cache/${hit ? "Hit" : "Miss"}`, 1); + if (duration !== undefined) { + this.recordMetric("Custom/Cache/GetDuration", duration); + } + } else if (operation === "set" && duration !== undefined) { + this.recordMetric("Custom/Cache/Set", 1); + this.recordMetric("Custom/Cache/SetDuration", duration); + } else if (operation === "delete") { + this.recordMetric("Custom/Cache/Delete", keyCount || 1); + if (duration !== undefined) { + this.recordMetric("Custom/Cache/DeleteDuration", duration); + } + } + + // Record custom event for detailed analysis + this.recordCustomEvent("CacheOperation", meta); + } + } + + cacheError(operation: string, key: string, error: Error): void { + this.error( + `Cache ${operation} error for key ${key}`, + { + type: "cache_error", + operation, + key, + error: error.message, + }, + error + ); + + if (this.newrelicEnabled) { + this.recordMetric("Custom/Cache/Error", 1); + } } } -export const logger = new Logger(); \ No newline at end of file +export const logger = new Logger(); diff --git a/src/lib/validation.ts b/src/lib/validation.ts index 69d74f8..2bd0029 100644 --- a/src/lib/validation.ts +++ b/src/lib/validation.ts @@ -27,22 +27,18 @@ export const updateFolderSchema = z.object({ }); export const reorderFolderSchema = z.object({ - newIndex: z - .number() - .int() - .min(0) - .describe("New position index for the folder"), + newIndex: z.number().int().min(0).describe("New position index for the folder"), }); export const createNoteSchema = z.object({ - title: z.string().refine( - (value) => value === "[ENCRYPTED]", - "Title must be '[ENCRYPTED]'" - ).optional(), - content: z.string().refine( - (value) => value === "[ENCRYPTED]", - "Content must be '[ENCRYPTED]'" - ).optional(), + 
title: z + .string() + .refine((value) => value === "[ENCRYPTED]", "Title must be '[ENCRYPTED]'") + .optional(), + content: z + .string() + .refine((value) => value === "[ENCRYPTED]", "Content must be '[ENCRYPTED]'") + .optional(), folderId: z.string().uuid().nullable().optional(), starred: z.boolean().optional(), tags: z.array(z.string().max(50)).max(20).optional(), @@ -54,14 +50,14 @@ export const createNoteSchema = z.object({ }); export const updateNoteSchema = z.object({ - title: z.string().refine( - (value) => value === "[ENCRYPTED]", - "Title must be '[ENCRYPTED]'" - ).optional(), - content: z.string().refine( - (value) => value === "[ENCRYPTED]", - "Content must be '[ENCRYPTED]'" - ).optional(), + title: z + .string() + .refine((value) => value === "[ENCRYPTED]", "Title must be '[ENCRYPTED]'") + .optional(), + content: z + .string() + .refine((value) => value === "[ENCRYPTED]", "Content must be '[ENCRYPTED]'") + .optional(), folderId: z.string().uuid().nullable().optional(), starred: z.boolean().optional(), archived: z.boolean().optional(), @@ -103,15 +99,15 @@ export const foldersQuerySchema = z // Allowed MIME types for security const allowedMimeTypes = [ - 'image/jpeg', - 'image/png', - 'image/gif', - 'image/webp', - 'application/pdf', - 'text/plain', - 'text/markdown', - 'application/json', - 'text/csv', + "image/jpeg", + "image/png", + "image/gif", + "image/webp", + "application/pdf", + "text/plain", + "text/markdown", + "application/json", + "text/csv", ] as const; export const uploadFileSchema = z.object({ @@ -119,28 +115,23 @@ export const uploadFileSchema = z.object({ .string() .min(1) .max(255) - .refine( - (name) => { - // Check for dangerous characters and patterns - const dangerousChars = /[<>:"/\\|?*]/; - // Check for control characters (ASCII 0-31) - const hasControlChars = name.split('').some(char => { - const code = char.charCodeAt(0); - return code >= 0 && code <= 31; - }); - const dangerousPatterns = /^\./; // Files starting with dot + 
.refine((name) => { + // Check for dangerous characters and patterns + const dangerousChars = /[<>:"/\\|?*]/; + // Check for control characters (ASCII 0-31) + const hasControlChars = name.split("").some((char) => { + const code = char.charCodeAt(0); + return code >= 0 && code <= 31; + }); + const dangerousPatterns = /^\./; // Files starting with dot - return !dangerousChars.test(name) && - !hasControlChars && - !dangerousPatterns.test(name); - }, - "Invalid filename characters" - ), + return !dangerousChars.test(name) && !hasControlChars && !dangerousPatterns.test(name); + }, "Invalid filename characters"), mimeType: z .string() .refine( - (type): type is typeof allowedMimeTypes[number] => - allowedMimeTypes.includes(type as typeof allowedMimeTypes[number]), + (type): type is (typeof allowedMimeTypes)[number] => + allowedMimeTypes.includes(type as (typeof allowedMimeTypes)[number]), "File type not allowed" ), size: z diff --git a/src/middleware/auth.ts b/src/middleware/auth.ts index f7b338d..e3deb34 100644 --- a/src/middleware/auth.ts +++ b/src/middleware/auth.ts @@ -3,6 +3,7 @@ import { HTTPException } from "hono/http-exception"; import { verifyToken } from "@clerk/backend"; import { db, users, folders, type User } from "../db"; import { eq } from "drizzle-orm"; +import { logger } from "../lib/logger"; import type { ClerkUserData, ClerkJWTPayload, @@ -13,7 +14,7 @@ import type { if (!process.env.CLERK_SECRET_KEY) { throw new Error( - "Missing Clerk Secret Key - Please add CLERK_SECRET_KEY to your environment variables", + "Missing Clerk Secret Key - Please add CLERK_SECRET_KEY to your environment variables" ); } @@ -25,9 +26,7 @@ declare module "hono" { } } -const extractAndVerifyClerkToken = async ( - c: Context, -): Promise => { +const extractAndVerifyClerkToken = async (c: Context): Promise => { const authHeader = c.req.header("Authorization"); if (!authHeader || !authHeader.startsWith("Bearer ")) { @@ -42,25 +41,21 @@ const extractAndVerifyClerkToken = async ( 
})) as unknown as ClerkJWTPayload; try { - const userResponse = await fetch( - `https://api.clerk.com/v1/users/${payload.sub}`, - { - headers: { - Authorization: `Bearer ${process.env.CLERK_SECRET_KEY}`, - "Content-Type": "application/json", - }, + const userResponse = await fetch(`https://api.clerk.com/v1/users/${payload.sub}`, { + headers: { + Authorization: `Bearer ${process.env.CLERK_SECRET_KEY}`, + "Content-Type": "application/json", }, - ); + }); if (userResponse.ok) { const clerkUser: ClerkApiUser = await userResponse.json(); - const userData: ClerkUserData = { + return { id: clerkUser.id, email: clerkUser.email_addresses?.[0]?.email_address || "", firstName: clerkUser.first_name || null, lastName: clerkUser.last_name || null, }; - return userData; } else { return { id: payload.sub, @@ -99,10 +94,8 @@ export const authMiddleware = async (c: Context, next: Next) => { try { const updateData: UserUpdateData = {}; if (userData.email) updateData.email = userData.email; - if (userData.firstName !== undefined) - updateData.firstName = userData.firstName; - if (userData.lastName !== undefined) - updateData.lastName = userData.lastName; + if (userData.firstName !== undefined) updateData.firstName = userData.firstName; + if (userData.lastName !== undefined) updateData.lastName = userData.lastName; if (Object.keys(updateData).length > 0) { const [updatedUser] = await db @@ -138,7 +131,7 @@ export const authMiddleware = async (c: Context, next: Next) => { defaultFolders.map((folder) => ({ ...folder, userId: newUser.id, - })), + })) ); existingUser = newUser; @@ -146,8 +139,7 @@ export const authMiddleware = async (c: Context, next: Next) => { const dbError = error as DatabaseError; if ( dbError.code === "23505" && - (dbError.constraint_name === "users_pkey" || - dbError.detail?.includes("already exists")) + (dbError.constraint_name === "users_pkey" || dbError.detail?.includes("already exists")) ) { existingUser = await db.query.users.findFirst({ where: eq(users.id, 
userData.id), @@ -159,7 +151,14 @@ export const authMiddleware = async (c: Context, next: Next) => { }); } } else { - console.error("Database error creating user:", error); + logger.error( + "Database error creating user", + { + userId: userData.id, + error: error instanceof Error ? error.message : String(error), + }, + error instanceof Error ? error : undefined + ); throw new HTTPException(500, { message: "Failed to create user profile", }); diff --git a/src/middleware/rate-limit.ts b/src/middleware/rate-limit.ts index 181bcfe..631392e 100644 --- a/src/middleware/rate-limit.ts +++ b/src/middleware/rate-limit.ts @@ -36,7 +36,10 @@ class InMemoryRateLimitStore { const store = new InMemoryRateLimitStore(); // Cleanup expired entries every 5 minutes -let cleanupInterval: ReturnType | null = setInterval(() => store.cleanup(), 5 * 60 * 1000); +let cleanupInterval: ReturnType | null = setInterval( + () => store.cleanup(), + 5 * 60 * 1000 +); // Graceful cleanup function export const cleanup = (): void => { @@ -61,10 +64,11 @@ export const rateLimit = (options: RateLimitOptions) => { keyGenerator = (c: Context) => { // Use combination of IP and user ID for authenticated requests const userId = c.get("userId"); - const ip = c.env?.CF_CONNECTING_IP || - c.req.header("x-forwarded-for")?.split(",")[0] || - c.req.header("x-real-ip") || - "unknown"; + const ip = + c.env?.CF_CONNECTING_IP || + c.req.header("x-forwarded-for")?.split(",")[0] || + c.req.header("x-real-ip") || + "unknown"; return userId ? 
`${userId}:${ip}` : ip; }, skipSuccessfulRequests = false, @@ -93,7 +97,7 @@ export const rateLimit = (options: RateLimitOptions) => { limit: max, remaining: 0, reset: entry.resetTime, - } + }, }); } @@ -106,11 +110,10 @@ export const rateLimit = (options: RateLimitOptions) => { // Optionally skip counting successful/failed requests const shouldSkip = - (skipSuccessfulRequests && c.res.status < 400) || - (skipFailedRequests && c.res.status >= 400); + (skipSuccessfulRequests && c.res.status < 400) || (skipFailedRequests && c.res.status >= 400); if (shouldSkip) { entry.count--; } }; -}; \ No newline at end of file +}; diff --git a/src/middleware/security.ts b/src/middleware/security.ts index 866f6ef..a0927a9 100644 --- a/src/middleware/security.ts +++ b/src/middleware/security.ts @@ -7,17 +7,17 @@ export const securityHeaders = async (c: Context, next: Next): Promise => c.res.headers.set( "Content-Security-Policy", "default-src 'self'; " + - "script-src 'self'; " + - "style-src 'self' 'unsafe-inline'; " + - "img-src 'self' data: https:; " + - "font-src 'self'; " + - "connect-src 'self'; " + - "media-src 'self'; " + - "object-src 'none'; " + - "base-uri 'self'; " + - "form-action 'self'; " + - "frame-ancestors 'none'; " + - "upgrade-insecure-requests" + "script-src 'self'; " + + "style-src 'self' 'unsafe-inline'; " + + "img-src 'self' data: https:; " + + "font-src 'self'; " + + "connect-src 'self'; " + + "media-src 'self'; " + + "object-src 'none'; " + + "base-uri 'self'; " + + "form-action 'self'; " + + "frame-ancestors 'none'; " + + "upgrade-insecure-requests" ); // Security headers @@ -35,4 +35,4 @@ export const securityHeaders = async (c: Context, next: Next): Promise => // Remove server identification c.res.headers.delete("Server"); c.res.headers.delete("X-Powered-By"); -}; \ No newline at end of file +}; diff --git a/src/middleware/usage.ts b/src/middleware/usage.ts index 3366b1c..0c957af 100644 --- a/src/middleware/usage.ts +++ b/src/middleware/usage.ts @@ 
-9,25 +9,22 @@ import { HTTPException } from "hono/http-exception"; export const checkNoteLimits = async (c: Context, next: Next) => { const userId = c.get("userId"); - const FREE_TIER_NOTE_LIMIT = process.env.FREE_TIER_NOTE_LIMIT ? parseInt(process.env.FREE_TIER_NOTE_LIMIT) : 1000; - + const FREE_TIER_NOTE_LIMIT = process.env.FREE_TIER_NOTE_LIMIT + ? parseInt(process.env.FREE_TIER_NOTE_LIMIT) + : 1000; + const { db, notes } = await import("../db"); const { eq, and, count, isNull, or } = await import("drizzle-orm"); - + const noteCountResult = await db .select({ count: count(), }) .from(notes) - .where( - and( - eq(notes.userId, userId), - or(isNull(notes.deleted), eq(notes.deleted, false)) - ) - ); - + .where(and(eq(notes.userId, userId), or(isNull(notes.deleted), eq(notes.deleted, false)))); + const currentNoteCount = noteCountResult[0]?.count || 0; - + if (currentNoteCount >= FREE_TIER_NOTE_LIMIT) { throw new HTTPException(402, { message: `Note limit reached. You have ${currentNoteCount}/${FREE_TIER_NOTE_LIMIT} notes. Upgrade to create more notes.`, @@ -39,41 +36,40 @@ export const checkNoteLimits = async (c: Context, next: Next) => { }, }); } - + await next(); }; export const checkStorageLimits = (expectedFileSizeBytes: number) => { return async (c: Context, next: Next) => { const userId = c.get("userId"); - - const FREE_TIER_STORAGE_GB = process.env.FREE_TIER_STORAGE_GB ? parseFloat(process.env.FREE_TIER_STORAGE_GB) : 1; + + const FREE_TIER_STORAGE_GB = process.env.FREE_TIER_STORAGE_GB + ? 
parseFloat(process.env.FREE_TIER_STORAGE_GB) + : 1; const FREE_TIER_STORAGE_BYTES = FREE_TIER_STORAGE_GB * 1024 * 1024 * 1024; - + const { db, fileAttachments, notes } = await import("../db"); const { eq, and, sum, isNull, or } = await import("drizzle-orm"); - + const storageResult = await db .select({ totalBytes: sum(fileAttachments.size), }) .from(fileAttachments) .innerJoin(notes, eq(fileAttachments.noteId, notes.id)) - .where( - and( - eq(notes.userId, userId), - or(isNull(notes.deleted), eq(notes.deleted, false)) - ) - ); - - const currentStorageBytes = storageResult[0]?.totalBytes ? Number(storageResult[0].totalBytes) : 0; + .where(and(eq(notes.userId, userId), or(isNull(notes.deleted), eq(notes.deleted, false)))); + + const currentStorageBytes = storageResult[0]?.totalBytes + ? Number(storageResult[0].totalBytes) + : 0; const currentStorageMB = Math.round((currentStorageBytes / (1024 * 1024)) * 100) / 100; const expectedTotalBytes = currentStorageBytes + expectedFileSizeBytes; const expectedTotalMB = Math.round((expectedTotalBytes / (1024 * 1024)) * 100) / 100; - + if (expectedTotalBytes > FREE_TIER_STORAGE_BYTES) { const fileSizeMB = Math.round((expectedFileSizeBytes / (1024 * 1024)) * 100) / 100; - + throw new HTTPException(402, { message: `Storage limit would be exceeded. Current: ${currentStorageMB}MB, File: ${fileSizeMB}MB, Total: ${expectedTotalMB}MB, Limit: ${FREE_TIER_STORAGE_GB}GB. Upgrade for more storage.`, cause: { @@ -86,10 +82,10 @@ export const checkStorageLimits = (expectedFileSizeBytes: number) => { }, }); } - + c.set("currentStorageMB", currentStorageMB); c.set("expectedTotalMB", expectedTotalMB); - + await next(); }; }; @@ -100,12 +96,16 @@ export const checkStorageLimits = (expectedFileSizeBytes: number) => { export const checkUsageLimits = async (c: Context, next: Next) => { const userId = c.get("userId"); - const FREE_TIER_STORAGE_GB = process.env.FREE_TIER_STORAGE_GB ? 
parseFloat(process.env.FREE_TIER_STORAGE_GB) : 1; - const FREE_TIER_NOTE_LIMIT = process.env.FREE_TIER_NOTE_LIMIT ? parseInt(process.env.FREE_TIER_NOTE_LIMIT) : 1000; - + const FREE_TIER_STORAGE_GB = process.env.FREE_TIER_STORAGE_GB + ? parseFloat(process.env.FREE_TIER_STORAGE_GB) + : 1; + const FREE_TIER_NOTE_LIMIT = process.env.FREE_TIER_NOTE_LIMIT + ? parseInt(process.env.FREE_TIER_NOTE_LIMIT) + : 1000; + const { db, fileAttachments, notes } = await import("../db"); const { eq, and, sum, count, isNull, or } = await import("drizzle-orm"); - + const [storageResult, noteCountResult] = await Promise.all([ db .select({ @@ -113,34 +113,26 @@ export const checkUsageLimits = async (c: Context, next: Next) => { }) .from(fileAttachments) .innerJoin(notes, eq(fileAttachments.noteId, notes.id)) - .where( - and( - eq(notes.userId, userId), - or(isNull(notes.deleted), eq(notes.deleted, false)) - ) - ), + .where(and(eq(notes.userId, userId), or(isNull(notes.deleted), eq(notes.deleted, false)))), db .select({ count: count(), }) .from(notes) - .where( - and( - eq(notes.userId, userId), - or(isNull(notes.deleted), eq(notes.deleted, false)) - ) - ), + .where(and(eq(notes.userId, userId), or(isNull(notes.deleted), eq(notes.deleted, false)))), ]); - - const currentStorageBytes = storageResult[0]?.totalBytes ? Number(storageResult[0].totalBytes) : 0; + + const currentStorageBytes = storageResult[0]?.totalBytes + ? 
Number(storageResult[0].totalBytes) + : 0; const currentStorageGB = currentStorageBytes / (1024 * 1024 * 1024); const currentNoteCount = noteCountResult[0]?.count || 0; - + c.set("currentStorageBytes", currentStorageBytes); c.set("currentStorageGB", currentStorageGB); c.set("currentNoteCount", currentNoteCount); c.set("storageLimitGB", FREE_TIER_STORAGE_GB); c.set("noteLimitCount", FREE_TIER_NOTE_LIMIT); - + await next(); -}; \ No newline at end of file +}; diff --git a/src/routes/code.ts b/src/routes/code.ts index f5223f2..abddff8 100644 --- a/src/routes/code.ts +++ b/src/routes/code.ts @@ -2,6 +2,7 @@ import { Hono } from "hono"; import { zValidator } from "@hono/zod-validator"; import { HTTPException } from "hono/http-exception"; import { z } from "zod"; +import { logger } from "../lib/logger"; const codeRouter = new Hono(); @@ -10,7 +11,7 @@ const JUDGE0_API_KEY = process.env.JUDGE0_API_KEY; const JUDGE0_API_HOST = process.env.JUDGE0_API_HOST || "judge0-ce.p.rapidapi.com"; if (!JUDGE0_API_KEY) { - console.error("โŒ JUDGE0_API_KEY environment variable is required"); + logger.error("JUDGE0_API_KEY environment variable is required - code execution disabled"); process.exit(1); } @@ -51,25 +52,39 @@ async function makeJudge0Request(endpoint: string, options: RequestInit = {}) { clearTimeout(timeoutId); const duration = Date.now() - start; - console.log(`Judge0 API: ${options.method || 'GET'} ${endpoint} (${duration}ms)`); + logger.debug("Judge0 API request", { + method: options.method || "GET", + endpoint, + duration, + status: response.status, + }); if (!response.ok) { - const errorBody = await response.text().catch(() => ''); - console.error(`Judge0 API Error: ${response.status} ${response.statusText} - ${errorBody}`); + const errorBody = await response.text().catch(() => ""); + logger.error("Judge0 API Error", { + status: response.status, + statusText: response.statusText, + endpoint, + errorBody: errorBody.substring(0, 200), // Limit error body length + }); let 
clientMessage = "Code execution failed. Please try again."; let statusCode = response.status; if (response.status === 429) { - clientMessage = "Code execution service is temporarily busy. Please try again in a few minutes."; + clientMessage = + "Code execution service is temporarily busy. Please try again in a few minutes."; } else if (response.status === 401 || response.status === 403) { - clientMessage = "Code execution service is temporarily unavailable. Please contact support."; + clientMessage = + "Code execution service is temporarily unavailable. Please contact support."; statusCode = 503; } else if (response.status >= 500) { - clientMessage = "Code execution service is temporarily unavailable. Please try again later."; + clientMessage = + "Code execution service is temporarily unavailable. Please try again later."; statusCode = 503; } + // noinspection ExceptionCaughtLocallyJS throw new HTTPException(statusCode, { message: clientMessage, }); @@ -83,96 +98,101 @@ async function makeJudge0Request(endpoint: string, options: RequestInit = {}) { throw error; } - if (error.name === 'AbortError') { - console.error("Judge0 API timeout"); + if (error.name === "AbortError") { + logger.error("Judge0 API timeout", { endpoint }); throw new HTTPException(504, { - message: "Code execution timed out. Please try again." + message: "Code execution timed out. Please try again.", }); } - console.error("Judge0 API request failed:", error); + logger.error( + "Judge0 API request failed", + { + endpoint, + error: error instanceof Error ? error.message : String(error), + }, + error instanceof Error ? 
error : undefined + ); throw new HTTPException(503, { message: "Code execution service temporarily unavailable", }); } } -codeRouter.post( - "/execute", - zValidator("json", executeCodeSchema), - async (c) => { - try { - const body = c.req.valid("json"); - - const submissionData = { - ...body, - source_code: Buffer.from(body.source_code).toString("base64"), - stdin: Buffer.from(body.stdin || "").toString("base64"), - }; - - const executionStart = Date.now(); - const response = await makeJudge0Request("/submissions?base64_encoded=true", { - method: "POST", - body: JSON.stringify(submissionData), - }); - - const result = await response.json(); - const executionDuration = Date.now() - executionStart; +codeRouter.post("/execute", zValidator("json", executeCodeSchema), async (c) => { + try { + const body = c.req.valid("json"); + const submissionData = { + ...body, + source_code: Buffer.from(body.source_code).toString("base64"), + stdin: Buffer.from(body.stdin || "").toString("base64"), + }; - return c.json(result); - } catch (error) { - if (error instanceof HTTPException) { - throw error; - } + const response = await makeJudge0Request("/submissions?base64_encoded=true", { + method: "POST", + body: JSON.stringify(submissionData), + }); - console.error("Code execution error:", error); - throw new HTTPException(500, { - message: "Failed to submit code for execution", - }); + const result = await response.json(); + return c.json(result); + } catch (error) { + if (error instanceof HTTPException) { + throw error; } - } -); -codeRouter.get( - "/status/:token", - zValidator("param", tokenSchema), - async (c) => { - try { - const { token } = c.req.valid("param"); + logger.error( + "Code execution error", + { + error: error instanceof Error ? error.message : String(error), + }, + error instanceof Error ? 
error : undefined + ); + throw new HTTPException(500, { + message: "Failed to submit code for execution", + }); + } +}); - const response = await makeJudge0Request( - `/submissions/${token}?base64_encoded=true` - ); +codeRouter.get("/status/:token", zValidator("param", tokenSchema), async (c) => { + try { + const { token } = c.req.valid("param"); - const result = await response.json(); + const response = await makeJudge0Request(`/submissions/${token}?base64_encoded=true`); - if (result.stdout) { - result.stdout = Buffer.from(result.stdout, "base64").toString("utf-8"); - } - if (result.stderr) { - result.stderr = Buffer.from(result.stderr, "base64").toString("utf-8"); - } - if (result.compile_output) { - result.compile_output = Buffer.from(result.compile_output, "base64").toString("utf-8"); - } - if (result.message) { - result.message = Buffer.from(result.message, "base64").toString("utf-8"); - } + const result = await response.json(); - return c.json(result); - } catch (error) { - if (error instanceof HTTPException) { - throw error; - } + if (result.stdout) { + result.stdout = Buffer.from(result.stdout, "base64").toString("utf-8"); + } + if (result.stderr) { + result.stderr = Buffer.from(result.stderr, "base64").toString("utf-8"); + } + if (result.compile_output) { + result.compile_output = Buffer.from(result.compile_output, "base64").toString("utf-8"); + } + if (result.message) { + result.message = Buffer.from(result.message, "base64").toString("utf-8"); + } - console.error("Status check error:", error); - throw new HTTPException(500, { - message: "Failed to check execution status", - }); + return c.json(result); + } catch (error) { + if (error instanceof HTTPException) { + throw error; } + + logger.error( + "Status check error", + { + error: error instanceof Error ? error.message : String(error), + }, + error instanceof Error ? 
error : undefined + ); + throw new HTTPException(500, { + message: "Failed to check execution status", + }); } -); +}); codeRouter.get("/languages", async (c) => { try { @@ -184,7 +204,13 @@ codeRouter.get("/languages", async (c) => { throw error; } - console.error("Languages fetch error:", error); + logger.error( + "Languages fetch error", + { + error: error instanceof Error ? error.message : String(error), + }, + error instanceof Error ? error : undefined + ); throw new HTTPException(500, { message: "Failed to fetch supported languages", }); @@ -202,20 +228,32 @@ codeRouter.get("/health", async (c) => { timestamp: new Date().toISOString(), }); } else { - return c.json({ - status: "degraded", - judge0: "partial_connectivity", - timestamp: new Date().toISOString(), - }, 207); // Multi-status + return c.json( + { + status: "degraded", + judge0: "partial_connectivity", + timestamp: new Date().toISOString(), + }, + 207 + ); // Multi-status } } catch (error) { - console.error("Judge0 health check failed:", error); - return c.json({ - status: "unhealthy", - judge0: "disconnected", - timestamp: new Date().toISOString(), - }, 503); + logger.error( + "Judge0 health check failed", + { + error: error instanceof Error ? error.message : String(error), + }, + error instanceof Error ? error : undefined + ); + return c.json( + { + status: "unhealthy", + judge0: "disconnected", + timestamp: new Date().toISOString(), + }, + 503 + ); } }); -export default codeRouter; \ No newline at end of file +export default codeRouter; diff --git a/src/routes/files.ts b/src/routes/files.ts index 0fc8f20..8490ce6 100644 --- a/src/routes/files.ts +++ b/src/routes/files.ts @@ -9,81 +9,74 @@ import { checkStorageLimits } from "../middleware/usage"; const filesRouter = new Hono(); -const maxFileSize = process.env.MAX_FILE_SIZE_MB - ? parseInt(process.env.MAX_FILE_SIZE_MB) - : 50; -const maxNoteSize = process.env.MAX_NOTE_SIZE_MB - ? 
parseInt(process.env.MAX_NOTE_SIZE_MB) - : 1024; - -filesRouter.post( - "/notes/:noteId/files", - zValidator("json", uploadFileSchema), - async (c) => { - const userId = c.get("userId"); - const noteId = c.req.param("noteId"); - const data = c.req.valid("json"); - - await checkStorageLimits(data.size)(c, async () => {}); - - const note = await db.query.notes.findFirst({ - where: and(eq(notes.id, noteId), eq(notes.userId, userId)), +const maxFileSize = process.env.MAX_FILE_SIZE_MB ? parseInt(process.env.MAX_FILE_SIZE_MB) : 50; +const maxNoteSize = process.env.MAX_NOTE_SIZE_MB ? parseInt(process.env.MAX_NOTE_SIZE_MB) : 1024; + +filesRouter.post("/notes/:noteId/files", zValidator("json", uploadFileSchema), async (c) => { + const userId = c.get("userId"); + const noteId = c.req.param("noteId"); + const data = c.req.valid("json"); + + await checkStorageLimits(data.size)(c, async () => {}); + + const note = await db.query.notes.findFirst({ + where: and(eq(notes.id, noteId), eq(notes.userId, userId)), + }); + + if (!note) { + throw new HTTPException(403, { message: "Access denied" }); + } + + const maxFileSizeBytes = maxFileSize * 1024 * 1024; + if (data.size > maxFileSizeBytes) { + throw new HTTPException(413, { + message: `File too large. Maximum size is ${maxFileSize}MB`, }); + } + + // noinspection SqlNoDataSourceInspection + const result = await db + .select({ totalSize: sql`COALESCE(SUM(size), 0)` }) + .from(fileAttachments) + .where(eq(fileAttachments.noteId, noteId)); - if (!note) { - throw new HTTPException(403, { message: "Access denied" }); - } - - const maxFileSizeBytes = maxFileSize * 1024 * 1024; - if (data.size > maxFileSizeBytes) { - throw new HTTPException(413, { - message: `File too large. 
Maximum size is ${maxFileSize}MB`, - }); - } - - const result = await db - .select({ totalSize: sql`COALESCE(SUM(size), 0)` }) - .from(fileAttachments) - .where(eq(fileAttachments.noteId, noteId)); - - const totalSize = Number(result[0]?.totalSize || 0); - const newFileSize = Number(data.size); - const combinedSize = totalSize + newFileSize; - const maxNoteSizeBytes = maxNoteSize * 1024 * 1024; - - if (combinedSize > maxNoteSizeBytes) { - throw new HTTPException(413, { - message: `Total attachment size for this note would exceed ${maxNoteSize}MB limit`, - }); - } - - const filename = `${randomUUID()}_${Date.now()}`; - - const [newAttachment] = await db - .insert(fileAttachments) - .values({ - noteId, - filename, - originalName: data.originalName, - mimeType: data.mimeType, - size: data.size, - encryptedData: data.encryptedData, - iv: data.iv, - salt: data.salt, - }) - .returning({ - id: fileAttachments.id, - noteId: fileAttachments.noteId, - filename: fileAttachments.filename, - originalName: fileAttachments.originalName, - mimeType: fileAttachments.mimeType, - size: fileAttachments.size, - uploadedAt: fileAttachments.uploadedAt, - }); - - return c.json(newAttachment, 201); - }, -); + const totalSize = Number(result[0]?.totalSize || 0); + const newFileSize = Number(data.size); + const combinedSize = totalSize + newFileSize; + const maxNoteSizeBytes = maxNoteSize * 1024 * 1024; + + if (combinedSize > maxNoteSizeBytes) { + throw new HTTPException(413, { + message: `Total attachment size for this note would exceed ${maxNoteSize}MB limit`, + }); + } + + const filename = `${randomUUID()}_${Date.now()}`; + + const [newAttachment] = await db + .insert(fileAttachments) + .values({ + noteId, + filename, + originalName: data.originalName, + mimeType: data.mimeType, + size: data.size, + encryptedData: data.encryptedData, + iv: data.iv, + salt: data.salt, + }) + .returning({ + id: fileAttachments.id, + noteId: fileAttachments.noteId, + filename: fileAttachments.filename, + 
originalName: fileAttachments.originalName, + mimeType: fileAttachments.mimeType, + size: fileAttachments.size, + uploadedAt: fileAttachments.uploadedAt, + }); + + return c.json(newAttachment, 201); +}); filesRouter.get("/files/:fileId", async (c) => { const userId = c.get("userId"); diff --git a/src/routes/folders.ts b/src/routes/folders.ts index 6c08d65..93f8f9c 100644 --- a/src/routes/folders.ts +++ b/src/routes/folders.ts @@ -9,6 +9,9 @@ import { reorderFolderSchema, } from "../lib/validation"; import { eq, and, desc, count, asc, isNull } from "drizzle-orm"; +import { getCache, setCache, deleteCache } from "../lib/cache"; +import { CacheKeys, CacheTTL } from "../lib/cache-keys"; +import { logger } from "../lib/logger"; const foldersRouter = new Hono(); @@ -16,6 +19,15 @@ foldersRouter.get("/", zValidator("query", foldersQuerySchema), async (c) => { const userId = c.get("userId"); const query = c.req.valid("query"); + // Try cache first (only for page 1, no filters) + if (query.page === 1 && !query.parentId) { + const cacheKey = CacheKeys.foldersList(userId); + const cached = await getCache(cacheKey); + if (cached) { + return c.json(cached); + } + } + const conditions = [eq(folders.userId, userId)]; if (query.parentId !== undefined) { @@ -24,10 +36,7 @@ foldersRouter.get("/", zValidator("query", foldersQuerySchema), async (c) => { const whereClause = and(...conditions); - const [{ total }] = await db - .select({ total: count() }) - .from(folders) - .where(whereClause); + const [{ total }] = await db.select({ total: count() }).from(folders).where(whereClause); const offset = (query.page - 1) * query.limit; const userFolders = await db.query.folders.findMany({ @@ -53,7 +62,7 @@ foldersRouter.get("/", zValidator("query", foldersQuerySchema), async (c) => { notes: undefined, // Remove notes from response to keep it clean })); - return c.json({ + const result = { folders: foldersWithCounts, pagination: { page: query.page, @@ -61,7 +70,15 @@ foldersRouter.get("/", 
zValidator("query", foldersQuerySchema), async (c) => { total, pages: Math.ceil(total / query.limit), }, - }); + }; + + // Cache result (only for page 1, no filters) + if (query.page === 1 && !query.parentId) { + const cacheKey = CacheKeys.foldersList(userId); + await setCache(cacheKey, result, CacheTTL.foldersList); + } + + return c.json(result); }); foldersRouter.get("/:id", async (c) => { @@ -110,16 +127,13 @@ foldersRouter.post("/", zValidator("json", createFolderSchema), async (c) => { const existingFolders = await db.query.folders.findMany({ where: and( eq(folders.userId, userId), - data.parentId - ? eq(folders.parentId, data.parentId) - : isNull(folders.parentId), + data.parentId ? eq(folders.parentId, data.parentId) : isNull(folders.parentId) ), orderBy: [desc(folders.sortOrder)], limit: 1, }); - const nextSortOrder = - existingFolders.length > 0 ? (existingFolders[0].sortOrder || 0) + 1 : 0; + const nextSortOrder = existingFolders.length > 0 ? (existingFolders[0].sortOrder || 0) + 1 : 0; const [newFolder] = await db .insert(folders) @@ -130,6 +144,9 @@ foldersRouter.post("/", zValidator("json", createFolderSchema), async (c) => { }) .returning(); + // Invalidate cache + await deleteCache(CacheKeys.foldersList(userId), CacheKeys.folderTree(userId)); + return c.json(newFolder, 201); }); @@ -174,87 +191,85 @@ foldersRouter.put("/:id", zValidator("json", updateFolderSchema), async (c) => { .where(eq(folders.id, folderId)) .returning(); + // Invalidate cache + await deleteCache(CacheKeys.foldersList(userId), CacheKeys.folderTree(userId)); + return c.json(updatedFolder); }); -foldersRouter.put( - "/:id/reorder", - zValidator("json", reorderFolderSchema), - async (c) => { - const userId = c.get("userId"); - const folderId = c.req.param("id"); - const { newIndex } = c.req.valid("json"); - - // Check if folder exists and belongs to user - const folderToMove = await db.query.folders.findFirst({ - where: and(eq(folders.id, folderId), eq(folders.userId, userId)), - 
}); +foldersRouter.put("/:id/reorder", zValidator("json", reorderFolderSchema), async (c) => { + const userId = c.get("userId"); + const folderId = c.req.param("id"); + const { newIndex } = c.req.valid("json"); - if (!folderToMove) { - throw new HTTPException(404, { message: "Folder not found" }); - } + // Check if folder exists and belongs to user + const folderToMove = await db.query.folders.findFirst({ + where: and(eq(folders.id, folderId), eq(folders.userId, userId)), + }); - // Get all folders in the same parent scope (same parentId) for this user - const siblingFolders = await db.query.folders.findMany({ - where: and( - eq(folders.userId, userId), - folderToMove.parentId - ? eq(folders.parentId, folderToMove.parentId) - : isNull(folders.parentId), - ), - orderBy: [asc(folders.sortOrder), desc(folders.createdAt)], - }); + if (!folderToMove) { + throw new HTTPException(404, { message: "Folder not found" }); + } - // Validate newIndex - if (newIndex < 0 || newIndex >= siblingFolders.length) { - throw new HTTPException(400, { message: "Invalid new index" }); - } + // Get all folders in the same parent scope (same parentId) for this user + const siblingFolders = await db.query.folders.findMany({ + where: and( + eq(folders.userId, userId), + folderToMove.parentId ? 
eq(folders.parentId, folderToMove.parentId) : isNull(folders.parentId) + ), + orderBy: [asc(folders.sortOrder), desc(folders.createdAt)], + }); - // Find current position of the folder - const currentIndex = siblingFolders.findIndex( - (folder) => folder.id === folderId, - ); - if (currentIndex === -1) { - throw new HTTPException(404, { message: "Folder not found in siblings" }); - } + // Validate newIndex + if (newIndex < 0 || newIndex >= siblingFolders.length) { + throw new HTTPException(400, { message: "Invalid new index" }); + } - // If already in correct position, no need to do anything - if (currentIndex === newIndex) { - return c.json({ message: "Folder already in correct position" }); - } + // Find current position of the folder + const currentIndex = siblingFolders.findIndex((folder) => folder.id === folderId); + if (currentIndex === -1) { + throw new HTTPException(404, { message: "Folder not found in siblings" }); + } - try { - // Use a transaction to ensure consistency - await db.transaction(async (tx) => { - // Create a new array with the folder moved to the new position - const reorderedFolders = [...siblingFolders]; - const [movedFolder] = reorderedFolders.splice(currentIndex, 1); - reorderedFolders.splice(newIndex, 0, movedFolder); - - // Update sort order for all affected folders - const updatePromises = reorderedFolders.map((folder, index) => - tx - .update(folders) - .set({ - sortOrder: index, - updatedAt: new Date(), - }) - .where(eq(folders.id, folder.id)), - ); + // If already in correct position, no need to do anything + if (currentIndex === newIndex) { + return c.json({ message: "Folder already in correct position" }); + } - await Promise.all(updatePromises); - }); + try { + // Use a transaction to ensure consistency + await db.transaction(async (tx) => { + // Create a new array with the folder moved to the new position + const reorderedFolders = [...siblingFolders]; + const [movedFolder] = reorderedFolders.splice(currentIndex, 1); + 
reorderedFolders.splice(newIndex, 0, movedFolder); + + // Update sort order for all affected folders + const updatePromises = reorderedFolders.map((folder, index) => + tx + .update(folders) + .set({ + sortOrder: index, + updatedAt: new Date(), + }) + .where(eq(folders.id, folder.id)) + ); + + await Promise.all(updatePromises); + }); - return c.json({ - message: "Folder reordered successfully", - folderId, - newIndex, - }); - } catch { - throw new HTTPException(500, { message: "Failed to reorder folders" }); - } - }, -); + // Invalidate cache + await deleteCache(CacheKeys.foldersList(userId), CacheKeys.folderTree(userId)); + + return c.json({ + message: "Folder reordered successfully", + folderId, + newIndex, + }); + } catch { + throw new HTTPException(500, { message: "Failed to reorder folders" }); + } +}); foldersRouter.delete("/:id", async (c) => { const userId = c.get("userId"); @@ -307,7 +322,7 @@ foldersRouter.delete("/:id", async (c) => { eq(folders.userId, userId), existingFolder.parentId ? 
eq(folders.parentId, existingFolder.parentId) - : isNull(folders.parentId), + : isNull(folders.parentId) ), orderBy: [asc(folders.sortOrder)], }); @@ -315,28 +330,36 @@ foldersRouter.delete("/:id", async (c) => { // Update sort order for remaining folders if (remainingFolders.length > 0) { const updatePromises = remainingFolders.map((folder, index) => - tx - .update(folders) - .set({ sortOrder: index }) - .where(eq(folders.id, folder.id)), + tx.update(folders).set({ sortOrder: index }).where(eq(folders.id, folder.id)) ); const updateResults = await Promise.all(updatePromises); // Verify all updates succeeded - const failedUpdates = updateResults.filter(result => result.rowCount === 0); + const failedUpdates = updateResults.filter((result) => result.rowCount === 0); if (failedUpdates.length > 0) { throw new Error(`Failed to reorder ${failedUpdates.length} folder(s) after deletion`); } } }); + // Invalidate cache + await deleteCache(CacheKeys.foldersList(userId), CacheKeys.folderTree(userId)); + return c.json({ message: "Folder deleted successfully" }); } catch (error) { - console.error(`Failed to delete folder ${folderId}:`, error); + logger.error( + `Failed to delete folder ${folderId}`, + { + folderId, + userId, + error: error instanceof Error ? error.message : String(error), + }, + error instanceof Error ? error : undefined + ); throw new HTTPException(500, { message: "Failed to delete folder", - cause: error instanceof Error ? error.message : "Unknown error" + cause: error instanceof Error ? 
error.message : "Unknown error", }); } }); diff --git a/src/routes/metrics.ts b/src/routes/metrics.ts index 9486673..f8faef5 100644 --- a/src/routes/metrics.ts +++ b/src/routes/metrics.ts @@ -22,15 +22,15 @@ interface SystemMetrics { } interface HealthStatus { - status: 'healthy' | 'degraded' | 'unhealthy'; + status: "healthy" | "degraded" | "unhealthy"; timestamp: string; uptime: number; environment: string; version: string; checks: { - memory: { status: 'pass' | 'warn' | 'fail'; details: string }; - database: { status: 'pass' | 'warn' | 'fail'; details: string }; - judge0: { status: 'pass' | 'warn' | 'fail'; details: string }; + memory: { status: "pass" | "warn" | "fail"; details: string }; + database: { status: "pass" | "warn" | "fail"; details: string }; + judge0: { status: "pass" | "warn" | "fail"; details: string }; }; } @@ -38,41 +38,42 @@ interface HealthStatus { metricsRouter.get("/health", async (c) => { const startTime = Date.now(); const memUsage = process.memoryUsage(); - const environment = process.env.NODE_ENV || 'development'; - const version = process.env.npm_package_version || '1.0.0'; + const environment = process.env.NODE_ENV || "development"; + const version = process.env.npm_package_version || "1.0.0"; // Memory check (warn if heap usage > 80%, fail if > 95%) const heapUsedPercent = (memUsage.heapUsed / memUsage.heapTotal) * 100; const memoryCheck = { - status: heapUsedPercent > 95 ? 'fail' as const : heapUsedPercent > 80 ? 'warn' as const : 'pass' as const, - details: `Heap usage: ${Math.round(heapUsedPercent)}% (${Math.round(memUsage.heapUsed / 1024 / 1024)}MB / ${Math.round(memUsage.heapTotal / 1024 / 1024)}MB)` + status: + heapUsedPercent > 95 + ? ("fail" as const) + : heapUsedPercent > 80 + ? 
("warn" as const) + : ("pass" as const), + details: `Heap usage: ${Math.round(heapUsedPercent)}% (${Math.round(memUsage.heapUsed / 1024 / 1024)}MB / ${Math.round(memUsage.heapTotal / 1024 / 1024)}MB)`, }; // Database connectivity check - let databaseCheck = { status: 'pass' as const, details: 'Database connection healthy' }; + let databaseCheck = { status: "pass" as const, details: "Database connection healthy" }; try { // Basic database health check could be added here // For now, assume healthy if no errors are thrown } catch { - databaseCheck = { status: 'fail', details: 'Database connection failed' }; + databaseCheck = { status: "fail", details: "Database connection failed" }; } // Judge0 service check (optional) - let judge0Check = { status: 'pass' as const, details: 'Judge0 service available' }; - if (process.env.JUDGE0_API_KEY) { - // Could add actual Judge0 health check here - judge0Check = { status: 'pass', details: 'Judge0 API configured' }; - } else { - judge0Check = { status: 'warn', details: 'Judge0 API not configured' }; - } + const judge0Check = process.env.JUDGE0_API_KEY + ? { status: "pass" as const, details: "Judge0 API configured" } + : { status: "warn" as const, details: "Judge0 API not configured" }; // Determine overall status const checks = { memory: memoryCheck, database: databaseCheck, judge0: judge0Check }; - const hasFailures = Object.values(checks).some(check => check.status === 'fail'); - const hasWarnings = Object.values(checks).some(check => check.status === 'warn'); + const hasFailures = Object.values(checks).some((check) => check.status === "fail"); + const hasWarnings = Object.values(checks).some((check) => check.status === "warn"); - const overallStatus = hasFailures ? 'unhealthy' : hasWarnings ? 'degraded' : 'healthy'; - const statusCode = overallStatus === 'healthy' ? 200 : overallStatus === 'degraded' ? 207 : 503; + const overallStatus = hasFailures ? "unhealthy" : hasWarnings ? 
"degraded" : "healthy"; + const statusCode = overallStatus === "healthy" ? 200 : overallStatus === "degraded" ? 207 : 503; const healthStatus: HealthStatus = { status: overallStatus, @@ -80,18 +81,19 @@ metricsRouter.get("/health", async (c) => { uptime: process.uptime(), environment, version, - checks + checks, }; // Log health check const duration = Date.now() - startTime; - logger.info('Health check completed', { + logger.info("Health check completed", { status: overallStatus, duration, - checks: Object.entries(checks).map(([name, check]) => `${name}:${check.status}`).join(',') + checks: Object.entries(checks) + .map(([name, check]) => `${name}:${check.status}`) + .join(","), }); - return c.json(healthStatus, statusCode); }); @@ -100,8 +102,8 @@ metricsRouter.get("/metrics", async (c) => { const startTime = Date.now(); const memUsage = process.memoryUsage(); const cpuUsage = process.cpuUsage(); - const environment = process.env.NODE_ENV || 'development'; - const version = process.env.npm_package_version || '1.0.0'; + const environment = process.env.NODE_ENV || "development"; + const version = process.env.npm_package_version || "1.0.0"; const metrics: SystemMetrics = { timestamp: new Date().toISOString(), @@ -109,12 +111,11 @@ metricsRouter.get("/metrics", async (c) => { memory: memUsage, cpuUsage, environment, - version + version, }; - const duration = Date.now() - startTime; - logger.debug('System metrics retrieved', { duration }); + logger.debug("System metrics retrieved", { duration }); return c.json(metrics); }); @@ -124,7 +125,7 @@ metricsRouter.get("/ready", async (c) => { // Simple readiness check - service is ready if it can respond return c.json({ status: "ready", - timestamp: new Date().toISOString() + timestamp: new Date().toISOString(), }); }); @@ -134,8 +135,8 @@ metricsRouter.get("/live", async (c) => { return c.json({ status: "alive", timestamp: new Date().toISOString(), - uptime: process.uptime() + uptime: process.uptime(), }); }); -export 
default metricsRouter; \ No newline at end of file +export default metricsRouter; diff --git a/src/routes/notes.ts b/src/routes/notes.ts index f3db0a1..c96bbc7 100644 --- a/src/routes/notes.ts +++ b/src/routes/notes.ts @@ -2,11 +2,7 @@ import { Hono } from "hono"; import { zValidator } from "@hono/zod-validator"; import { HTTPException } from "hono/http-exception"; import { db, notes } from "../db"; -import { - createNoteSchema, - updateNoteSchema, - notesQuerySchema, -} from "../lib/validation"; +import { createNoteSchema, updateNoteSchema, notesQuerySchema } from "../lib/validation"; import { eq, and, desc, or, ilike, count, SQL } from "drizzle-orm"; import { checkNoteLimits } from "../middleware/usage"; @@ -40,25 +36,18 @@ notesRouter.get("/", zValidator("query", notesQuerySchema), async (c) => { if (query.search) { const escapedSearch = query.search - .replace(/\\/g, '\\\\') - .replace(/%/g, '\\%') - .replace(/_/g, '\\_'); + .replace(/\\/g, "\\\\") + .replace(/%/g, "\\%") + .replace(/_/g, "\\_"); conditions.push( - or( - ilike(notes.title, `%${escapedSearch}%`), - ilike(notes.content, `%${escapedSearch}%`), - )!, + or(ilike(notes.title, `%${escapedSearch}%`), ilike(notes.content, `%${escapedSearch}%`))! ); } - const whereClause = - conditions.length > 1 ? and(...conditions) : conditions[0]; + const whereClause = conditions.length > 1 ? 
and(...conditions) : conditions[0]; - const [{ total }] = await db - .select({ total: count() }) - .from(notes) - .where(whereClause); + const [{ total }] = await db.select({ total: count() }).from(notes).where(whereClause); const offset = (query.page - 1) * query.limit; const userNotes = await db.query.notes.findMany({ @@ -73,7 +62,7 @@ notesRouter.get("/", zValidator("query", notesQuerySchema), async (c) => { }); // Add attachmentCount to each note and remove full attachments array - const notesWithAttachmentCount = userNotes.map(note => ({ + const notesWithAttachmentCount = userNotes.map((note) => ({ ...note, attachmentCount: note.attachments.length, attachments: undefined, @@ -157,9 +146,7 @@ notesRouter.delete("/empty-trash", async (c) => { .from(notes) .where(and(eq(notes.userId, userId), eq(notes.deleted, true))); - await db - .delete(notes) - .where(and(eq(notes.userId, userId), eq(notes.deleted, true))); + await db.delete(notes).where(and(eq(notes.userId, userId), eq(notes.deleted, true))); return c.json({ success: true, diff --git a/src/routes/users.ts b/src/routes/users.ts index 33b27c1..5a26439 100644 --- a/src/routes/users.ts +++ b/src/routes/users.ts @@ -6,17 +6,21 @@ const usersRouter = new Hono(); usersRouter.get("/me", async (c) => { const user = getCurrentUser(c); const includeUsage = c.req.query("include_usage") === "true"; - + if (!includeUsage) { return c.json(user); } - - const FREE_TIER_STORAGE_GB = process.env.FREE_TIER_STORAGE_GB ? parseFloat(process.env.FREE_TIER_STORAGE_GB) : 1; - const FREE_TIER_NOTE_LIMIT = process.env.FREE_TIER_NOTE_LIMIT ? parseInt(process.env.FREE_TIER_NOTE_LIMIT) : 1000; - + + const FREE_TIER_STORAGE_GB = process.env.FREE_TIER_STORAGE_GB + ? parseFloat(process.env.FREE_TIER_STORAGE_GB) + : 1; + const FREE_TIER_NOTE_LIMIT = process.env.FREE_TIER_NOTE_LIMIT + ? 
parseInt(process.env.FREE_TIER_NOTE_LIMIT) + : 1000; + const { db, fileAttachments, notes } = await import("../db"); const { eq, and, sum, count, isNull, or } = await import("drizzle-orm"); - + const storageResult = await db .select({ totalBytes: sum(fileAttachments.size), @@ -29,27 +33,22 @@ usersRouter.get("/me", async (c) => { or(isNull(notes.deleted), eq(notes.deleted, false)) // Only count files from non-deleted notes ) ); - + const noteCountResult = await db .select({ count: count(), }) .from(notes) - .where( - and( - eq(notes.userId, user.id), - or(isNull(notes.deleted), eq(notes.deleted, false)) - ) - ); - + .where(and(eq(notes.userId, user.id), or(isNull(notes.deleted), eq(notes.deleted, false)))); + const totalBytes = storageResult[0]?.totalBytes ? Number(storageResult[0].totalBytes) : 0; const totalMB = Math.round((totalBytes / (1024 * 1024)) * 100) / 100; const totalGB = Math.round((totalMB / 1024) * 100) / 100; const noteCount = noteCountResult[0]?.count || 0; - + const storageUsagePercent = Math.round((totalGB / FREE_TIER_STORAGE_GB) * 100 * 100) / 100; const noteUsagePercent = Math.round((noteCount / FREE_TIER_NOTE_LIMIT) * 100 * 100) / 100; - + return c.json({ ...user, usage: { diff --git a/src/server.ts b/src/server.ts index cdf2994..a081870 100644 --- a/src/server.ts +++ b/src/server.ts @@ -1,20 +1,19 @@ -require('newrelic'); +const newrelic = require("newrelic"); import "dotenv-flow/config"; -const isDevelopment = process.env.NODE_ENV === 'development'; - +const isDevelopment = process.env.NODE_ENV === "development"; import { Hono } from "hono"; import { cors } from "hono/cors"; import { bodyLimit } from "hono/body-limit"; import { HTTPException } from "hono/http-exception"; -// import { serve } from "@hono/node-server"; // Unused since we use custom HTTP server import { createServer } from "http"; import { WebSocketManager } from "./websocket"; import { authMiddleware } from "./middleware/auth"; import { securityHeaders } from 
"./middleware/security"; import { rateLimit, cleanup as rateLimitCleanup } from "./middleware/rate-limit"; +import { closeCache } from "./lib/cache"; import foldersRouter from "./routes/folders"; import notesRouter from "./routes/notes"; import usersRouter from "./routes/users"; @@ -24,9 +23,15 @@ import metricsRouter from "./routes/metrics"; import { VERSION } from "./version"; import { logger } from "./lib/logger"; -const maxFileSize = process.env.MAX_FILE_SIZE_MB - ? parseInt(process.env.MAX_FILE_SIZE_MB) - : 50; +// Verify New Relic is connected +if (newrelic.agent?.config?.agent_enabled) { + console.log("โœ… New Relic agent is enabled"); + console.log("๐Ÿ“Š App Name:", newrelic.agent.config.app_name); +} else { + logger.error("New Relic agent is NOT enabled - check license key"); +} + +const maxFileSize = process.env.MAX_FILE_SIZE_MB ? parseInt(process.env.MAX_FILE_SIZE_MB) : 50; const maxBodySize = Math.ceil(maxFileSize * 1.35); const app = new Hono(); @@ -54,18 +59,37 @@ app.use("*", async (c, next) => { logger.httpRequest(method, path, status, duration, userId); if (isDevelopment) { - const emoji = status >= 200 && status < 300 ? 'โœ…' : - status >= 400 && status < 500 ? 'โš ๏ธ' : 'โŒ'; + const emoji = + status >= 200 && status < 300 ? "โœ…" : status >= 400 && status < 500 ? "โš ๏ธ" : "โŒ"; console.log(`${emoji} [${method}] ${path} - ${status} (${duration}ms)`); } }); +// HTTP API Rate Limiting Configuration +const httpRateLimitWindow = process.env.HTTP_RATE_LIMIT_WINDOW_MS + ? parseInt(process.env.HTTP_RATE_LIMIT_WINDOW_MS) + : 15 * 60 * 1000; // 15 minutes + +const httpRateLimitMax = process.env.HTTP_RATE_LIMIT_MAX_REQUESTS + ? parseInt(process.env.HTTP_RATE_LIMIT_MAX_REQUESTS) + : 1000; + +const fileRateLimitMax = process.env.HTTP_FILE_RATE_LIMIT_MAX + ? 
parseInt(process.env.HTTP_FILE_RATE_LIMIT_MAX) + : 100; + +logger.info("HTTP rate limiting configured", { + windowMinutes: httpRateLimitWindow / 1000 / 60, + maxRequests: httpRateLimitMax, + fileMaxRequests: fileRateLimitMax, +}); + // Apply rate limiting app.use( "*", rateLimit({ - windowMs: 15 * 60 * 1000, // 15 minutes - max: 1000, // 1000 requests per window (increased from 100) + windowMs: httpRateLimitWindow, + max: httpRateLimitMax, }) ); @@ -73,8 +97,8 @@ app.use( app.use( "/api/files/*", rateLimit({ - windowMs: 15 * 60 * 1000, // 15 minutes - max: 100, // 100 file operations per window (increased from 10) + windowMs: httpRateLimitWindow, + max: fileRateLimitMax, }) ); @@ -90,19 +114,21 @@ app.use( error: `Request body too large. Maximum file size is ${maxFileSize}MB`, status: 413, }, - 413, + 413 ); }, - }), + }) ); -app.use("*", async (c, next) => { - await next(); -}); - -const corsOrigins = process.env.CORS_ORIGINS +const corsOrigins = process.env.CORS_ORIGINS ? process.env.CORS_ORIGINS.split(",").map((origin) => origin.trim()) - : ["http://localhost:3000", "http://localhost:5173"]; + : []; + +if (corsOrigins.length === 0) { + logger.warn("CORS_ORIGINS not configured - all cross-origin requests will be blocked", { + recommendation: "Set CORS_ORIGINS environment variable with your frontend URLs", + }); +} app.use( "*", @@ -110,19 +136,9 @@ app.use( origin: corsOrigins, credentials: true, allowMethods: ["GET", "POST", "PUT", "DELETE", "OPTIONS"], - allowHeaders: [ - "Content-Type", - "Authorization", - "Cookie", - "X-Client-ID", - "X-Client-Secret", - ], - exposeHeaders: [ - "Set-Cookie", - "X-Auth-Refresh-Required", - "WWW-Authenticate", - ], - }), + allowHeaders: ["Content-Type", "Authorization", "Cookie", "X-Client-ID", "X-Client-Secret"], + exposeHeaders: ["Set-Cookie", "X-Auth-Refresh-Required", "WWW-Authenticate"], + }) ); app.get("/", (c) => { @@ -160,29 +176,27 @@ app.route("/", metricsRouter); app.use("*", authMiddleware); -// Rate limiting for 
code execution - AFTER auth so users are properly identified -const codeRateLimit = (() => { - if (process.env.CODE_EXEC_RATE_LIMIT_MAX) { - return parseInt(process.env.CODE_EXEC_RATE_LIMIT_MAX); - } - // More reasonable defaults now that we have per-user limits - return process.env.NODE_ENV === 'development' ? 100 : 50; -})(); +// Code Execution Rate Limiting Configuration - AFTER auth so users are properly identified +const codeRateLimitMax = process.env.CODE_EXEC_RATE_LIMIT_MAX + ? parseInt(process.env.CODE_EXEC_RATE_LIMIT_MAX) + : process.env.NODE_ENV === "development" + ? 100 + : 50; -const codeRateWindow = (() => { - if (process.env.CODE_EXEC_RATE_WINDOW_MS) { - return parseInt(process.env.CODE_EXEC_RATE_WINDOW_MS); - } - return 15 * 60 * 1000; // 15 minutes for both dev and prod -})(); +const codeRateLimitWindow = process.env.CODE_EXEC_RATE_WINDOW_MS + ? parseInt(process.env.CODE_EXEC_RATE_WINDOW_MS) + : 15 * 60 * 1000; // 15 minutes -console.log(`๐Ÿ”ง Code execution rate limit: ${codeRateLimit} requests per ${codeRateWindow / 1000 / 60} minutes`); +logger.info("Code execution rate limiting configured", { + windowMinutes: codeRateLimitWindow / 1000 / 60, + maxRequests: codeRateLimitMax, +}); app.use( "/api/code/*", rateLimit({ - windowMs: codeRateWindow, - max: codeRateLimit, + windowMs: codeRateLimitWindow, + max: codeRateLimitMax, }) ); @@ -198,14 +212,20 @@ app.onError((err, c) => { // Get user context const userId = c.get("userId") || "anonymous"; - const _userEmail = c.get("user")?.email; // Log full error details server-side only - console.error(`[ERROR ${errorId}] API Error:`, err.message); - console.error(`[ERROR ${errorId}] Stack:`, err.stack); - console.error(`[ERROR ${errorId}] URL:`, c.req.url); - console.error(`[ERROR ${errorId}] Method:`, c.req.method); - console.error(`[ERROR ${errorId}] User:`, userId); + logger.error( + "API Error", + { + errorId, + message: err.message, + url: c.req.url, + method: c.req.method, + userId, + stack: err.stack 
?? "no stack trace", + }, + err + ); // Error context logged above @@ -213,12 +233,28 @@ app.onError((err, c) => { // Log usage limit errors for billing analytics if (err.status === 402 && err.cause) { const userId = c.get("userId") || "anonymous"; - const cause = err.cause as { code: string; currentCount?: number; limit?: number; currentStorageMB?: number; fileSizeMB?: number; expectedTotalMB?: number; limitGB?: number }; + const cause = err.cause as { + code: string; + currentCount?: number; + limit?: number; + currentStorageMB?: number; + fileSizeMB?: number; + expectedTotalMB?: number; + limitGB?: number; + }; if (cause.code === "NOTE_LIMIT_EXCEEDED") { - console.log(`[BILLING] Note limit exceeded - User: ${userId}, Count: ${cause.currentCount}/${cause.limit}`); + logger.businessEvent("note_limit_exceeded", userId, { + currentCount: cause.currentCount ?? 0, + limit: cause.limit ?? 0, + }); } else if (cause.code === "STORAGE_LIMIT_EXCEEDED") { - console.log(`[BILLING] Storage limit exceeded - User: ${userId}, Storage: ${cause.currentStorageMB}MB + ${cause.fileSizeMB}MB = ${cause.expectedTotalMB}MB (Limit: ${cause.limitGB}GB)`); + logger.businessEvent("storage_limit_exceeded", userId, { + currentStorageMB: cause.currentStorageMB ?? 0, + fileSizeMB: cause.fileSizeMB ?? 0, + expectedTotalMB: cause.expectedTotalMB ?? 0, + limitGB: cause.limitGB ?? 0, + }); } } @@ -228,9 +264,9 @@ app.onError((err, c) => { error: err.message, status: err.status, timestamp: new Date().toISOString(), - ...(process.env.NODE_ENV === "development" && { errorId }) + ...(process.env.NODE_ENV === "development" && { errorId }), }, - err.status, + err.status ); } @@ -244,10 +280,10 @@ app.onError((err, c) => { timestamp: new Date().toISOString(), ...(process.env.NODE_ENV === "development" && { errorId, - stack: err.stack - }) + stack: err.stack ?? 
"no stack trace", + }), }, - 500, + 500 ); }); @@ -260,15 +296,18 @@ app.notFound((c) => { method: c.req.method, timestamp: new Date().toISOString(), }, - 404, + 404 ); }); const port = Number(process.env.PORT) || 3000; -const freeStorageGB = process.env.FREE_TIER_STORAGE_GB ? parseFloat(process.env.FREE_TIER_STORAGE_GB) : 1; -const freeNoteLimit = process.env.FREE_TIER_NOTE_LIMIT ? parseInt(process.env.FREE_TIER_NOTE_LIMIT) : 1000; - +const freeStorageGB = process.env.FREE_TIER_STORAGE_GB + ? parseFloat(process.env.FREE_TIER_STORAGE_GB) + : 1; +const freeNoteLimit = process.env.FREE_TIER_NOTE_LIMIT + ? parseInt(process.env.FREE_TIER_NOTE_LIMIT) + : 1000; logger.info("Typelets API server starting", { version: VERSION, @@ -277,29 +316,24 @@ logger.info("Typelets API server starting", { maxBodySize, freeStorageGB, freeNoteLimit, - corsOrigins: corsOrigins.join(','), - environment: process.env.NODE_ENV || 'development' + corsOrigins: corsOrigins.join(","), + environment: process.env.NODE_ENV || "development", }); -console.log( - "๐Ÿš€ Typelets API v" + VERSION + " started at:", - new Date().toISOString(), -); +console.log("๐Ÿš€ Typelets API v" + VERSION + " started at:", new Date().toISOString()); console.log(`๐Ÿ“ก Listening on port ${port}`); -console.log( - `๐Ÿ“ Max file size: ${maxFileSize}MB (body limit: ${maxBodySize}MB)`, -); +console.log(`๐Ÿ“ Max file size: ${maxFileSize}MB (body limit: ${maxBodySize}MB)`); console.log(`๐Ÿ’ฐ Free tier limits: ${freeStorageGB}GB storage, ${freeNoteLimit} notes`); console.log(`๐ŸŒ CORS origins:`, corsOrigins); const httpServer = createServer((req, res) => { let body = Buffer.alloc(0); - req.on('data', (chunk: Buffer) => { + req.on("data", (chunk: Buffer) => { body = Buffer.concat([body, chunk]); }); - req.on('end', async () => { + req.on("end", async () => { try { const requestInit: RequestInit = { method: req.method, @@ -320,16 +354,22 @@ const httpServer = createServer((req, res) => { const buffer = await 
response.arrayBuffer(); res.end(Buffer.from(buffer)); } catch (err) { - console.error('Request handling error:', err); + logger.error( + "Request handling error", + { + error: err instanceof Error ? err.message : String(err), + }, + err instanceof Error ? err : undefined + ); res.statusCode = 500; - res.end('Internal Server Error'); + res.end("Internal Server Error"); } }); - req.on('error', (err: Error) => { - console.error('Request error:', err); + req.on("error", (err: Error) => { + logger.error("Request error", { error: err.message }, err); res.statusCode = 500; - res.end('Internal Server Error'); + res.end("Internal Server Error"); }); }); @@ -337,30 +377,33 @@ const wsManager = new WebSocketManager(httpServer); // Graceful shutdown handling -const shutdown = (signal: string) => { - console.log(`\n๐Ÿ›‘ Received ${signal}, starting graceful shutdown...`); +const shutdown = async (signal: string) => { + logger.info(`Received ${signal}, starting graceful shutdown`); // Stop accepting new connections - httpServer.close(() => { - console.log('๐Ÿ“ด HTTP server closed'); + httpServer.close(async () => { + logger.info("HTTP server closed"); // Cleanup rate limiter rateLimitCleanup(); - console.log('๐Ÿงน Rate limiter cleanup completed'); + logger.info("Rate limiter cleanup completed"); + + // Close cache connection + await closeCache(); - console.log('โœ… Graceful shutdown completed'); + logger.info("Graceful shutdown completed"); process.exit(0); }); // Force shutdown after 10 seconds setTimeout(() => { - console.error('โŒ Forced shutdown after timeout'); + logger.error("Forced shutdown after timeout"); process.exit(1); }, 10000); }; -process.on('SIGTERM', () => shutdown('SIGTERM')); -process.on('SIGINT', () => shutdown('SIGINT')); +process.on("SIGTERM", () => shutdown("SIGTERM")); +process.on("SIGINT", () => shutdown("SIGINT")); httpServer.listen(port, () => { console.log(`๐Ÿš€ Typelets API v${VERSION} with WebSocket started at:`, new Date().toISOString()); @@ 
-368,5 +411,4 @@ httpServer.listen(port, () => { console.log(`๐Ÿ“ Max file size: ${maxFileSize}MB (body limit: ${maxBodySize}MB)`); console.log(`๐Ÿ’ฐ Free tier limits: ${freeStorageGB}GB storage, ${freeNoteLimit} notes`); console.log(`๐ŸŒ CORS origins:`, corsOrigins); - }); diff --git a/src/types/index.ts b/src/types/index.ts index a4b6967..c394e30 100644 --- a/src/types/index.ts +++ b/src/types/index.ts @@ -47,7 +47,7 @@ export interface UserUpdateData { } // Note with attachment count for list responses -export type NoteWithAttachmentCount = Omit & { +export type NoteWithAttachmentCount = Omit & { attachmentCount: number; folder?: Folder | null; }; diff --git a/src/version.ts b/src/version.ts index 4e929b4..25d2bd5 100644 --- a/src/version.ts +++ b/src/version.ts @@ -1,2 +1,2 @@ // This file is automatically updated by semantic-release -export const VERSION = '1.5.0' \ No newline at end of file +export const VERSION = '1.5.0' diff --git a/src/websocket/auth/handler.ts b/src/websocket/auth/handler.ts index 6262739..48bca67 100644 --- a/src/websocket/auth/handler.ts +++ b/src/websocket/auth/handler.ts @@ -1,7 +1,7 @@ import { verifyToken } from "@clerk/backend"; import { createHash, createHmac } from "crypto"; -import { AuthenticatedWebSocket, WebSocketMessage, WebSocketConfig } from '../types'; -import { ConnectionManager } from '../middleware/connection-manager'; +import { AuthenticatedWebSocket, WebSocketMessage, WebSocketConfig } from "../types"; +import { ConnectionManager } from "../middleware/connection-manager"; interface AuthenticatedMessage { payload: WebSocketMessage; @@ -24,7 +24,7 @@ export class AuthHandler { private cleanupOldNonces(): void { const now = Date.now(); - const fiveMinutesAgo = now - (5 * 60 * 1000); + const fiveMinutesAgo = now - 5 * 60 * 1000; // Remove expired nonces const noncesToDelete: string[] = []; @@ -35,7 +35,7 @@ export class AuthHandler { }); // Delete expired nonces - noncesToDelete.forEach(nonce => { + 
noncesToDelete.forEach((nonce) => { this.usedNonces.delete(nonce); }); @@ -44,19 +44,20 @@ export class AuthHandler { console.warn(`Nonce storage exceeded limit (${this.MAX_NONCES}), clearing all nonces`); this.usedNonces.clear(); } - } setupAuthTimeout(ws: AuthenticatedWebSocket): void { ws.authTimeout = setTimeout(() => { if (!ws.isAuthenticated) { - if (process.env.NODE_ENV === 'development') { + if (process.env.NODE_ENV === "development") { console.log("WebSocket connection closed due to authentication timeout"); } - ws.send(JSON.stringify({ - type: "error", - message: "Authentication timeout. Connection will be closed." - })); + ws.send( + JSON.stringify({ + type: "error", + message: "Authentication timeout. Connection will be closed.", + }) + ); ws.close(); } }, this._config.authTimeoutMs); @@ -65,6 +66,7 @@ export class AuthHandler { async handleAuthentication(ws: AuthenticatedWebSocket, message: WebSocketMessage): Promise { try { if (!message.token) { + // noinspection ExceptionCaughtLocallyJS throw new Error("Token is required"); } @@ -76,10 +78,12 @@ export class AuthHandler { // Check connection limit before allowing authentication if (!this.connectionManager.checkConnectionLimit(userId)) { - ws.send(JSON.stringify({ - type: "error", - message: "Maximum connections exceeded for this user" - })); + ws.send( + JSON.stringify({ + type: "error", + message: "Maximum connections exceeded for this user", + }) + ); ws.close(); return; } @@ -88,7 +92,6 @@ export class AuthHandler { ws.isAuthenticated = true; ws.jwtToken = message.token; // Store JWT token for signature verification - // Clear authentication timeout since user is now authenticated if (ws.authTimeout) { clearTimeout(ws.authTimeout); @@ -99,36 +102,39 @@ export class AuthHandler { const timestamp = Date.now(); const flooredTimestamp = Math.floor(timestamp / 300000) * 300000; // 5-minute window - const sessionSecret = createHash('sha256') + const sessionSecret = createHash("sha256") 
.update(`${message.token}:${userId}:${flooredTimestamp}`) - .digest('hex'); - + .digest("hex"); // Store session secret for this connection (for reference only) ws.sessionSecret = sessionSecret; this.connectionManager.addUserConnection(userId, ws); - ws.send(JSON.stringify({ - type: "auth_success", - message: "Authentication successful", - userId: ws.userId, - sessionSecret: sessionSecret - })); + ws.send( + JSON.stringify({ + type: "auth_success", + message: "Authentication successful", + userId: ws.userId, + sessionSecret: sessionSecret, + }) + ); - if (process.env.NODE_ENV === 'development') { + if (process.env.NODE_ENV === "development") { console.log(`User ${ws.userId} authenticated via WebSocket`); } } catch (error: unknown) { console.error("WebSocket authentication failed:", error); - const isTokenExpired = (error as Record)?.reason === 'token-expired'; + const isTokenExpired = (error as Record)?.reason === "token-expired"; - ws.send(JSON.stringify({ - type: "auth_failed", - message: isTokenExpired ? "Token expired" : "Authentication failed", - reason: isTokenExpired ? "token-expired" : "auth-failed" - })); + ws.send( + JSON.stringify({ + type: "auth_failed", + message: isTokenExpired ? "Token expired" : "Authentication failed", + reason: isTokenExpired ? "token-expired" : "auth-failed", + }) + ); ws.close(); } } @@ -141,83 +147,68 @@ export class AuthHandler { * @param userId - The user ID for regenerating session secret * @returns Promise - True if signature is valid */ - async verifyMessageSignature(authMessage: AuthenticatedMessage, storedSessionSecret: string, jwtToken?: string, userId?: string): Promise { + async verifyMessageSignature( + authMessage: AuthenticatedMessage, + storedSessionSecret: string, + jwtToken?: string, + userId?: string + ): Promise { const { payload, signature, timestamp, nonce } = authMessage; // 1. 
Timestamp validation (5-minute window + 1 minute tolerance for clock skew) const messageAge = Date.now() - timestamp; const MAX_MESSAGE_AGE = 5 * 60 * 1000; // 5 minutes - if (messageAge > MAX_MESSAGE_AGE || messageAge < -60000) { // -60 seconds tolerance for clock skew - console.warn('Message rejected: timestamp out of range'); + if (messageAge > MAX_MESSAGE_AGE || messageAge < -60000) { + // -60 seconds tolerance for clock skew + console.warn("Message rejected: timestamp out of range"); return false; } // 2. Check for replay attack using nonce const nonceKey = `${nonce}:${timestamp}`; if (this.usedNonces.has(nonceKey)) { - console.warn('Message rejected: nonce already used (replay attack)'); + console.warn("Message rejected: nonce already used (replay attack)"); return false; } try { // 3. Validate required parameters if (!jwtToken || !userId) { - console.error('Missing JWT token or user ID for signature verification'); + console.error("Missing JWT token or user ID for signature verification"); return false; } - // 4. Regenerate session secret for this timestamp window (matching frontend exactly) const flooredTimestamp = Math.floor(timestamp / 300000) * 300000; const sessionSecretInput = `${jwtToken}:${userId}:${flooredTimestamp}`; - - const sessionSecret = createHash('sha256') - .update(sessionSecretInput, 'utf8') - .digest('hex'); - - // 5. FIXED: Use hex session secret directly as HMAC key (matching frontend) // Frontend uses the hex string directly, not converted to buffer - const hmacKey = sessionSecret; // Use hex string directly + const sessionSecret = createHash("sha256").update(sessionSecretInput, "utf8").digest("hex"); // 6. Create exact message data that was signed (order matters!) const messageToSign = { payload, timestamp, nonce }; const messageData = JSON.stringify(messageToSign); - // 7. 
Generate expected signature using hex string directly - const expectedSignature = createHmac('sha256', hmacKey) - .update(messageData, 'utf8') - .digest('base64'); + const expectedSignature = createHmac("sha256", sessionSecret) + .update(messageData, "utf8") + .digest("base64"); // 8. Test stored session secret with same approach - const storedSecretSignature = createHmac('sha256', storedSessionSecret) - .update(messageData, 'utf8') - .digest('base64'); - - // Test buffer conversion for comparison (kept for debugging) - const secretBuffer = Buffer.from(sessionSecret, 'hex'); - const _altSignature1 = createHmac('sha256', secretBuffer) - .update(messageData, 'utf8') - .digest('base64'); - - const _altSignature2 = createHmac('sha256', sessionSecret) - .update(messageData, 'utf8') - .digest('base64'); - + const storedSecretSignature = createHmac("sha256", storedSessionSecret) + .update(messageData, "utf8") + .digest("base64"); // 9. Compare signatures - check both regenerated and stored secret approaches const isValidRegenerated = expectedSignature === signature; const isValidStored = storedSecretSignature === signature; const isValid = isValidRegenerated || isValidStored; - - if (!isValid) { - console.warn('Message signature verification failed for user', userId); + console.warn("Message signature verification failed for user", userId); } else { - console.debug('Message signature verified successfully for user', userId); + console.debug("Message signature verified successfully for user", userId); } if (isValid) { @@ -227,7 +218,7 @@ export class AuthHandler { return isValid; } catch (error) { - console.error('Error verifying message signature:', error); + console.error("Error verifying message signature:", error); return false; } } @@ -238,19 +229,26 @@ export class AuthHandler { * @param rawMessage - The raw message (could be authenticated or plain) * @returns The extracted message payload or null if verification fails */ - async processIncomingMessage(ws: 
AuthenticatedWebSocket, rawMessage: unknown): Promise { + async processIncomingMessage( + ws: AuthenticatedWebSocket, + rawMessage: unknown + ): Promise { // Type guard to check if this is an authenticated message structure if (this.isAuthenticatedMessage(rawMessage)) { - // This is an authenticated message, verify signature if (!ws.sessionSecret) { - console.warn('Authenticated message received but no session secret available'); + console.warn("Authenticated message received but no session secret available"); return null; } - const isValid = await this.verifyMessageSignature(rawMessage, ws.sessionSecret, ws.jwtToken, ws.userId); + const isValid = await this.verifyMessageSignature( + rawMessage, + ws.sessionSecret, + ws.jwtToken, + ws.userId + ); if (!isValid) { - console.warn('Message signature verification failed for user', ws.userId); + console.warn("Message signature verification failed for user", ws.userId); return null; } @@ -258,7 +256,6 @@ export class AuthHandler { return rawMessage.payload; } - // Handle non-authenticated messages (backward compatibility) return rawMessage as WebSocketMessage; } @@ -267,20 +264,20 @@ export class AuthHandler { * Type guard to check if a message has authentication structure */ private isAuthenticatedMessage(message: unknown): message is AuthenticatedMessage { - if (typeof message !== 'object' || message === null) { + if (typeof message !== "object" || message === null) { return false; } const msg = message as Record; return ( - 'payload' in msg && - 'signature' in msg && - 'timestamp' in msg && - 'nonce' in msg && - typeof msg.signature === 'string' && - typeof msg.timestamp === 'number' && - typeof msg.nonce === 'string' + "payload" in msg && + "signature" in msg && + "timestamp" in msg && + "nonce" in msg && + typeof msg.signature === "string" && + typeof msg.timestamp === "number" && + typeof msg.nonce === "string" ); } -} \ No newline at end of file +} diff --git a/src/websocket/handlers/base.ts 
b/src/websocket/handlers/base.ts index 69a7a1f..223576b 100644 --- a/src/websocket/handlers/base.ts +++ b/src/websocket/handlers/base.ts @@ -1,7 +1,7 @@ import { db, notes, folders } from "../../db"; import { eq, and } from "drizzle-orm"; -import { AuthenticatedWebSocket, WebSocketMessage, ResourceOperationConfig } from '../types'; -import { ConnectionManager } from '../middleware/connection-manager'; +import { AuthenticatedWebSocket, WebSocketMessage, ResourceOperationConfig } from "../types"; +import { ConnectionManager } from "../middleware/connection-manager"; export class BaseResourceHandler { constructor(protected readonly _connectionManager: ConnectionManager) {} @@ -17,63 +17,74 @@ export class BaseResourceHandler { // Validate required fields if (!ws.userId || !resourceId || (config.dataField && !resourceData)) { const missingFields = []; - if (!ws.userId) missingFields.push('userId'); + if (!ws.userId) missingFields.push("userId"); if (!resourceId) missingFields.push(config.idField); if (config.dataField && !resourceData) missingFields.push(config.dataField); - ws.send(JSON.stringify({ - type: "error", - message: `Missing ${missingFields.join(', ')}` - })); + ws.send( + JSON.stringify({ + type: "error", + message: `Missing ${missingFields.join(", ")}`, + }) + ); return; } // Authorization check - MANDATORY for all operations except creation - if (config.operation !== 'created') { + if (config.operation !== "created") { if (!config.tableName) { - throw new Error(`Authorization required: tableName must be provided for ${config.operation} operations`); + throw new Error( + `Authorization required: tableName must be provided for ${config.operation} operations` + ); } try { let existingResource; - if (config.tableName === 'folders') { + if (config.tableName === "folders") { existingResource = await db.query.folders.findFirst({ where: and(eq(folders.id, resourceId), eq(folders.userId, ws.userId)), }); - } else if (config.tableName === 'notes') { + } else if 
(config.tableName === "notes") { existingResource = await db.query.notes.findFirst({ where: and(eq(notes.id, resourceId), eq(notes.userId, ws.userId)), }); } else { + // noinspection ExceptionCaughtLocallyJS throw new Error(`Unsupported table name: ${config.tableName}`); } if (!existingResource) { - ws.send(JSON.stringify({ - type: "error", - message: `${config.resourceType.charAt(0).toUpperCase() + config.resourceType.slice(1)} not found or access denied` - })); + ws.send( + JSON.stringify({ + type: "error", + message: `${config.resourceType.charAt(0).toUpperCase() + config.resourceType.slice(1)} not found or access denied`, + }) + ); return; } } catch (error) { console.error(`Error authorizing ${config.resourceType} ${config.operation}:`, error); - ws.send(JSON.stringify({ - type: "error", - message: `Failed to ${config.operation.replace('d', '')} ${config.resourceType}` - })); + ws.send( + JSON.stringify({ + type: "error", + message: `Failed to ${config.operation.replace("d", "")} ${config.resourceType}`, + }) + ); return; } } // For created operations, ensure the user owns the created resource - if (config.operation === 'created' && resourceData) { + if (config.operation === "created" && resourceData) { const createdByUserId = (resourceData as Record).userId; if (createdByUserId !== ws.userId) { - ws.send(JSON.stringify({ - type: "error", - message: "Access denied: Cannot broadcast resource created by another user" - })); + ws.send( + JSON.stringify({ + type: "error", + message: "Access denied: Cannot broadcast resource created by another user", + }) + ); return; } } @@ -84,13 +95,13 @@ export class BaseResourceHandler { const syncMessage: Record = { type: config.syncMessageType, timestamp: Date.now(), - fromUserId: ws.userId + fromUserId: ws.userId, }; // Add resource-specific data - if (config.operation === 'created' && resourceData && config.dataField) { + if (config.operation === "created" && resourceData && config.dataField) { syncMessage[config.dataField] 
= resourceData; - } else if (config.operation === 'updated') { + } else if (config.operation === "updated") { syncMessage[config.idField] = resourceId; syncMessage.changes = message.changes; @@ -100,7 +111,7 @@ export class BaseResourceHandler { if (updatedData) { syncMessage[updatedFieldName] = updatedData; } - } else if (config.operation === 'deleted') { + } else if (config.operation === "deleted") { syncMessage[config.idField] = resourceId; } @@ -108,4 +119,4 @@ export class BaseResourceHandler { const sentCount = this._connectionManager.broadcastToUserDevices(ws.userId, syncMessage, ws); console.log(`Broadcasted message to ${sentCount} devices for user ${ws.userId}`); } -} \ No newline at end of file +} diff --git a/src/websocket/handlers/folders.ts b/src/websocket/handlers/folders.ts index 76039dc..93e3945 100644 --- a/src/websocket/handlers/folders.ts +++ b/src/websocket/handlers/folders.ts @@ -1,6 +1,6 @@ -import { AuthenticatedWebSocket, WebSocketMessage } from '../types'; -import { ConnectionManager } from '../middleware/connection-manager'; -import { BaseResourceHandler } from './base'; +import { AuthenticatedWebSocket, WebSocketMessage } from "../types"; +import { ConnectionManager } from "../middleware/connection-manager"; +import { BaseResourceHandler } from "./base"; export class FolderHandler extends BaseResourceHandler { constructor(connectionManager: ConnectionManager) { @@ -9,37 +9,37 @@ export class FolderHandler extends BaseResourceHandler { async handleFolderCreated(ws: AuthenticatedWebSocket, message: WebSocketMessage): Promise { return this.handleResourceOperation(ws, message, { - resourceType: 'folder', - operation: 'created', - idField: 'folderId', - dataField: 'folderData', + resourceType: "folder", + operation: "created", + idField: "folderId", + dataField: "folderData", requiresAuth: false, - syncMessageType: 'folder_created_sync', - logAction: `created folder ${message.folderData?.id}` + syncMessageType: "folder_created_sync", + 
logAction: `created folder ${message.folderData?.id}`, }); } async handleFolderUpdated(ws: AuthenticatedWebSocket, message: WebSocketMessage): Promise { return this.handleResourceOperation(ws, message, { - resourceType: 'folder', - operation: 'updated', - idField: 'folderId', + resourceType: "folder", + operation: "updated", + idField: "folderId", requiresAuth: true, - tableName: 'folders', - syncMessageType: 'folder_updated_sync', - logAction: `updated folder ${message.folderId}` + tableName: "folders", + syncMessageType: "folder_updated_sync", + logAction: `updated folder ${message.folderId}`, }); } async handleFolderDeleted(ws: AuthenticatedWebSocket, message: WebSocketMessage): Promise { return this.handleResourceOperation(ws, message, { - resourceType: 'folder', - operation: 'deleted', - idField: 'folderId', + resourceType: "folder", + operation: "deleted", + idField: "folderId", requiresAuth: true, - tableName: 'folders', - syncMessageType: 'folder_deleted_sync', - logAction: `deleted folder ${message.folderId}` + tableName: "folders", + syncMessageType: "folder_deleted_sync", + logAction: `deleted folder ${message.folderId}`, }); } -} \ No newline at end of file +} diff --git a/src/websocket/handlers/notes.ts b/src/websocket/handlers/notes.ts index ac4d22a..31e933d 100644 --- a/src/websocket/handlers/notes.ts +++ b/src/websocket/handlers/notes.ts @@ -1,8 +1,8 @@ import { db, notes } from "../../db"; import { eq, and } from "drizzle-orm"; -import { AuthenticatedWebSocket, WebSocketMessage } from '../types'; -import { ConnectionManager } from '../middleware/connection-manager'; -import { BaseResourceHandler } from './base'; +import { AuthenticatedWebSocket, WebSocketMessage } from "../types"; +import { ConnectionManager } from "../middleware/connection-manager"; +import { BaseResourceHandler } from "./base"; export class NoteHandler extends BaseResourceHandler { constructor(connectionManager: ConnectionManager) { @@ -11,10 +11,12 @@ export class NoteHandler 
extends BaseResourceHandler { async handleJoinNote(ws: AuthenticatedWebSocket, message: WebSocketMessage): Promise { if (!message.noteId || !ws.userId) { - ws.send(JSON.stringify({ - type: "error", - message: "Missing noteId or userId" - })); + ws.send( + JSON.stringify({ + type: "error", + message: "Missing noteId or userId", + }) + ); return; } @@ -25,10 +27,12 @@ export class NoteHandler extends BaseResourceHandler { }); if (!existingNote) { - ws.send(JSON.stringify({ - type: "error", - message: "Note not found or access denied" - })); + ws.send( + JSON.stringify({ + type: "error", + message: "Note not found or access denied", + }) + ); return; } @@ -37,17 +41,21 @@ export class NoteHandler extends BaseResourceHandler { // Track this connection for the specific note this._connectionManager.addNoteConnection(message.noteId, ws); - ws.send(JSON.stringify({ - type: "note_joined", - noteId: message.noteId, - message: "Successfully joined note for real-time sync" - })); + ws.send( + JSON.stringify({ + type: "note_joined", + noteId: message.noteId, + message: "Successfully joined note for real-time sync", + }) + ); } catch (error) { console.error("Error joining note:", error); - ws.send(JSON.stringify({ - type: "error", - message: "Failed to join note" - })); + ws.send( + JSON.stringify({ + type: "error", + message: "Failed to join note", + }) + ); } } @@ -61,18 +69,22 @@ export class NoteHandler extends BaseResourceHandler { // Remove connection from note tracking this._connectionManager.removeNoteConnection(message.noteId, ws); - ws.send(JSON.stringify({ - type: "note_left", - noteId: message.noteId - })); + ws.send( + JSON.stringify({ + type: "note_left", + noteId: message.noteId, + }) + ); } async handleNoteUpdate(ws: AuthenticatedWebSocket, message: WebSocketMessage): Promise { if (!ws.userId || !message.noteId) { - ws.send(JSON.stringify({ - type: "error", - message: "Missing userId or noteId" - })); + ws.send( + JSON.stringify({ + type: "error", + message: 
"Missing userId or noteId", + }) + ); return; } @@ -83,31 +95,51 @@ export class NoteHandler extends BaseResourceHandler { }); if (!existingNote) { - ws.send(JSON.stringify({ - type: "error", - message: "Note not found or access denied" - })); + ws.send( + JSON.stringify({ + type: "error", + message: "Note not found or access denied", + }) + ); return; } // Apply the changes to the database if (message.changes && Object.keys(message.changes).length > 0) { - const allowedFields = ['title', 'content', 'encryptedTitle', 'encryptedContent', 'starred', 'archived', 'deleted', 'hidden', 'folderId']; + const allowedFields = [ + "title", + "content", + "encryptedTitle", + "encryptedContent", + "starred", + "archived", + "deleted", + "hidden", + "folderId", + ]; const filteredChanges: Record = {}; - Object.keys(message.changes).forEach(key => { + Object.keys(message.changes).forEach((key) => { if (allowedFields.includes(key)) { const value = (message.changes as Record)[key]; // Validate title and content fields must be [ENCRYPTED] - if ((key === 'title' || key === 'content') && typeof value === 'string' && value !== '[ENCRYPTED]') { - console.warn(`Note update: rejected plaintext ${key} for note ${message.noteId} - must be [ENCRYPTED]`); + if ( + (key === "title" || key === "content") && + typeof value === "string" && + value !== "[ENCRYPTED]" + ) { + console.warn( + `Note update: rejected plaintext ${key} for note ${message.noteId} - must be [ENCRYPTED]` + ); return; } filteredChanges[key] = value; } else { - console.warn(`Note update: filtered out disallowed field '${key}' for note ${message.noteId}`); + console.warn( + `Note update: filtered out disallowed field '${key}' for note ${message.noteId}` + ); } }); @@ -130,56 +162,69 @@ export class NoteHandler extends BaseResourceHandler { changes: filteredChanges, updatedNote, timestamp: Date.now(), - fromUserId: ws.userId + fromUserId: ws.userId, }; - const sentCount = 
this._connectionManager.broadcastToUserDevices(ws.userId, syncMessage, ws); + const sentCount = this._connectionManager.broadcastToUserDevices( + ws.userId, + syncMessage, + ws + ); console.log(`Broadcasted message to ${sentCount} devices for user ${ws.userId}`); // Send confirmation to the originating device - ws.send(JSON.stringify({ - type: "note_update_success", - noteId: message.noteId, - updatedNote, - timestamp: Date.now() - })); + ws.send( + JSON.stringify({ + type: "note_update_success", + noteId: message.noteId, + updatedNote, + timestamp: Date.now(), + }) + ); } else { - console.warn(`Note update: no valid changes found for note ${message.noteId}, original changes:`, message.changes); - ws.send(JSON.stringify({ - type: "error", - message: "No valid fields to update" - })); + console.warn( + `Note update: no valid changes found for note ${message.noteId}, original changes:`, + message.changes + ); + ws.send( + JSON.stringify({ + type: "error", + message: "No valid fields to update", + }) + ); } } } catch (error) { console.error("Error handling note update:", error); - ws.send(JSON.stringify({ - type: "error", - message: "Failed to update note" - })); + ws.send( + JSON.stringify({ + type: "error", + message: "Failed to update note", + }) + ); } } async handleNoteCreated(ws: AuthenticatedWebSocket, message: WebSocketMessage): Promise { return this.handleResourceOperation(ws, message, { - resourceType: 'note', - operation: 'created', - idField: 'noteId', - dataField: 'noteData', + resourceType: "note", + operation: "created", + idField: "noteId", + dataField: "noteData", requiresAuth: false, - syncMessageType: 'note_created_sync', - logAction: `created note ${message.noteData?.id}` + syncMessageType: "note_created_sync", + logAction: `created note ${message.noteData?.id}`, }); } async handleNoteDeleted(ws: AuthenticatedWebSocket, message: WebSocketMessage): Promise { return this.handleResourceOperation(ws, message, { - resourceType: 'note', - operation: 
'deleted', - idField: 'noteId', - tableName: 'notes', - syncMessageType: 'note_deleted_sync', - logAction: `deleted note ${message.noteId}` + resourceType: "note", + operation: "deleted", + idField: "noteId", + tableName: "notes", + syncMessageType: "note_deleted_sync", + logAction: `deleted note ${message.noteId}`, }); } -} \ No newline at end of file +} diff --git a/src/websocket/index.ts b/src/websocket/index.ts index c9c9b02..e0015f0 100644 --- a/src/websocket/index.ts +++ b/src/websocket/index.ts @@ -1,11 +1,16 @@ import { WebSocketServer } from "ws"; import { Server } from "http"; -import { AuthenticatedWebSocket, WebSocketMessage, WebSocketConfig, ConnectionStats } from './types'; -import { RateLimiter } from './middleware/rate-limiter'; -import { ConnectionManager } from './middleware/connection-manager'; -import { AuthHandler } from './auth/handler'; -import { NoteHandler } from './handlers/notes'; -import { FolderHandler } from './handlers/folders'; +import { + AuthenticatedWebSocket, + WebSocketMessage, + WebSocketConfig, + ConnectionStats, +} from "./types"; +import { RateLimiter } from "./middleware/rate-limiter"; +import { ConnectionManager } from "./middleware/connection-manager"; +import { AuthHandler } from "./auth/handler"; +import { NoteHandler } from "./handlers/notes"; +import { FolderHandler } from "./handlers/folders"; export class WebSocketManager { private wss: WebSocketServer; @@ -22,7 +27,7 @@ export class WebSocketManager { rateLimitWindowMs: parseInt(process.env.WS_RATE_LIMIT_WINDOW_MS || "60000"), rateLimitMaxMessages: parseInt(process.env.WS_RATE_LIMIT_MAX_MESSAGES || "300"), // Increased from 60 to 300 maxConnectionsPerUser: parseInt(process.env.WS_MAX_CONNECTIONS_PER_USER || "20"), // Increased from 10 to 20 - authTimeoutMs: parseInt(process.env.WS_AUTH_TIMEOUT_MS || "30000") + authTimeoutMs: parseInt(process.env.WS_AUTH_TIMEOUT_MS || "30000"), }; this.rateLimiter = new RateLimiter(this.config); @@ -43,7 +48,7 @@ export class 
WebSocketManager { private setupWebSocketServer(): void { this.wss.on("connection", (ws: AuthenticatedWebSocket) => { const connectionStart = Date.now(); - if (process.env.NODE_ENV === 'development') { + if (process.env.NODE_ENV === "development") { console.log("New WebSocket connection established"); } @@ -61,20 +66,26 @@ export class WebSocketManager { // Message size validation (prevent DoS attacks) const maxMessageSize = 1024 * 1024; // 1MB limit if (data.length > maxMessageSize) { - ws.send(JSON.stringify({ - type: "error", - message: "Message too large. Maximum size is 1MB." - })); - console.warn(`WebSocket message too large: ${data.length} bytes from user ${ws.userId || 'unauthenticated'}`); + ws.send( + JSON.stringify({ + type: "error", + message: "Message too large. Maximum size is 1MB.", + }) + ); + console.warn( + `WebSocket message too large: ${data.length} bytes from user ${ws.userId || "unauthenticated"}` + ); return; } // Rate limiting check if (!this.rateLimiter.checkRateLimit(ws)) { - ws.send(JSON.stringify({ - type: "error", - message: "Rate limit exceeded. Please slow down." - })); + ws.send( + JSON.stringify({ + type: "error", + message: "Rate limit exceeded. Please slow down.", + }) + ); return; } @@ -82,16 +93,17 @@ export class WebSocketManager { // Track WebSocket message processing performance const messageStart = Date.now(); - const messageType = (rawMessage as { type?: string })?.type || 'unknown'; // Process message with optional authentication verification const message = await this.authHandler.processIncomingMessage(ws, rawMessage); if (message === null) { - ws.send(JSON.stringify({ - type: "error", - message: "Message authentication failed" - })); + ws.send( + JSON.stringify({ + type: "error", + message: "Message authentication failed", + }) + ); return; } @@ -102,8 +114,8 @@ export class WebSocketManager { // Record WebSocket message metrics // Log WebSocket performance - const emoji = messageDuration > 2000 ? 
'๐ŸŒ' : messageDuration > 1000 ? 'โš ๏ธ' : 'โšก'; - if (process.env.NODE_ENV === 'development') { + const emoji = messageDuration > 2000 ? "๐ŸŒ" : messageDuration > 1000 ? "โš ๏ธ" : "โšก"; + if (process.env.NODE_ENV === "development") { console.log(`${emoji} WS: ${message.type} (${messageDuration}ms)`); } } catch (error) { @@ -112,19 +124,25 @@ export class WebSocketManager { // WebSocket metrics WebSocket message errors // Error tracking available via console logs - ws.send(JSON.stringify({ - type: "error", - message: "Invalid message format" - })); + ws.send( + JSON.stringify({ + type: "error", + message: "Invalid message format", + }) + ); } }); ws.on("close", (): void => { // Track connection duration - const connectionDuration = Date.now() - ((ws as WebSocket & { connectionStart?: number }).connectionStart || Date.now()); - - if (process.env.NODE_ENV === 'development') { - console.log(`๐Ÿ”Œ WebSocket disconnected (${Math.round(connectionDuration / 1000)}s session)`); + const connectionDuration = + Date.now() - + ((ws as WebSocket & { connectionStart?: number }).connectionStart || Date.now()); + + if (process.env.NODE_ENV === "development") { + console.log( + `๐Ÿ”Œ WebSocket disconnected (${Math.round(connectionDuration / 1000)}s session)` + ); } // Record WebSocket disconnection metrics @@ -140,14 +158,19 @@ export class WebSocketManager { // Error tracking available via console logs }); - ws.send(JSON.stringify({ - type: "connection_established", - message: "Please authenticate to continue" - })); + ws.send( + JSON.stringify({ + type: "connection_established", + message: "Please authenticate to continue", + }) + ); }); } - private async handleMessage(ws: AuthenticatedWebSocket, message: WebSocketMessage): Promise { + private async handleMessage( + ws: AuthenticatedWebSocket, + message: WebSocketMessage + ): Promise { switch (message.type) { case "auth": await this.authHandler.handleAuthentication(ws, message); @@ -156,10 +179,12 @@ export class 
WebSocketManager { if (ws.isAuthenticated) { await this.noteHandler.handleNoteUpdate(ws, message); } else { - ws.send(JSON.stringify({ - type: "error", - message: "Authentication required" - })); + ws.send( + JSON.stringify({ + type: "error", + message: "Authentication required", + }) + ); } break; case "join_note": @@ -198,16 +223,20 @@ export class WebSocketManager { } break; case "ping": - ws.send(JSON.stringify({ - type: "pong", - timestamp: Date.now() - })); + ws.send( + JSON.stringify({ + type: "pong", + timestamp: Date.now(), + }) + ); break; default: - ws.send(JSON.stringify({ - type: "error", - message: "Unknown message type" - })); + ws.send( + JSON.stringify({ + type: "error", + message: "Unknown message type", + }) + ); } } @@ -224,18 +253,25 @@ export class WebSocketManager { } // Public methods for server-triggered notifications - public notifyNoteUpdate(userId: string, noteId: string, changes: Record, updatedNote: Record): void { + public notifyNoteUpdate( + userId: string, + noteId: string, + changes: Record, + updatedNote: Record + ): void { const syncMessage = { type: "note_sync", noteId, changes, updatedNote, timestamp: Date.now(), - fromUserId: "server" + fromUserId: "server", }; const sentCount = this.connectionManager.broadcastToUserDevices(userId, syncMessage); - console.log(`Server notified ${sentCount} devices about note ${noteId} update for user ${userId}`); + console.log( + `Server notified ${sentCount} devices about note ${noteId} update for user ${userId}` + ); } public notifyNoteCreated(userId: string, noteData: Record): void { @@ -243,11 +279,13 @@ export class WebSocketManager { type: "note_created_sync", noteData, timestamp: Date.now(), - fromUserId: "server" + fromUserId: "server", }; const sentCount = this.connectionManager.broadcastToUserDevices(userId, createMessage); - console.log(`Server notified ${sentCount} devices about new note ${noteData.id} for user ${userId}`); + console.log( + `Server notified ${sentCount} devices 
about new note ${noteData.id} for user ${userId}` + ); } public notifyNoteDeleted(userId: string, noteId: string): void { @@ -255,11 +293,13 @@ export class WebSocketManager { type: "note_deleted_sync", noteId, timestamp: Date.now(), - fromUserId: "server" + fromUserId: "server", }; const sentCount = this.connectionManager.broadcastToUserDevices(userId, deleteMessage); - console.log(`Server notified ${sentCount} devices about note ${noteId} deletion for user ${userId}`); + console.log( + `Server notified ${sentCount} devices about note ${noteId} deletion for user ${userId}` + ); } public notifyFolderCreated(userId: string, folderData: Record): void { @@ -267,25 +307,34 @@ export class WebSocketManager { type: "folder_created_sync", folderData, timestamp: Date.now(), - fromUserId: "server" + fromUserId: "server", }; const sentCount = this.connectionManager.broadcastToUserDevices(userId, createMessage); - console.log(`Server notified ${sentCount} devices about new folder ${folderData.id} for user ${userId}`); + console.log( + `Server notified ${sentCount} devices about new folder ${folderData.id} for user ${userId}` + ); } - public notifyFolderUpdated(userId: string, folderId: string, changes: Record, updatedFolder: Record): void { + public notifyFolderUpdated( + userId: string, + folderId: string, + changes: Record, + updatedFolder: Record + ): void { const updateMessage = { type: "folder_updated_sync", folderId, changes, updatedFolder, timestamp: Date.now(), - fromUserId: "server" + fromUserId: "server", }; const sentCount = this.connectionManager.broadcastToUserDevices(userId, updateMessage); - console.log(`Server notified ${sentCount} devices about folder ${folderId} update for user ${userId}`); + console.log( + `Server notified ${sentCount} devices about folder ${folderId} update for user ${userId}` + ); } public notifyFolderDeleted(userId: string, folderId: string): void { @@ -293,14 +342,16 @@ export class WebSocketManager { type: "folder_deleted_sync", 
folderId, timestamp: Date.now(), - fromUserId: "server" + fromUserId: "server", }; const sentCount = this.connectionManager.broadcastToUserDevices(userId, deleteMessage); - console.log(`Server notified ${sentCount} devices about folder ${folderId} deletion for user ${userId}`); + console.log( + `Server notified ${sentCount} devices about folder ${folderId} deletion for user ${userId}` + ); } } // Export the class and types for external use -export * from './types'; -export { WebSocketManager as default }; \ No newline at end of file +export * from "./types"; +export { WebSocketManager as default }; diff --git a/src/websocket/middleware/connection-manager.ts b/src/websocket/middleware/connection-manager.ts index 01b8656..b78edc9 100644 --- a/src/websocket/middleware/connection-manager.ts +++ b/src/websocket/middleware/connection-manager.ts @@ -1,5 +1,5 @@ -import { WebSocket } from 'ws'; -import { AuthenticatedWebSocket, WebSocketConfig, ConnectionStats } from '../types'; +import { WebSocket } from "ws"; +import { AuthenticatedWebSocket, WebSocketConfig, ConnectionStats } from "../types"; export class ConnectionManager { private userConnections = new Map>(); @@ -49,7 +49,7 @@ export class ConnectionManager { // Clean up closed connections first const activeConnections = Array.from(userConnections).filter( - conn => conn.readyState === WebSocket.OPEN + (conn) => conn.readyState === WebSocket.OPEN ); // Update the set with only active connections @@ -60,12 +60,16 @@ export class ConnectionManager { return activeConnections.length < this._config.maxConnectionsPerUser; } - broadcastToUserDevices(userId: string, message: Record, excludeWs?: AuthenticatedWebSocket): number { + broadcastToUserDevices( + userId: string, + message: Record, + excludeWs?: AuthenticatedWebSocket + ): number { const connections = this.userConnections.get(userId); if (!connections) return 0; let sentCount = 0; - connections.forEach(conn => { + connections.forEach((conn) => { if (conn !== 
excludeWs && conn.readyState === WebSocket.OPEN) { conn.send(JSON.stringify(message)); sentCount++; @@ -101,10 +105,12 @@ export class ConnectionManager { return { totalConnections, authenticatedUsers: this.userConnections.size, - connectionsPerUser: Array.from(this.userConnections.entries()).map(([userId, connections]) => ({ - userId, - deviceCount: connections.size - })) + connectionsPerUser: Array.from(this.userConnections.entries()).map( + ([userId, connections]) => ({ + userId, + deviceCount: connections.size, + }) + ), }; } -} \ No newline at end of file +} diff --git a/src/websocket/middleware/rate-limiter.ts b/src/websocket/middleware/rate-limiter.ts index fbb2c1c..25e1c6d 100644 --- a/src/websocket/middleware/rate-limiter.ts +++ b/src/websocket/middleware/rate-limiter.ts @@ -1,4 +1,4 @@ -import { AuthenticatedWebSocket, WebSocketConfig } from '../types'; +import { AuthenticatedWebSocket, WebSocketConfig } from "../types"; export class RateLimiter { constructor(private readonly _config: WebSocketConfig) {} @@ -10,7 +10,7 @@ export class RateLimiter { if (!ws.rateLimit) { ws.rateLimit = { count: 1, - windowStart: now + windowStart: now, }; return true; } @@ -20,7 +20,7 @@ export class RateLimiter { if (windowElapsed >= this._config.rateLimitWindowMs) { ws.rateLimit = { count: 1, - windowStart: now + windowStart: now, }; return true; } @@ -34,4 +34,4 @@ export class RateLimiter { ws.rateLimit.count++; return true; } -} \ No newline at end of file +} diff --git a/src/websocket/types.ts b/src/websocket/types.ts index bdbf993..98aa395 100644 --- a/src/websocket/types.ts +++ b/src/websocket/types.ts @@ -1,4 +1,4 @@ -import { WebSocket } from 'ws'; +import { WebSocket } from "ws"; export interface RateLimitInfo { count: number; @@ -28,12 +28,12 @@ export interface WebSocketMessage { } export interface ResourceOperationConfig { - resourceType: 'folder' | 'note'; - operation: 'created' | 'updated' | 'deleted'; + resourceType: "folder" | "note"; + operation: 
"created" | "updated" | "deleted"; idField: string; dataField?: string; requiresAuth?: boolean; - tableName?: 'folders' | 'notes'; + tableName?: "folders" | "notes"; syncMessageType: string; logAction: string; } @@ -52,4 +52,4 @@ export interface WebSocketConfig { rateLimitMaxMessages: number; maxConnectionsPerUser: number; authTimeoutMs: number; -} \ No newline at end of file +}