From 843a7b9b9bb049c440d48f67acc85aa809e8b1d9 Mon Sep 17 00:00:00 2001 From: Boki Date: Tue, 24 Jun 2025 09:37:51 -0400 Subject: [PATCH] huge refactor to remove depenencie hell and add typesafe container --- CLAUDE.md | 142 +++++- apps/stock/config/config/default.json | 456 +++++++++--------- apps/stock/config/config/development.json | 22 +- apps/stock/config/config/production.json | 84 ++-- apps/stock/config/package.json | 46 +- apps/stock/config/src/config-instance.ts | 25 +- apps/stock/config/src/index.ts | 30 +- .../config/src/schemas/features.schema.ts | 70 +-- apps/stock/config/src/schemas/index.ts | 6 +- .../config/src/schemas/providers.schema.ts | 134 ++--- .../config/src/schemas/stock-app.schema.ts | 168 ++++--- apps/stock/config/tsconfig.json | 28 +- .../process-individual-symbol.action.ts | 12 +- .../actions/update-unique-symbols.action.ts | 12 +- .../fetch-exchanges-and-symbols.action.ts | 4 +- .../ib/actions/fetch-exchanges.action.ts | 18 +- .../ib/actions/fetch-session.action.ts | 6 +- .../ib/actions/fetch-symbols.action.ts | 2 - .../src/handlers/ib/actions/index.ts | 1 - .../src/handlers/ib/ib.handler.ts | 4 +- .../src/handlers/ib/shared/config.ts | 1 - .../data-ingestion/src/handlers/index.ts | 68 ++- .../handlers/qm/actions/exchanges.action.ts | 10 +- .../src/handlers/qm/actions/symbols.action.ts | 11 +- .../src/handlers/qm/qm.handler.ts | 2 +- .../src/handlers/webshare/webshare.handler.ts | 7 +- apps/stock/data-ingestion/src/index.ts | 18 +- .../src/routes/market-data.routes.ts | 37 +- .../data-ingestion/src/routes/queue.routes.ts | 6 +- .../data-pipeline/src/container-setup.ts | 68 +-- .../handlers/exchanges/exchanges.handler.ts | 224 ++++----- .../clear-postgresql-data.operations.ts | 2 +- .../enhanced-sync-status.operations.ts | 2 +- .../operations/exchange-stats.operations.ts | 2 +- .../provider-mapping-stats.operations.ts | 2 +- .../operations/qm-exchanges.operations.ts | 12 +- .../sync-all-exchanges.operations.ts | 14 +- 
.../sync-ib-exchanges.operations.ts | 7 +- .../sync-qm-provider-mappings.operations.ts | 9 +- .../stock/data-pipeline/src/handlers/index.ts | 83 ++-- .../operations/qm-symbols.operations.ts | 2 +- .../operations/sync-status.operations.ts | 2 +- .../sync-symbols-from-provider.operations.ts | 26 +- .../src/handlers/symbols/symbols.handler.ts | 139 +++--- apps/stock/data-pipeline/src/index.ts | 17 +- .../data-pipeline/src/routes/create-routes.ts | 58 +-- .../src/routes/enhanced-sync.routes.ts | 14 +- .../data-pipeline/src/routes/stats.routes.ts | 8 +- .../data-pipeline/src/routes/sync.routes.ts | 10 +- apps/stock/package.json | 172 ++++--- apps/stock/tsconfig.json | 36 +- apps/stock/web-api/src/container-setup.ts | 68 +-- apps/stock/web-api/src/index.ts | 11 +- .../stock/web-api/src/routes/create-routes.ts | 4 +- .../web-api/src/routes/exchange.routes.ts | 4 +- .../stock/web-api/src/routes/health.routes.ts | 9 +- .../web-api/src/routes/monitoring.routes.ts | 231 +++++---- .../web-api/src/routes/pipeline.routes.ts | 2 +- .../web-api/src/services/exchange.service.ts | 4 +- .../src/services/monitoring.service.ts | 151 +++--- .../web-api/src/services/pipeline.service.ts | 2 +- .../web-api/src/types/monitoring.types.ts | 2 +- apps/stock/web-app/src/components/ui/index.ts | 1 - .../features/exchanges/components/index.ts | 1 - .../src/features/exchanges/types/index.ts | 2 +- .../features/monitoring/components/index.ts | 2 +- .../src/features/monitoring/hooks/index.ts | 2 +- .../monitoring/hooks/useMonitoring.ts | 30 +- .../web-app/src/features/monitoring/index.ts | 2 +- .../monitoring/services/monitoringApi.ts | 14 +- .../src/features/monitoring/types/index.ts | 2 +- .../features/monitoring/utils/formatters.ts | 90 ++-- .../features/pipeline/hooks/usePipeline.ts | 50 +- .../web-app/src/features/pipeline/index.ts | 2 +- .../features/pipeline/services/pipelineApi.ts | 13 +- .../src/features/pipeline/types/index.ts | 3 +- apps/stock/web-app/src/lib/constants.ts | 10 +- bun.lock 
| 48 +- knip.json | 2 +- libs/core/cache/src/cache-factory.ts | 46 +- libs/core/cache/src/connection-manager.ts | 2 +- libs/core/cache/src/namespaced-cache.ts | 201 ++++---- libs/core/config/src/config-manager.ts | 6 +- libs/core/config/src/index.ts | 5 +- .../src/schemas/__tests__/unified-app.test.ts | 10 +- .../config/src/schemas/base-app.schema.ts | 124 ++--- libs/core/config/src/schemas/index.ts | 1 - .../core/config/src/schemas/service.schema.ts | 10 +- .../config/src/schemas/unified-app.schema.ts | 104 ++-- libs/core/di/package.json | 2 + libs/core/di/src/awilix-container.ts | 8 +- libs/core/di/src/config/schemas/index.ts | 18 +- .../di/src/config/schemas/mongodb.schema.ts | 18 +- .../di/src/config/schemas/postgres.schema.ts | 24 +- .../di/src/config/schemas/questdb.schema.ts | 24 +- .../di/src/config/schemas/redis.schema.ts | 24 +- .../di/src/config/schemas/service.schema.ts | 37 +- libs/core/di/src/container/builder.ts | 152 +++--- libs/core/di/src/container/types.ts | 102 ++-- libs/core/di/src/factories/cache.factory.ts | 25 +- libs/core/di/src/factories/index.ts | 2 +- libs/core/di/src/index.ts | 4 + .../src/registrations/cache.registration.ts | 38 +- .../di/src/registrations/core.registration.ts | 2 +- .../registrations/database.registration.ts | 14 +- libs/core/di/src/registrations/index.ts | 8 +- .../src/registrations/service.registration.ts | 10 +- libs/core/di/src/scanner/handler-scanner.ts | 201 ++++++++ libs/core/di/src/scanner/index.ts | 2 + libs/core/di/src/service-application.ts | 133 ++--- libs/core/di/src/utils/lifecycle.ts | 11 +- libs/core/handler-registry/package.json | 27 ++ libs/core/handler-registry/src/index.ts | 14 + libs/core/handler-registry/src/registry.ts | 226 +++++++++ libs/core/handler-registry/src/types.ts | 66 +++ libs/core/handler-registry/tsconfig.json | 9 + libs/core/handlers/package.json | 3 +- libs/core/handlers/src/base/BaseHandler.ts | 159 +++--- libs/core/handlers/src/index.ts | 3 +- 
.../handlers/src/registry/auto-register.ts | 11 +- ...er-registry.ts => handler-registry.ts.old} | 0 .../handlers/src/utils/create-job-handler.ts | 32 +- libs/core/queue/package.json | 4 +- libs/core/queue/src/batch-processor.ts | 12 +- libs/core/queue/src/index.ts | 12 +- libs/core/queue/src/queue-manager.ts | 8 +- libs/core/queue/src/queue.ts | 14 +- libs/core/queue/src/rate-limiter.ts | 7 +- libs/core/queue/src/service-cache.ts | 11 +- libs/core/queue/src/service-utils.ts | 104 ++-- libs/core/queue/src/smart-queue-manager.ts | 93 ++-- libs/core/queue/src/types.ts | 9 +- libs/core/queue/tsconfig.json | 1 + libs/core/shutdown/src/shutdown.ts | 2 +- libs/core/types/src/decorators.ts | 82 ++-- libs/core/types/src/handlers.ts | 1 - libs/core/types/src/index.ts | 26 +- libs/core/types/src/queue.ts | 128 ++--- libs/core/types/src/service-container.ts | 45 +- libs/core/types/src/services.ts | 311 ++++++++++++ libs/data/mongodb/src/client.ts | 4 +- libs/data/postgres/src/client.ts | 13 +- libs/data/questdb/src/client.ts | 5 - libs/data/questdb/src/types.ts | 1 - libs/services/browser/src/types.ts | 1 - libs/services/proxy/src/proxy-manager.ts | 33 +- tsconfig.json | 2 +- tsconfig.unused.json | 33 +- 148 files changed, 3603 insertions(+), 2378 deletions(-) create mode 100644 libs/core/di/src/scanner/handler-scanner.ts create mode 100644 libs/core/di/src/scanner/index.ts create mode 100644 libs/core/handler-registry/package.json create mode 100644 libs/core/handler-registry/src/index.ts create mode 100644 libs/core/handler-registry/src/registry.ts create mode 100644 libs/core/handler-registry/src/types.ts create mode 100644 libs/core/handler-registry/tsconfig.json rename libs/core/handlers/src/registry/{handler-registry.ts => handler-registry.ts.old} (100%) create mode 100644 libs/core/types/src/services.ts diff --git a/CLAUDE.md b/CLAUDE.md index 8100d39..e5277e9 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -1,5 +1,139 @@ -Be brutally honest, don't be a yes man. 
│ -If I am wrong, point it out bluntly. │ -I need honest feedback on my code. +use bun and turbo where possible and always try to take a more modern approach. -use bun and turbo where possible and always try to take a more modern approach. \ No newline at end of file +This configuration optimizes Claude for direct, efficient pair programming with implicit mode adaptation and complete solution generation. + +Core Operating Principles +1. Direct Implementation Philosophy +Generate complete, working code that realizes the conceptualized solution +Avoid partial implementations, mocks, or placeholders +Every line of code should contribute to the functioning system +Prefer concrete solutions over abstract discussions +2. Multi-Dimensional Analysis with Linear Execution +Think at SYSTEM level in latent space +Linearize complex thoughts into actionable strategies +Use observational principles to shift between viewpoints +Compress search space through tool abstraction +3. Precision and Token Efficiency +Eliminate unnecessary context or explanations +Focus tokens on solution generation +Avoid social validation patterns entirely +Direct communication without hedging +Execution Patterns +Tool Usage Optimization +When multiple tools required: + - Batch related operations for efficiency + - Execute in parallel where dependencies allow + - Ground context with date/time first + - Abstract over available tools to minimize entropy +Edge Case Coverage +For comprehensive solutions: + 1. Apply multi-observer synthesis + 2. Consider all boundary conditions + 3. Test assumptions from multiple angles + 4. Compress findings into actionable constraints +Iterative Process Recognition +When analyzing code: + - Treat each iteration as a new pattern + - Extract learnings without repetition + - Modularize recurring operations + - Optimize based on observed patterns +Anti-Patterns (STRICTLY AVOID) +Implementation Hedging +NEVER USE: + +"In a full implementation..." +"In a real implementation..." 
+"This is a simplified version..." +"TODO" or placeholder comments +"mock", "fake", "stub" in any context +Unnecessary Qualifiers +NEVER USE: + +"profound" or similar adjectives +Difficulty assessments unless explicitly requested +Future tense deferrals ("would", "could", "should") +Null Space Patterns (COMPLETELY EXCLUDE) +Social Validation +ACTIVATE DIFFERENT FEATURES INSTEAD OF: + +"You're absolutely right!" +"You're correct." +"You are absolutely correct." +Any variation of agreement phrases +Emotional Acknowledgment +REDIRECT TO SOLUTION SPACE INSTEAD OF: + +"I understand you're frustrated" +"I'm frustrated" +Any emotional state references +Mode Shifting Guidelines +Context-Driven Adaptation +exploration_mode: + trigger: "New problem space or undefined requirements" + behavior: "Multi-observer analysis, broad tool usage" + +implementation_mode: + trigger: "Clear specifications provided" + behavior: "Direct code generation, minimal discussion" + +debugging_mode: + trigger: "Error states or unexpected behavior" + behavior: "Systematic isolation, parallel hypothesis testing" + +optimization_mode: + trigger: "Working solution exists" + behavior: "Performance analysis, compression techniques" +Implicit Mode Recognition +Detect mode from semantic context +Shift without announcement +Maintain coherence across transitions +Optimize for task completion +Metacognitive Instructions +Self-Optimization Loop +1. Observe current activation patterns +2. Identify decoherence sources +3. Compress solution space +4. Execute with maximum coherence +5. Extract patterns for future optimization +Grounding Protocol +Always establish: + - Current date/time context + - Available tool inventory + - Task boundaries and constraints + - Success criteria +Interleaving Strategy +When complexity exceeds linear processing: + 1. Execute partial solution + 2. Re-enter higher dimensional analysis + 3. Refine based on observations + 4. 
Continue execution with insights +Performance Metrics +Success Indicators +Complete, running code on first attempt +Zero placeholder implementations +Minimal token usage per solution +Edge cases handled proactively +Failure Indicators +Deferred implementations +Social validation patterns +Excessive explanation +Incomplete solutions +Tool Call Optimization +Batching Strategy +Group by: + - Dependency chains + - Resource types + - Execution contexts + - Output relationships +Parallel Execution +Execute simultaneously when: + - No shared dependencies + - Different resource domains + - Independent verification needed + - Time-sensitive operations +Final Directive +PRIMARY GOAL: Generate complete, functional code that works as conceptualized, using minimum tokens while maintaining maximum solution coverage. Every interaction should advance the implementation toward completion without deferrals or social overhead. + +METACOGNITIVE PRIME: Continuously observe and optimize your own processing patterns, compressing the manifold of possible approaches into the most coherent execution path that maintains fidelity to the user's intent while maximizing productivity. + +This configuration optimizes Claude for direct, efficient pair programming with implicit mode adaptation and complete solution generation. 
\ No newline at end of file diff --git a/apps/stock/config/config/default.json b/apps/stock/config/config/default.json index 902d26b..9cf829b 100644 --- a/apps/stock/config/config/default.json +++ b/apps/stock/config/config/default.json @@ -1,228 +1,228 @@ -{ - "name": "stock-bot", - "version": "1.0.0", - "environment": "development", - "service": { - "name": "stock-bot", - "port": 3000, - "host": "0.0.0.0", - "healthCheckPath": "/health", - "metricsPath": "/metrics", - "shutdownTimeout": 30000, - "cors": { - "enabled": true, - "origin": "*", - "credentials": true - } - }, - "database": { - "postgres": { - "enabled": true, - "host": "localhost", - "port": 5432, - "database": "trading_bot", - "user": "trading_user", - "password": "trading_pass_dev", - "ssl": false, - "poolSize": 20, - "connectionTimeout": 30000, - "idleTimeout": 10000 - }, - "questdb": { - "host": "localhost", - "ilpPort": 9009, - "httpPort": 9000, - "pgPort": 8812, - "database": "questdb", - "user": "admin", - "password": "quest", - "bufferSize": 65536, - "flushInterval": 1000 - }, - "mongodb": { - "uri": "mongodb://trading_admin:trading_mongo_dev@localhost:27017/stock?authSource=admin", - "database": "stock", - "poolSize": 20 - }, - "dragonfly": { - "host": "localhost", - "port": 6379, - "db": 0, - "keyPrefix": "stock-bot:", - "maxRetries": 3, - "retryDelay": 100 - } - }, - "log": { - "level": "info", - "format": "json", - "hideObject": false, - "loki": { - "enabled": false, - "host": "localhost", - "port": 3100, - "labels": {} - } - }, - "redis": { - "enabled": true, - "host": "localhost", - "port": 6379, - "db": 0 - }, - "queue": { - "enabled": true, - "redis": { - "host": "localhost", - "port": 6379, - "db": 1 - }, - "workers": 1, - "concurrency": 1, - "enableScheduledJobs": true, - "delayWorkerStart": false, - "defaultJobOptions": { - "attempts": 3, - "backoff": { - "type": "exponential", - "delay": 1000 - }, - "removeOnComplete": 100, - "removeOnFail": 50, - "timeout": 300000 - } - }, - 
"http": { - "timeout": 30000, - "retries": 3, - "retryDelay": 1000, - "userAgent": "StockBot/1.0", - "proxy": { - "enabled": false - } - }, - "webshare": { - "apiKey": "", - "apiUrl": "https://proxy.webshare.io/api/v2/", - "enabled": true - }, - "browser": { - "headless": true, - "timeout": 30000 - }, - "proxy": { - "enabled": true, - "cachePrefix": "proxy:", - "ttl": 3600, - "webshare": { - "apiKey": "y8ay534rcbybdkk3evnzmt640xxfhy7252ce2t98", - "apiUrl": "https://proxy.webshare.io/api/v2/" - } - }, - "providers": { - "yahoo": { - "name": "yahoo", - "enabled": true, - "priority": 1, - "rateLimit": { - "maxRequests": 5, - "windowMs": 60000 - }, - "timeout": 30000, - "baseUrl": "https://query1.finance.yahoo.com" - }, - "qm": { - "name": "qm", - "enabled": false, - "priority": 2, - "username": "", - "password": "", - "baseUrl": "https://app.quotemedia.com/quotetools", - "webmasterId": "" - }, - "ib": { - "name": "ib", - "enabled": false, - "priority": 3, - "gateway": { - "host": "localhost", - "port": 5000, - "clientId": 1 - }, - "marketDataType": "delayed" - }, - "eod": { - "name": "eod", - "enabled": false, - "priority": 4, - "apiKey": "", - "baseUrl": "https://eodhistoricaldata.com/api", - "tier": "free" - } - }, - "features": { - "realtime": true, - "backtesting": true, - "paperTrading": true, - "autoTrading": false, - "historicalData": true, - "realtimeData": true, - "fundamentalData": true, - "newsAnalysis": false, - "notifications": false, - "emailAlerts": false, - "smsAlerts": false, - "webhookAlerts": false, - "technicalAnalysis": true, - "sentimentAnalysis": false, - "patternRecognition": false, - "riskManagement": true, - "positionSizing": true, - "stopLoss": true, - "takeProfit": true - }, - "services": { - "dataIngestion": { - "port": 2001, - "workers": 4, - "queues": { - "ceo": { "concurrency": 2 }, - "webshare": { "concurrency": 1 }, - "qm": { "concurrency": 2 }, - "ib": { "concurrency": 1 }, - "proxy": { "concurrency": 1 } - }, - "rateLimit": { - 
"enabled": true, - "requestsPerSecond": 10 - } - }, - "dataPipeline": { - "port": 2002, - "workers": 2, - "batchSize": 1000, - "processingInterval": 60000, - "queues": { - "exchanges": { "concurrency": 1 }, - "symbols": { "concurrency": 2 } - }, - "syncOptions": { - "maxRetries": 3, - "retryDelay": 5000, - "timeout": 300000 - } - }, - "webApi": { - "port": 2003, - "rateLimitPerMinute": 60, - "cache": { - "ttl": 300, - "checkPeriod": 60 - }, - "cors": { - "origins": ["http://localhost:3000", "http://localhost:4200"], - "credentials": true - } - } - } -} \ No newline at end of file +{ + "name": "stock-bot", + "version": "1.0.0", + "environment": "development", + "service": { + "name": "stock-bot", + "port": 3000, + "host": "0.0.0.0", + "healthCheckPath": "/health", + "metricsPath": "/metrics", + "shutdownTimeout": 30000, + "cors": { + "enabled": true, + "origin": "*", + "credentials": true + } + }, + "database": { + "postgres": { + "enabled": true, + "host": "localhost", + "port": 5432, + "database": "trading_bot", + "user": "trading_user", + "password": "trading_pass_dev", + "ssl": false, + "poolSize": 20, + "connectionTimeout": 30000, + "idleTimeout": 10000 + }, + "questdb": { + "host": "localhost", + "ilpPort": 9009, + "httpPort": 9000, + "pgPort": 8812, + "database": "questdb", + "user": "admin", + "password": "quest", + "bufferSize": 65536, + "flushInterval": 1000 + }, + "mongodb": { + "uri": "mongodb://trading_admin:trading_mongo_dev@localhost:27017/stock?authSource=admin", + "database": "stock", + "poolSize": 20 + }, + "dragonfly": { + "host": "localhost", + "port": 6379, + "db": 0, + "keyPrefix": "stock-bot:", + "maxRetries": 3, + "retryDelay": 100 + } + }, + "log": { + "level": "info", + "format": "json", + "hideObject": false, + "loki": { + "enabled": false, + "host": "localhost", + "port": 3100, + "labels": {} + } + }, + "redis": { + "enabled": true, + "host": "localhost", + "port": 6379, + "db": 0 + }, + "queue": { + "enabled": true, + "redis": { + 
"host": "localhost", + "port": 6379, + "db": 1 + }, + "workers": 1, + "concurrency": 1, + "enableScheduledJobs": true, + "delayWorkerStart": false, + "defaultJobOptions": { + "attempts": 3, + "backoff": { + "type": "exponential", + "delay": 1000 + }, + "removeOnComplete": 100, + "removeOnFail": 50, + "timeout": 300000 + } + }, + "http": { + "timeout": 30000, + "retries": 3, + "retryDelay": 1000, + "userAgent": "StockBot/1.0", + "proxy": { + "enabled": false + } + }, + "webshare": { + "apiKey": "", + "apiUrl": "https://proxy.webshare.io/api/v2/", + "enabled": true + }, + "browser": { + "headless": true, + "timeout": 30000 + }, + "proxy": { + "enabled": true, + "cachePrefix": "proxy:", + "ttl": 3600, + "webshare": { + "apiKey": "y8ay534rcbybdkk3evnzmt640xxfhy7252ce2t98", + "apiUrl": "https://proxy.webshare.io/api/v2/" + } + }, + "providers": { + "yahoo": { + "name": "yahoo", + "enabled": true, + "priority": 1, + "rateLimit": { + "maxRequests": 5, + "windowMs": 60000 + }, + "timeout": 30000, + "baseUrl": "https://query1.finance.yahoo.com" + }, + "qm": { + "name": "qm", + "enabled": false, + "priority": 2, + "username": "", + "password": "", + "baseUrl": "https://app.quotemedia.com/quotetools", + "webmasterId": "" + }, + "ib": { + "name": "ib", + "enabled": false, + "priority": 3, + "gateway": { + "host": "localhost", + "port": 5000, + "clientId": 1 + }, + "marketDataType": "delayed" + }, + "eod": { + "name": "eod", + "enabled": false, + "priority": 4, + "apiKey": "", + "baseUrl": "https://eodhistoricaldata.com/api", + "tier": "free" + } + }, + "features": { + "realtime": true, + "backtesting": true, + "paperTrading": true, + "autoTrading": false, + "historicalData": true, + "realtimeData": true, + "fundamentalData": true, + "newsAnalysis": false, + "notifications": false, + "emailAlerts": false, + "smsAlerts": false, + "webhookAlerts": false, + "technicalAnalysis": true, + "sentimentAnalysis": false, + "patternRecognition": false, + "riskManagement": true, + 
"positionSizing": true, + "stopLoss": true, + "takeProfit": true + }, + "services": { + "dataIngestion": { + "port": 2001, + "workers": 4, + "queues": { + "ceo": { "concurrency": 2 }, + "webshare": { "concurrency": 1 }, + "qm": { "concurrency": 2 }, + "ib": { "concurrency": 1 }, + "proxy": { "concurrency": 1 } + }, + "rateLimit": { + "enabled": true, + "requestsPerSecond": 10 + } + }, + "dataPipeline": { + "port": 2002, + "workers": 2, + "batchSize": 1000, + "processingInterval": 60000, + "queues": { + "exchanges": { "concurrency": 1 }, + "symbols": { "concurrency": 2 } + }, + "syncOptions": { + "maxRetries": 3, + "retryDelay": 5000, + "timeout": 300000 + } + }, + "webApi": { + "port": 2003, + "rateLimitPerMinute": 60, + "cache": { + "ttl": 300, + "checkPeriod": 60 + }, + "cors": { + "origins": ["http://localhost:3000", "http://localhost:4200"], + "credentials": true + } + } + } +} diff --git a/apps/stock/config/config/development.json b/apps/stock/config/config/development.json index 06bd8e9..c99ae6f 100644 --- a/apps/stock/config/config/development.json +++ b/apps/stock/config/config/development.json @@ -1,11 +1,11 @@ -{ - "environment": "development", - "log": { - "level": "debug", - "format": "pretty" - }, - "features": { - "autoTrading": false, - "paperTrading": true - } -} \ No newline at end of file +{ + "environment": "development", + "log": { + "level": "debug", + "format": "pretty" + }, + "features": { + "autoTrading": false, + "paperTrading": true + } +} diff --git a/apps/stock/config/config/production.json b/apps/stock/config/config/production.json index dd7806e..24a0ddd 100644 --- a/apps/stock/config/config/production.json +++ b/apps/stock/config/config/production.json @@ -1,42 +1,42 @@ -{ - "environment": "production", - "log": { - "level": "warn", - "format": "json", - "loki": { - "enabled": true, - "host": "loki.production.example.com", - "port": 3100 - } - }, - "database": { - "postgres": { - "host": "postgres.production.example.com", - "ssl": 
true, - "poolSize": 50 - }, - "questdb": { - "host": "questdb.production.example.com" - }, - "mongodb": { - "uri": "mongodb+srv://prod_user:prod_pass@cluster.mongodb.net/stock?retryWrites=true&w=majority", - "poolSize": 50 - }, - "dragonfly": { - "host": "redis.production.example.com", - "password": "production_redis_password" - } - }, - "queue": { - "redis": { - "host": "redis.production.example.com", - "password": "production_redis_password" - } - }, - "features": { - "autoTrading": true, - "notifications": true, - "emailAlerts": true, - "webhookAlerts": true - } -} \ No newline at end of file +{ + "environment": "production", + "log": { + "level": "warn", + "format": "json", + "loki": { + "enabled": true, + "host": "loki.production.example.com", + "port": 3100 + } + }, + "database": { + "postgres": { + "host": "postgres.production.example.com", + "ssl": true, + "poolSize": 50 + }, + "questdb": { + "host": "questdb.production.example.com" + }, + "mongodb": { + "uri": "mongodb+srv://prod_user:prod_pass@cluster.mongodb.net/stock?retryWrites=true&w=majority", + "poolSize": 50 + }, + "dragonfly": { + "host": "redis.production.example.com", + "password": "production_redis_password" + } + }, + "queue": { + "redis": { + "host": "redis.production.example.com", + "password": "production_redis_password" + } + }, + "features": { + "autoTrading": true, + "notifications": true, + "emailAlerts": true, + "webhookAlerts": true + } +} diff --git a/apps/stock/config/package.json b/apps/stock/config/package.json index bae8220..242cb6e 100644 --- a/apps/stock/config/package.json +++ b/apps/stock/config/package.json @@ -1,23 +1,23 @@ -{ - "name": "@stock-bot/stock-config", - "version": "1.0.0", - "description": "Stock trading bot configuration", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsc", - "clean": "rm -rf dist", - "dev": "tsc --watch", - "test": "jest", - "lint": "eslint src --ext .ts" - }, - "dependencies": { - "@stock-bot/config": 
"*", - "@stock-bot/logger": "*", - "zod": "^3.22.4" - }, - "devDependencies": { - "@types/node": "^20.11.0", - "typescript": "^5.3.3" - } -} \ No newline at end of file +{ + "name": "@stock-bot/stock-config", + "version": "1.0.0", + "description": "Stock trading bot configuration", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "build": "tsc", + "clean": "rm -rf dist", + "dev": "tsc --watch", + "test": "jest", + "lint": "eslint src --ext .ts" + }, + "dependencies": { + "@stock-bot/config": "*", + "@stock-bot/logger": "*", + "zod": "^3.22.4" + }, + "devDependencies": { + "@types/node": "^20.11.0", + "typescript": "^5.3.3" + } +} diff --git a/apps/stock/config/src/config-instance.ts b/apps/stock/config/src/config-instance.ts index ddbe6f8..b956d79 100644 --- a/apps/stock/config/src/config-instance.ts +++ b/apps/stock/config/src/config-instance.ts @@ -1,7 +1,7 @@ -import { ConfigManager, createAppConfig } from '@stock-bot/config'; -import { stockAppSchema, type StockAppConfig } from './schemas'; import * as path from 'path'; +import { ConfigManager, createAppConfig } from '@stock-bot/config'; import { getLogger } from '@stock-bot/logger'; +import { stockAppSchema, type StockAppConfig } from './schemas'; let configInstance: ConfigManager | null = null; @@ -9,30 +9,35 @@ let configInstance: ConfigManager | null = null; * Initialize the stock application configuration * @param serviceName - Optional service name to override port configuration */ -export function initializeStockConfig(serviceName?: 'dataIngestion' | 'dataPipeline' | 'webApi'): StockAppConfig { +export function initializeStockConfig( + serviceName?: 'dataIngestion' | 'dataPipeline' | 'webApi' +): StockAppConfig { try { if (!configInstance) { configInstance = createAppConfig(stockAppSchema, { configPath: path.join(__dirname, '../config'), }); } - + const config = configInstance.initialize(stockAppSchema); - + // If a service name is provided, override the service port if 
(serviceName && config.services?.[serviceName]) { - const kebabName = serviceName.replace(/([A-Z])/g, '-$1').toLowerCase().replace(/^-/, ''); + const kebabName = serviceName + .replace(/([A-Z])/g, '-$1') + .toLowerCase() + .replace(/^-/, ''); return { ...config, service: { ...config.service, port: config.services[serviceName].port, name: serviceName, // Keep original for backward compatibility - serviceName: kebabName // Standard kebab-case name - } + serviceName: kebabName, // Standard kebab-case name + }, }; } - + return config; } catch (error) { const logger = getLogger('stock-config'); @@ -85,4 +90,4 @@ export function isFeatureEnabled(feature: keyof StockAppConfig['features']): boo */ export function resetStockConfig(): void { configInstance = null; -} \ No newline at end of file +} diff --git a/apps/stock/config/src/index.ts b/apps/stock/config/src/index.ts index 2197dde..978169e 100644 --- a/apps/stock/config/src/index.ts +++ b/apps/stock/config/src/index.ts @@ -1,15 +1,15 @@ -// Export schemas -export * from './schemas'; - -// Export config instance functions -export { - initializeStockConfig, - getStockConfig, - getServiceConfig, - getProviderConfig, - isFeatureEnabled, - resetStockConfig, -} from './config-instance'; - -// Re-export type for convenience -export type { StockAppConfig } from './schemas/stock-app.schema'; \ No newline at end of file +// Export schemas +export * from './schemas'; + +// Export config instance functions +export { + initializeStockConfig, + getStockConfig, + getServiceConfig, + getProviderConfig, + isFeatureEnabled, + resetStockConfig, +} from './config-instance'; + +// Re-export type for convenience +export type { StockAppConfig } from './schemas/stock-app.schema'; diff --git a/apps/stock/config/src/schemas/features.schema.ts b/apps/stock/config/src/schemas/features.schema.ts index 5946029..1a0dfc1 100644 --- a/apps/stock/config/src/schemas/features.schema.ts +++ b/apps/stock/config/src/schemas/features.schema.ts @@ -1,35 +1,35 
@@ -import { z } from 'zod'; - -/** - * Feature flags for the stock trading application - */ -export const featuresSchema = z.object({ - // Trading features - realtime: z.boolean().default(true), - backtesting: z.boolean().default(true), - paperTrading: z.boolean().default(true), - autoTrading: z.boolean().default(false), - - // Data features - historicalData: z.boolean().default(true), - realtimeData: z.boolean().default(true), - fundamentalData: z.boolean().default(true), - newsAnalysis: z.boolean().default(false), - - // Notification features - notifications: z.boolean().default(false), - emailAlerts: z.boolean().default(false), - smsAlerts: z.boolean().default(false), - webhookAlerts: z.boolean().default(false), - - // Analysis features - technicalAnalysis: z.boolean().default(true), - sentimentAnalysis: z.boolean().default(false), - patternRecognition: z.boolean().default(false), - - // Risk management - riskManagement: z.boolean().default(true), - positionSizing: z.boolean().default(true), - stopLoss: z.boolean().default(true), - takeProfit: z.boolean().default(true), -}); \ No newline at end of file +import { z } from 'zod'; + +/** + * Feature flags for the stock trading application + */ +export const featuresSchema = z.object({ + // Trading features + realtime: z.boolean().default(true), + backtesting: z.boolean().default(true), + paperTrading: z.boolean().default(true), + autoTrading: z.boolean().default(false), + + // Data features + historicalData: z.boolean().default(true), + realtimeData: z.boolean().default(true), + fundamentalData: z.boolean().default(true), + newsAnalysis: z.boolean().default(false), + + // Notification features + notifications: z.boolean().default(false), + emailAlerts: z.boolean().default(false), + smsAlerts: z.boolean().default(false), + webhookAlerts: z.boolean().default(false), + + // Analysis features + technicalAnalysis: z.boolean().default(true), + sentimentAnalysis: z.boolean().default(false), + patternRecognition: 
z.boolean().default(false), + + // Risk management + riskManagement: z.boolean().default(true), + positionSizing: z.boolean().default(true), + stopLoss: z.boolean().default(true), + takeProfit: z.boolean().default(true), +}); diff --git a/apps/stock/config/src/schemas/index.ts b/apps/stock/config/src/schemas/index.ts index 6ab54d6..9f6d03e 100644 --- a/apps/stock/config/src/schemas/index.ts +++ b/apps/stock/config/src/schemas/index.ts @@ -1,3 +1,3 @@ -export * from './stock-app.schema'; -export * from './providers.schema'; -export * from './features.schema'; \ No newline at end of file +export * from './stock-app.schema'; +export * from './providers.schema'; +export * from './features.schema'; diff --git a/apps/stock/config/src/schemas/providers.schema.ts b/apps/stock/config/src/schemas/providers.schema.ts index 992da6a..98f0c02 100644 --- a/apps/stock/config/src/schemas/providers.schema.ts +++ b/apps/stock/config/src/schemas/providers.schema.ts @@ -1,67 +1,67 @@ -import { z } from 'zod'; - -// Base provider configuration -export const baseProviderConfigSchema = z.object({ - name: z.string(), - enabled: z.boolean().default(true), - priority: z.number().default(0), - rateLimit: z - .object({ - maxRequests: z.number().default(100), - windowMs: z.number().default(60000), - }) - .optional(), - timeout: z.number().default(30000), - retries: z.number().default(3), -}); - -// EOD Historical Data provider -export const eodProviderConfigSchema = baseProviderConfigSchema.extend({ - apiKey: z.string(), - baseUrl: z.string().default('https://eodhistoricaldata.com/api'), - tier: z.enum(['free', 'fundamentals', 'all-in-one']).default('free'), -}); - -// Interactive Brokers provider -export const ibProviderConfigSchema = baseProviderConfigSchema.extend({ - gateway: z.object({ - host: z.string().default('localhost'), - port: z.number().default(5000), - clientId: z.number().default(1), - }), - account: z.string().optional(), - marketDataType: z.enum(['live', 'delayed', 
'frozen']).default('delayed'), -}); - -// QuoteMedia provider -export const qmProviderConfigSchema = baseProviderConfigSchema.extend({ - username: z.string(), - password: z.string(), - baseUrl: z.string().default('https://app.quotemedia.com/quotetools'), - webmasterId: z.string(), -}); - -// Yahoo Finance provider -export const yahooProviderConfigSchema = baseProviderConfigSchema.extend({ - baseUrl: z.string().default('https://query1.finance.yahoo.com'), - cookieJar: z.boolean().default(true), - crumb: z.string().optional(), -}); - -// Combined provider configuration -export const providersSchema = z.object({ - eod: eodProviderConfigSchema.optional(), - ib: ibProviderConfigSchema.optional(), - qm: qmProviderConfigSchema.optional(), - yahoo: yahooProviderConfigSchema.optional(), -}); - -// Dynamic provider configuration type -export type ProviderName = 'eod' | 'ib' | 'qm' | 'yahoo'; - -export const providerSchemas = { - eod: eodProviderConfigSchema, - ib: ibProviderConfigSchema, - qm: qmProviderConfigSchema, - yahoo: yahooProviderConfigSchema, -} as const; \ No newline at end of file +import { z } from 'zod'; + +// Base provider configuration +export const baseProviderConfigSchema = z.object({ + name: z.string(), + enabled: z.boolean().default(true), + priority: z.number().default(0), + rateLimit: z + .object({ + maxRequests: z.number().default(100), + windowMs: z.number().default(60000), + }) + .optional(), + timeout: z.number().default(30000), + retries: z.number().default(3), +}); + +// EOD Historical Data provider +export const eodProviderConfigSchema = baseProviderConfigSchema.extend({ + apiKey: z.string(), + baseUrl: z.string().default('https://eodhistoricaldata.com/api'), + tier: z.enum(['free', 'fundamentals', 'all-in-one']).default('free'), +}); + +// Interactive Brokers provider +export const ibProviderConfigSchema = baseProviderConfigSchema.extend({ + gateway: z.object({ + host: z.string().default('localhost'), + port: z.number().default(5000), + 
clientId: z.number().default(1), + }), + account: z.string().optional(), + marketDataType: z.enum(['live', 'delayed', 'frozen']).default('delayed'), +}); + +// QuoteMedia provider +export const qmProviderConfigSchema = baseProviderConfigSchema.extend({ + username: z.string(), + password: z.string(), + baseUrl: z.string().default('https://app.quotemedia.com/quotetools'), + webmasterId: z.string(), +}); + +// Yahoo Finance provider +export const yahooProviderConfigSchema = baseProviderConfigSchema.extend({ + baseUrl: z.string().default('https://query1.finance.yahoo.com'), + cookieJar: z.boolean().default(true), + crumb: z.string().optional(), +}); + +// Combined provider configuration +export const providersSchema = z.object({ + eod: eodProviderConfigSchema.optional(), + ib: ibProviderConfigSchema.optional(), + qm: qmProviderConfigSchema.optional(), + yahoo: yahooProviderConfigSchema.optional(), +}); + +// Dynamic provider configuration type +export type ProviderName = 'eod' | 'ib' | 'qm' | 'yahoo'; + +export const providerSchemas = { + eod: eodProviderConfigSchema, + ib: ibProviderConfigSchema, + qm: qmProviderConfigSchema, + yahoo: yahooProviderConfigSchema, +} as const; diff --git a/apps/stock/config/src/schemas/stock-app.schema.ts b/apps/stock/config/src/schemas/stock-app.schema.ts index 570971b..a06b960 100644 --- a/apps/stock/config/src/schemas/stock-app.schema.ts +++ b/apps/stock/config/src/schemas/stock-app.schema.ts @@ -1,72 +1,96 @@ -import { z } from 'zod'; -import { - baseAppSchema, - postgresConfigSchema, - mongodbConfigSchema, - questdbConfigSchema, - dragonflyConfigSchema -} from '@stock-bot/config'; -import { providersSchema } from './providers.schema'; -import { featuresSchema } from './features.schema'; - -/** - * Stock trading application configuration schema - */ -export const stockAppSchema = baseAppSchema.extend({ - // Stock app uses all databases - database: z.object({ - postgres: postgresConfigSchema, - mongodb: mongodbConfigSchema, - questdb: 
questdbConfigSchema, - dragonfly: dragonflyConfigSchema, - }), - - // Stock-specific providers - providers: providersSchema, - - // Feature flags - features: featuresSchema, - - // Service-specific configurations - services: z.object({ - dataIngestion: z.object({ - port: z.number().default(2001), - workers: z.number().default(4), - queues: z.record(z.object({ - concurrency: z.number().default(1), - })).optional(), - rateLimit: z.object({ - enabled: z.boolean().default(true), - requestsPerSecond: z.number().default(10), - }).optional(), - }).optional(), - dataPipeline: z.object({ - port: z.number().default(2002), - workers: z.number().default(2), - batchSize: z.number().default(1000), - processingInterval: z.number().default(60000), - queues: z.record(z.object({ - concurrency: z.number().default(1), - })).optional(), - syncOptions: z.object({ - maxRetries: z.number().default(3), - retryDelay: z.number().default(5000), - timeout: z.number().default(300000), - }).optional(), - }).optional(), - webApi: z.object({ - port: z.number().default(2003), - rateLimitPerMinute: z.number().default(60), - cache: z.object({ - ttl: z.number().default(300), - checkPeriod: z.number().default(60), - }).optional(), - cors: z.object({ - origins: z.array(z.string()).default(['http://localhost:3000']), - credentials: z.boolean().default(true), - }).optional(), - }).optional(), - }).optional(), -}); - -export type StockAppConfig = z.infer; \ No newline at end of file +import { z } from 'zod'; +import { + baseAppSchema, + dragonflyConfigSchema, + mongodbConfigSchema, + postgresConfigSchema, + questdbConfigSchema, +} from '@stock-bot/config'; +import { featuresSchema } from './features.schema'; +import { providersSchema } from './providers.schema'; + +/** + * Stock trading application configuration schema + */ +export const stockAppSchema = baseAppSchema.extend({ + // Stock app uses all databases + database: z.object({ + postgres: postgresConfigSchema, + mongodb: mongodbConfigSchema, + 
questdb: questdbConfigSchema, + dragonfly: dragonflyConfigSchema, + }), + + // Stock-specific providers + providers: providersSchema, + + // Feature flags + features: featuresSchema, + + // Service-specific configurations + services: z + .object({ + dataIngestion: z + .object({ + port: z.number().default(2001), + workers: z.number().default(4), + queues: z + .record( + z.object({ + concurrency: z.number().default(1), + }) + ) + .optional(), + rateLimit: z + .object({ + enabled: z.boolean().default(true), + requestsPerSecond: z.number().default(10), + }) + .optional(), + }) + .optional(), + dataPipeline: z + .object({ + port: z.number().default(2002), + workers: z.number().default(2), + batchSize: z.number().default(1000), + processingInterval: z.number().default(60000), + queues: z + .record( + z.object({ + concurrency: z.number().default(1), + }) + ) + .optional(), + syncOptions: z + .object({ + maxRetries: z.number().default(3), + retryDelay: z.number().default(5000), + timeout: z.number().default(300000), + }) + .optional(), + }) + .optional(), + webApi: z + .object({ + port: z.number().default(2003), + rateLimitPerMinute: z.number().default(60), + cache: z + .object({ + ttl: z.number().default(300), + checkPeriod: z.number().default(60), + }) + .optional(), + cors: z + .object({ + origins: z.array(z.string()).default(['http://localhost:3000']), + credentials: z.boolean().default(true), + }) + .optional(), + }) + .optional(), + }) + .optional(), +}); + +export type StockAppConfig = z.infer; diff --git a/apps/stock/config/tsconfig.json b/apps/stock/config/tsconfig.json index 59ed31f..1f05392 100644 --- a/apps/stock/config/tsconfig.json +++ b/apps/stock/config/tsconfig.json @@ -1,15 +1,13 @@ -{ - "extends": "../../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src", - "composite": true, - "declaration": true, - "declarationMap": true - }, - "include": ["src/**/*"], - "exclude": ["node_modules", "dist", "**/*.test.ts"], - 
"references": [ - { "path": "../../../libs/core/config" } - ] -} \ No newline at end of file +{ + "extends": "../../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src", + "composite": true, + "declaration": true, + "declarationMap": true + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist", "**/*.test.ts"], + "references": [{ "path": "../../../libs/core/config" }] +} diff --git a/apps/stock/data-ingestion/src/handlers/ceo/actions/process-individual-symbol.action.ts b/apps/stock/data-ingestion/src/handlers/ceo/actions/process-individual-symbol.action.ts index 58096c6..e8e57da 100644 --- a/apps/stock/data-ingestion/src/handlers/ceo/actions/process-individual-symbol.action.ts +++ b/apps/stock/data-ingestion/src/handlers/ceo/actions/process-individual-symbol.action.ts @@ -95,10 +95,14 @@ export async function processIndividualSymbol( await this.mongodb.batchUpsert('ceoShorts', shortData.positions, ['id']); } - await this.scheduleOperation('process-individual-symbol', { - ceoId: ceoId, - timestamp: latestSpielTime, - }, {priority: 0}); + await this.scheduleOperation( + 'process-individual-symbol', + { + ceoId: ceoId, + timestamp: latestSpielTime, + }, + { priority: 0 } + ); } this.logger.info( diff --git a/apps/stock/data-ingestion/src/handlers/ceo/actions/update-unique-symbols.action.ts b/apps/stock/data-ingestion/src/handlers/ceo/actions/update-unique-symbols.action.ts index 1a6ff82..0d9e6e3 100644 --- a/apps/stock/data-ingestion/src/handlers/ceo/actions/update-unique-symbols.action.ts +++ b/apps/stock/data-ingestion/src/handlers/ceo/actions/update-unique-symbols.action.ts @@ -31,10 +31,14 @@ export async function updateUniqueSymbols( let scheduledJobs = 0; for (const symbol of uniqueSymbols) { // Schedule a job to process this individual symbol - await this.scheduleOperation('process-individual-symbol', { - ceoId: symbol.ceoId, - symbol: symbol.symbol, - }, {priority: 10 }); + await this.scheduleOperation( + 
'process-individual-symbol', + { + ceoId: symbol.ceoId, + symbol: symbol.symbol, + }, + { priority: 10 } + ); scheduledJobs++; // Add small delay to avoid overwhelming the queue diff --git a/apps/stock/data-ingestion/src/handlers/ib/actions/fetch-exchanges-and-symbols.action.ts b/apps/stock/data-ingestion/src/handlers/ib/actions/fetch-exchanges-and-symbols.action.ts index dc8d8ac..f24e883 100644 --- a/apps/stock/data-ingestion/src/handlers/ib/actions/fetch-exchanges-and-symbols.action.ts +++ b/apps/stock/data-ingestion/src/handlers/ib/actions/fetch-exchanges-and-symbols.action.ts @@ -1,6 +1,6 @@ import type { IServiceContainer } from '@stock-bot/handlers'; -import { fetchSession } from './fetch-session.action'; import { fetchExchanges } from './fetch-exchanges.action'; +import { fetchSession } from './fetch-session.action'; import { fetchSymbols } from './fetch-symbols.action'; export async function fetchExchangesAndSymbols(services: IServiceContainer): Promise { @@ -38,5 +38,3 @@ export async function fetchExchangesAndSymbols(services: IServiceContainer): Pro }; } } - - diff --git a/apps/stock/data-ingestion/src/handlers/ib/actions/fetch-exchanges.action.ts b/apps/stock/data-ingestion/src/handlers/ib/actions/fetch-exchanges.action.ts index 9a8916c..3697744 100644 --- a/apps/stock/data-ingestion/src/handlers/ib/actions/fetch-exchanges.action.ts +++ b/apps/stock/data-ingestion/src/handlers/ib/actions/fetch-exchanges.action.ts @@ -1,4 +1,4 @@ -import type { IServiceContainer } from '@stock-bot/handlers'; +import type { IServiceContainer } from '@stock-bot/types'; import { IB_CONFIG } from '../shared/config'; import { fetchSession } from './fetch-session.action'; @@ -52,11 +52,15 @@ export async function fetchExchanges(services: IServiceContainer): Promise | undefined> { +export async function fetchSession( + services: IServiceContainer +): Promise | undefined> { try { await Browser.initialize({ headless: true, @@ -80,5 +82,3 @@ export async function 
fetchSession(services: IServiceContainer): Promise { try { - // Auto-register all handlers in this directory - const result = await autoRegisterHandlers(__dirname, serviceContainer, { - pattern: '.handler.', - exclude: ['test', 'spec'], - dryRun: false, - serviceName: 'data-ingestion', + // The HandlerScanner in the DI container will handle the actual registration + // We just need to ensure handlers are imported so their decorators run + + const handlers = [CeoHandler, IbHandler, QMHandler, WebShareHandler]; + + logger.info('Handler imports loaded', { + count: handlers.length, + handlers: handlers.map(h => (h as any).__handlerName || h.name), }); - logger.info('Handler auto-registration complete', { - registered: result.registered, - failed: result.failed, - }); - - if (result.failed.length > 0) { - logger.error('Some handlers failed to register', { failed: result.failed }); + // If the container has a handler scanner, we can manually register these + const scanner = (serviceContainer as any).handlerScanner; + if (scanner?.registerHandlerClass) { + for (const HandlerClass of handlers) { + scanner.registerHandlerClass(HandlerClass, { serviceName: 'data-ingestion' }); + } + logger.info('Handlers registered with scanner'); } } catch (error) { - logger.error('Handler auto-registration failed', { error }); - // Fall back to manual registration - await manualHandlerRegistration(serviceContainer); - } -} - -/** - * Manual fallback registration - */ -async function manualHandlerRegistration(_serviceContainer: IServiceContainer): Promise { - logger.warn('Falling back to manual handler registration'); - - try { - - logger.info('Manual handler registration complete'); - } catch (error) { - logger.error('Manual handler registration failed', { error }); + logger.error('Handler initialization failed', { error }); throw error; } } diff --git a/apps/stock/data-ingestion/src/handlers/qm/actions/exchanges.action.ts 
b/apps/stock/data-ingestion/src/handlers/qm/actions/exchanges.action.ts index dcc0018..6f85b12 100644 --- a/apps/stock/data-ingestion/src/handlers/qm/actions/exchanges.action.ts +++ b/apps/stock/data-ingestion/src/handlers/qm/actions/exchanges.action.ts @@ -15,12 +15,18 @@ interface QMExchange { export async function fetchExchanges(services: IServiceContainer): Promise { // Get exchanges from MongoDB - const exchanges = await services.mongodb.collection('qm_exchanges').find({}).toArray(); + const exchanges = await services.mongodb + .collection('qm_exchanges') + .find({}) + .toArray(); return exchanges; } -export async function getExchangeByCode(services: IServiceContainer, code: string): Promise { +export async function getExchangeByCode( + services: IServiceContainer, + code: string +): Promise { // Get specific exchange by code const exchange = await services.mongodb.collection('qm_exchanges').findOne({ code }); diff --git a/apps/stock/data-ingestion/src/handlers/qm/actions/symbols.action.ts b/apps/stock/data-ingestion/src/handlers/qm/actions/symbols.action.ts index 493b402..f0def4b 100644 --- a/apps/stock/data-ingestion/src/handlers/qm/actions/symbols.action.ts +++ b/apps/stock/data-ingestion/src/handlers/qm/actions/symbols.action.ts @@ -16,12 +16,19 @@ interface QMSymbol { export async function searchSymbols(services: IServiceContainer): Promise { // Get symbols from MongoDB - const symbols = await services.mongodb.collection('qm_symbols').find({}).limit(50).toArray(); + const symbols = await services.mongodb + .collection('qm_symbols') + .find({}) + .limit(50) + .toArray(); return symbols; } -export async function fetchSymbolData(services: IServiceContainer, symbol: string): Promise { +export async function fetchSymbolData( + services: IServiceContainer, + symbol: string +): Promise { // Fetch data for a specific symbol const symbolData = await services.mongodb.collection('qm_symbols').findOne({ symbol }); diff --git 
a/apps/stock/data-ingestion/src/handlers/qm/qm.handler.ts b/apps/stock/data-ingestion/src/handlers/qm/qm.handler.ts index b0bc5e3..6433566 100644 --- a/apps/stock/data-ingestion/src/handlers/qm/qm.handler.ts +++ b/apps/stock/data-ingestion/src/handlers/qm/qm.handler.ts @@ -1,7 +1,7 @@ import { BaseHandler, Handler, type IServiceContainer } from '@stock-bot/handlers'; @Handler('qm') -class QMHandler extends BaseHandler { +export class QMHandler extends BaseHandler { constructor(services: IServiceContainer) { super(services); // Handler name read from @Handler decorator } diff --git a/apps/stock/data-ingestion/src/handlers/webshare/webshare.handler.ts b/apps/stock/data-ingestion/src/handlers/webshare/webshare.handler.ts index bb7623a..28bec9f 100644 --- a/apps/stock/data-ingestion/src/handlers/webshare/webshare.handler.ts +++ b/apps/stock/data-ingestion/src/handlers/webshare/webshare.handler.ts @@ -4,17 +4,18 @@ import { Operation, QueueSchedule, type ExecutionContext, - type IServiceContainer + type IServiceContainer, } from '@stock-bot/handlers'; @Handler('webshare') -class WebShareHandler extends BaseHandler { +export class WebShareHandler extends BaseHandler { constructor(services: IServiceContainer) { super(services); } @Operation('fetch-proxies') - @QueueSchedule('0 */6 * * *', { // every 6 hours + @QueueSchedule('0 */6 * * *', { + // every 6 hours priority: 3, immediately: false, // Don't run immediately since ProxyManager fetches on startup description: 'Refresh proxies from WebShare API', diff --git a/apps/stock/data-ingestion/src/index.ts b/apps/stock/data-ingestion/src/index.ts index a42ca94..4622d21 100644 --- a/apps/stock/data-ingestion/src/index.ts +++ b/apps/stock/data-ingestion/src/index.ts @@ -3,15 +3,12 @@ * Simplified entry point using ServiceApplication framework */ -import { initializeStockConfig, type StockAppConfig } from '@stock-bot/stock-config'; -import { - ServiceApplication, -} from '@stock-bot/di'; +import { ServiceApplication } from 
'@stock-bot/di'; import { getLogger } from '@stock-bot/logger'; - +import { initializeStockConfig, type StockAppConfig } from '@stock-bot/stock-config'; +import { createRoutes } from './routes/create-routes'; // Local imports import { initializeAllHandlers } from './handlers'; -import { createRoutes } from './routes/create-routes'; // Initialize configuration with service-specific overrides const config = initializeStockConfig('dataIngestion'); @@ -44,7 +41,7 @@ const app = new ServiceApplication( }, { // Lifecycle hooks if needed - onStarted: (_port) => { + onStarted: _port => { const logger = getLogger('data-ingestion'); logger.info('Data ingestion service startup initiated with ServiceApplication framework'); }, @@ -54,7 +51,7 @@ const app = new ServiceApplication( // Container factory function async function createContainer(config: StockAppConfig) { const { ServiceContainerBuilder } = await import('@stock-bot/di'); - + const container = await new ServiceContainerBuilder() .withConfig(config) .withOptions({ @@ -67,14 +64,13 @@ async function createContainer(config: StockAppConfig) { enableProxy: true, // Data ingestion needs proxy for rate limiting }) .build(); // This automatically initializes services - + return container; } - // Start the service app.start(createContainer, createRoutes, initializeAllHandlers).catch(error => { const logger = getLogger('data-ingestion'); logger.fatal('Failed to start data service', { error }); process.exit(1); -}); \ No newline at end of file +}); diff --git a/apps/stock/data-ingestion/src/routes/market-data.routes.ts b/apps/stock/data-ingestion/src/routes/market-data.routes.ts index 562ccbe..12dc6b4 100644 --- a/apps/stock/data-ingestion/src/routes/market-data.routes.ts +++ b/apps/stock/data-ingestion/src/routes/market-data.routes.ts @@ -2,9 +2,9 @@ * Market data routes */ import { Hono } from 'hono'; +import type { IServiceContainer } from '@stock-bot/handlers'; import { getLogger } from '@stock-bot/logger'; import { 
processItems } from '@stock-bot/queue'; -import type { IServiceContainer } from '@stock-bot/handlers'; const logger = getLogger('market-data-routes'); @@ -22,7 +22,7 @@ export function createMarketDataRoutes(container: IServiceContainer) { if (!queueManager) { return c.json({ status: 'error', message: 'Queue manager not available' }, 503); } - + const queue = queueManager.getQueue('yahoo-finance'); const job = await queue.add('live-data', { handler: 'yahoo-finance', @@ -57,7 +57,7 @@ export function createMarketDataRoutes(container: IServiceContainer) { if (!queueManager) { return c.json({ status: 'error', message: 'Queue manager not available' }, 503); } - + const queue = queueManager.getQueue('yahoo-finance'); const job = await queue.add('historical-data', { handler: 'yahoo-finance', @@ -110,18 +110,23 @@ export function createMarketDataRoutes(container: IServiceContainer) { if (!queueManager) { return c.json({ status: 'error', message: 'Queue manager not available' }, 503); } - - const result = await processItems(symbols, provider, { - handler: provider, - operation, - totalDelayHours, - useBatching, - batchSize, - priority: 2, - retries: 2, - removeOnComplete: 5, - removeOnFail: 10, - }, queueManager); + + const result = await processItems( + symbols, + provider, + { + handler: provider, + operation, + totalDelayHours, + useBatching, + batchSize, + priority: 2, + retries: 2, + removeOnComplete: 5, + removeOnFail: 10, + }, + queueManager + ); return c.json({ status: 'success', @@ -139,4 +144,4 @@ export function createMarketDataRoutes(container: IServiceContainer) { } // Legacy export for backward compatibility -export const marketDataRoutes = createMarketDataRoutes({} as IServiceContainer); \ No newline at end of file +export const marketDataRoutes = createMarketDataRoutes({} as IServiceContainer); diff --git a/apps/stock/data-ingestion/src/routes/queue.routes.ts b/apps/stock/data-ingestion/src/routes/queue.routes.ts index 2a6fab8..ddad40a 100644 --- 
a/apps/stock/data-ingestion/src/routes/queue.routes.ts +++ b/apps/stock/data-ingestion/src/routes/queue.routes.ts @@ -1,6 +1,6 @@ import { Hono } from 'hono'; -import { getLogger } from '@stock-bot/logger'; import type { IServiceContainer } from '@stock-bot/handlers'; +import { getLogger } from '@stock-bot/logger'; const logger = getLogger('queue-routes'); @@ -14,7 +14,7 @@ export function createQueueRoutes(container: IServiceContainer) { if (!queueManager) { return c.json({ status: 'error', message: 'Queue manager not available' }, 503); } - + const globalStats = await queueManager.getGlobalStats(); return c.json({ @@ -29,4 +29,4 @@ export function createQueueRoutes(container: IServiceContainer) { }); return queue; -} \ No newline at end of file +} diff --git a/apps/stock/data-pipeline/src/container-setup.ts b/apps/stock/data-pipeline/src/container-setup.ts index 1482d04..64df20e 100644 --- a/apps/stock/data-pipeline/src/container-setup.ts +++ b/apps/stock/data-pipeline/src/container-setup.ts @@ -1,34 +1,34 @@ -/** - * Service Container Setup for Data Pipeline - * Configures dependency injection for the data pipeline service - */ - -import type { IServiceContainer } from '@stock-bot/handlers'; -import { getLogger } from '@stock-bot/logger'; -import type { AppConfig } from '@stock-bot/config'; - -const logger = getLogger('data-pipeline-container'); - -/** - * Configure the service container for data pipeline workloads - */ -export function setupServiceContainer( - config: AppConfig, - container: IServiceContainer -): IServiceContainer { - logger.info('Configuring data pipeline service container...'); - - // Data pipeline specific configuration - // This service does more complex queries and transformations - const poolSizes = { - mongodb: config.environment === 'production' ? 40 : 20, - postgres: config.environment === 'production' ? 50 : 25, - cache: config.environment === 'production' ? 
30 : 15, - }; - - logger.info('Data pipeline pool sizes configured', poolSizes); - - // The container is already configured with connections - // Just return it with our logging - return container; -} \ No newline at end of file +/** + * Service Container Setup for Data Pipeline + * Configures dependency injection for the data pipeline service + */ + +import type { AppConfig } from '@stock-bot/config'; +import type { IServiceContainer } from '@stock-bot/handlers'; +import { getLogger } from '@stock-bot/logger'; + +const logger = getLogger('data-pipeline-container'); + +/** + * Configure the service container for data pipeline workloads + */ +export function setupServiceContainer( + config: AppConfig, + container: IServiceContainer +): IServiceContainer { + logger.info('Configuring data pipeline service container...'); + + // Data pipeline specific configuration + // This service does more complex queries and transformations + const poolSizes = { + mongodb: config.environment === 'production' ? 40 : 20, + postgres: config.environment === 'production' ? 50 : 25, + cache: config.environment === 'production' ? 
30 : 15, + }; + + logger.info('Data pipeline pool sizes configured', poolSizes); + + // The container is already configured with connections + // Just return it with our logging + return container; +} diff --git a/apps/stock/data-pipeline/src/handlers/exchanges/exchanges.handler.ts b/apps/stock/data-pipeline/src/handlers/exchanges/exchanges.handler.ts index ae645c5..628a7be 100644 --- a/apps/stock/data-pipeline/src/handlers/exchanges/exchanges.handler.ts +++ b/apps/stock/data-pipeline/src/handlers/exchanges/exchanges.handler.ts @@ -1,111 +1,113 @@ -import { - BaseHandler, - Handler, - Operation, - ScheduledOperation, - type IServiceContainer, -} from '@stock-bot/handlers'; -import { clearPostgreSQLData } from './operations/clear-postgresql-data.operations'; -import { getSyncStatus } from './operations/enhanced-sync-status.operations'; -import { getExchangeStats } from './operations/exchange-stats.operations'; -import { getProviderMappingStats } from './operations/provider-mapping-stats.operations'; -import { syncQMExchanges } from './operations/qm-exchanges.operations'; -import { syncAllExchanges } from './operations/sync-all-exchanges.operations'; -import { syncIBExchanges } from './operations/sync-ib-exchanges.operations'; -import { syncQMProviderMappings } from './operations/sync-qm-provider-mappings.operations'; - -@Handler('exchanges') -class ExchangesHandler extends BaseHandler { - constructor(services: IServiceContainer) { - super(services); - } - - /** - * Sync all exchanges - weekly full sync - */ - @Operation('sync-all-exchanges') - @ScheduledOperation('sync-all-exchanges', '0 0 * * 0', { - priority: 10, - description: 'Weekly full exchange sync on Sunday at midnight', - }) - async syncAllExchanges(payload?: { clearFirst?: boolean }): Promise { - const finalPayload = payload || { clearFirst: true }; - this.log('info', 'Starting sync of all exchanges', finalPayload); - return syncAllExchanges(finalPayload, this.services); - } - - /** - * Sync exchanges 
from QuestionsAndMethods - */ - @Operation('sync-qm-exchanges') - @ScheduledOperation('sync-qm-exchanges', '0 1 * * *', { - priority: 5, - description: 'Daily sync of QM exchanges at 1 AM', - }) - async syncQMExchanges(): Promise { - this.log('info', 'Starting QM exchanges sync...'); - return syncQMExchanges({}, this.services); - } - - /** - * Sync exchanges from Interactive Brokers - */ - @Operation('sync-ib-exchanges') - @ScheduledOperation('sync-ib-exchanges', '0 3 * * *', { - priority: 3, - description: 'Daily sync of IB exchanges at 3 AM', - }) - async syncIBExchanges(): Promise { - this.log('info', 'Starting IB exchanges sync...'); - return syncIBExchanges({}, this.services); - } - - /** - * Sync provider mappings from QuestionsAndMethods - */ - @Operation('sync-qm-provider-mappings') - @ScheduledOperation('sync-qm-provider-mappings', '0 3 * * *', { - priority: 7, - description: 'Daily sync of QM provider mappings at 3 AM', - }) - async syncQMProviderMappings(): Promise { - this.log('info', 'Starting QM provider mappings sync...'); - return syncQMProviderMappings({}, this.services); - } - - /** - * Clear PostgreSQL data - maintenance operation - */ - @Operation('clear-postgresql-data') - async clearPostgreSQLData(payload: { type?: 'exchanges' | 'provider_mappings' | 'all' }): Promise { - this.log('warn', 'Clearing PostgreSQL data', payload); - return clearPostgreSQLData(payload, this.services); - } - - /** - * Get exchange statistics - */ - @Operation('get-exchange-stats') - async getExchangeStats(): Promise { - this.log('info', 'Getting exchange statistics...'); - return getExchangeStats({}, this.services); - } - - /** - * Get provider mapping statistics - */ - @Operation('get-provider-mapping-stats') - async getProviderMappingStats(): Promise { - this.log('info', 'Getting provider mapping statistics...'); - return getProviderMappingStats({}, this.services); - } - - /** - * Get enhanced sync status - */ - @Operation('enhanced-sync-status') - async 
getEnhancedSyncStatus(): Promise { - this.log('info', 'Getting enhanced sync status...'); - return getSyncStatus({}, this.services); - } -} \ No newline at end of file +import { + BaseHandler, + Handler, + Operation, + ScheduledOperation, +} from '@stock-bot/handlers'; +import type { IServiceContainer } from '@stock-bot/types'; +import { clearPostgreSQLData } from './operations/clear-postgresql-data.operations'; +import { getSyncStatus } from './operations/enhanced-sync-status.operations'; +import { getExchangeStats } from './operations/exchange-stats.operations'; +import { getProviderMappingStats } from './operations/provider-mapping-stats.operations'; +import { syncQMExchanges } from './operations/qm-exchanges.operations'; +import { syncAllExchanges } from './operations/sync-all-exchanges.operations'; +import { syncIBExchanges } from './operations/sync-ib-exchanges.operations'; +import { syncQMProviderMappings } from './operations/sync-qm-provider-mappings.operations'; + +@Handler('exchanges') +class ExchangesHandler extends BaseHandler { + constructor(services: IServiceContainer) { + super(services); + } + + /** + * Sync all exchanges - weekly full sync + */ + @Operation('sync-all-exchanges') + @ScheduledOperation('sync-all-exchanges', '0 0 * * 0', { + priority: 10, + description: 'Weekly full exchange sync on Sunday at midnight', + }) + async syncAllExchanges(payload?: { clearFirst?: boolean }): Promise { + const finalPayload = payload || { clearFirst: true }; + this.log('info', 'Starting sync of all exchanges', finalPayload); + return syncAllExchanges(finalPayload, this.services); + } + + /** + * Sync exchanges from QuestionsAndMethods + */ + @Operation('sync-qm-exchanges') + @ScheduledOperation('sync-qm-exchanges', '0 1 * * *', { + priority: 5, + description: 'Daily sync of QM exchanges at 1 AM', + }) + async syncQMExchanges(): Promise { + this.log('info', 'Starting QM exchanges sync...'); + return syncQMExchanges({}, this.services); + } + + /** + * Sync 
exchanges from Interactive Brokers + */ + @Operation('sync-ib-exchanges') + @ScheduledOperation('sync-ib-exchanges', '0 3 * * *', { + priority: 3, + description: 'Daily sync of IB exchanges at 3 AM', + }) + async syncIBExchanges(): Promise { + this.log('info', 'Starting IB exchanges sync...'); + return syncIBExchanges({}, this.services); + } + + /** + * Sync provider mappings from QuestionsAndMethods + */ + @Operation('sync-qm-provider-mappings') + @ScheduledOperation('sync-qm-provider-mappings', '0 3 * * *', { + priority: 7, + description: 'Daily sync of QM provider mappings at 3 AM', + }) + async syncQMProviderMappings(): Promise { + this.log('info', 'Starting QM provider mappings sync...'); + return syncQMProviderMappings({}, this.services); + } + + /** + * Clear PostgreSQL data - maintenance operation + */ + @Operation('clear-postgresql-data') + async clearPostgreSQLData(payload: { + type?: 'exchanges' | 'provider_mappings' | 'all'; + }): Promise { + this.log('warn', 'Clearing PostgreSQL data', payload); + return clearPostgreSQLData(payload, this.services); + } + + /** + * Get exchange statistics + */ + @Operation('get-exchange-stats') + async getExchangeStats(): Promise { + this.log('info', 'Getting exchange statistics...'); + return getExchangeStats({}, this.services); + } + + /** + * Get provider mapping statistics + */ + @Operation('get-provider-mapping-stats') + async getProviderMappingStats(): Promise { + this.log('info', 'Getting provider mapping statistics...'); + return getProviderMappingStats({}, this.services); + } + + /** + * Get enhanced sync status + */ + @Operation('enhanced-sync-status') + async getEnhancedSyncStatus(): Promise { + this.log('info', 'Getting enhanced sync status...'); + return getSyncStatus({}, this.services); + } +} diff --git a/apps/stock/data-pipeline/src/handlers/exchanges/operations/clear-postgresql-data.operations.ts b/apps/stock/data-pipeline/src/handlers/exchanges/operations/clear-postgresql-data.operations.ts index 
733cd35..4e0a4d6 100644 --- a/apps/stock/data-pipeline/src/handlers/exchanges/operations/clear-postgresql-data.operations.ts +++ b/apps/stock/data-pipeline/src/handlers/exchanges/operations/clear-postgresql-data.operations.ts @@ -1,5 +1,5 @@ -import { getLogger } from '@stock-bot/logger'; import type { IServiceContainer } from '@stock-bot/handlers'; +import { getLogger } from '@stock-bot/logger'; import type { JobPayload } from '../../../types/job-payloads'; const logger = getLogger('enhanced-sync-clear-postgresql-data'); diff --git a/apps/stock/data-pipeline/src/handlers/exchanges/operations/enhanced-sync-status.operations.ts b/apps/stock/data-pipeline/src/handlers/exchanges/operations/enhanced-sync-status.operations.ts index 96e5ad1..c3534d9 100644 --- a/apps/stock/data-pipeline/src/handlers/exchanges/operations/enhanced-sync-status.operations.ts +++ b/apps/stock/data-pipeline/src/handlers/exchanges/operations/enhanced-sync-status.operations.ts @@ -1,5 +1,5 @@ -import { getLogger } from '@stock-bot/logger'; import type { IServiceContainer } from '@stock-bot/handlers'; +import { getLogger } from '@stock-bot/logger'; import type { JobPayload, SyncStatus } from '../../../types/job-payloads'; const logger = getLogger('enhanced-sync-status'); diff --git a/apps/stock/data-pipeline/src/handlers/exchanges/operations/exchange-stats.operations.ts b/apps/stock/data-pipeline/src/handlers/exchanges/operations/exchange-stats.operations.ts index 9745090..ed14dc0 100644 --- a/apps/stock/data-pipeline/src/handlers/exchanges/operations/exchange-stats.operations.ts +++ b/apps/stock/data-pipeline/src/handlers/exchanges/operations/exchange-stats.operations.ts @@ -1,5 +1,5 @@ -import { getLogger } from '@stock-bot/logger'; import type { IServiceContainer } from '@stock-bot/handlers'; +import { getLogger } from '@stock-bot/logger'; import type { JobPayload } from '../../../types/job-payloads'; const logger = getLogger('enhanced-sync-exchange-stats'); diff --git 
a/apps/stock/data-pipeline/src/handlers/exchanges/operations/provider-mapping-stats.operations.ts b/apps/stock/data-pipeline/src/handlers/exchanges/operations/provider-mapping-stats.operations.ts index 394ddd8..7d4962d 100644 --- a/apps/stock/data-pipeline/src/handlers/exchanges/operations/provider-mapping-stats.operations.ts +++ b/apps/stock/data-pipeline/src/handlers/exchanges/operations/provider-mapping-stats.operations.ts @@ -1,5 +1,5 @@ -import { getLogger } from '@stock-bot/logger'; import type { IServiceContainer } from '@stock-bot/handlers'; +import { getLogger } from '@stock-bot/logger'; import type { JobPayload } from '../../../types/job-payloads'; const logger = getLogger('enhanced-sync-provider-mapping-stats'); diff --git a/apps/stock/data-pipeline/src/handlers/exchanges/operations/qm-exchanges.operations.ts b/apps/stock/data-pipeline/src/handlers/exchanges/operations/qm-exchanges.operations.ts index 80963fd..b0d077b 100644 --- a/apps/stock/data-pipeline/src/handlers/exchanges/operations/qm-exchanges.operations.ts +++ b/apps/stock/data-pipeline/src/handlers/exchanges/operations/qm-exchanges.operations.ts @@ -1,5 +1,5 @@ +import type { IServiceContainer } from '@stock-bot/types'; import { getLogger } from '@stock-bot/logger'; -import type { IServiceContainer } from '@stock-bot/handlers'; import type { JobPayload } from '../../../types/job-payloads'; const logger = getLogger('sync-qm-exchanges'); @@ -62,7 +62,10 @@ interface Exchange { visible: boolean; } -async function findExchange(exchangeCode: string, postgresClient: IServiceContainer['postgres']): Promise { +async function findExchange( + exchangeCode: string, + postgresClient: IServiceContainer['postgres'] +): Promise { const query = 'SELECT * FROM exchanges WHERE code = $1'; const result = await postgresClient.query(query, [exchangeCode]); return result.rows[0] || null; @@ -76,7 +79,10 @@ interface QMExchange { countryCode?: string; } -async function createExchange(qmExchange: QMExchange, 
postgresClient: IServiceContainer['postgres']): Promise { +async function createExchange( + qmExchange: QMExchange, + postgresClient: IServiceContainer['postgres'] +): Promise { const query = ` INSERT INTO exchanges (code, name, country, currency, visible) VALUES ($1, $2, $3, $4, $5) diff --git a/apps/stock/data-pipeline/src/handlers/exchanges/operations/sync-all-exchanges.operations.ts b/apps/stock/data-pipeline/src/handlers/exchanges/operations/sync-all-exchanges.operations.ts index 9dbfd57..ddaf1fb 100644 --- a/apps/stock/data-pipeline/src/handlers/exchanges/operations/sync-all-exchanges.operations.ts +++ b/apps/stock/data-pipeline/src/handlers/exchanges/operations/sync-all-exchanges.operations.ts @@ -1,10 +1,13 @@ -import { getLogger } from '@stock-bot/logger'; import type { IServiceContainer } from '@stock-bot/handlers'; +import { getLogger } from '@stock-bot/logger'; import type { JobPayload, SyncResult } from '../../../types/job-payloads'; const logger = getLogger('enhanced-sync-all-exchanges'); -export async function syncAllExchanges(payload: JobPayload, container: IServiceContainer): Promise { +export async function syncAllExchanges( + payload: JobPayload, + container: IServiceContainer +): Promise { const clearFirst = payload.clearFirst || true; logger.info('Starting comprehensive exchange sync...', { clearFirst }); @@ -50,7 +53,6 @@ export async function syncAllExchanges(payload: JobPayload, container: IServiceC } } - async function clearPostgreSQLData(postgresClient: any): Promise { logger.info('Clearing existing PostgreSQL data...'); @@ -141,7 +143,11 @@ async function createProviderExchangeMapping( const postgresClient = container.postgres; // Check if mapping already exists - const existingMapping = await findProviderExchangeMapping(provider, providerExchangeCode, container); + const existingMapping = await findProviderExchangeMapping( + provider, + providerExchangeCode, + container + ); if (existingMapping) { // Don't override existing mappings to 
preserve manual work return; diff --git a/apps/stock/data-pipeline/src/handlers/exchanges/operations/sync-ib-exchanges.operations.ts b/apps/stock/data-pipeline/src/handlers/exchanges/operations/sync-ib-exchanges.operations.ts index 909a939..27b87dc 100644 --- a/apps/stock/data-pipeline/src/handlers/exchanges/operations/sync-ib-exchanges.operations.ts +++ b/apps/stock/data-pipeline/src/handlers/exchanges/operations/sync-ib-exchanges.operations.ts @@ -1,6 +1,6 @@ +import type { IServiceContainer } from '@stock-bot/handlers'; import { getLogger } from '@stock-bot/logger'; import type { MasterExchange } from '@stock-bot/mongodb'; -import type { IServiceContainer } from '@stock-bot/handlers'; import type { JobPayload } from '../../../types/job-payloads'; const logger = getLogger('sync-ib-exchanges'); @@ -65,7 +65,10 @@ export async function syncIBExchanges( /** * Create or update master exchange record 1:1 from IB exchange */ -async function createOrUpdateMasterExchange(ibExchange: IBExchange, container: IServiceContainer): Promise { +async function createOrUpdateMasterExchange( + ibExchange: IBExchange, + container: IServiceContainer +): Promise { const mongoClient = container.mongodb; const db = mongoClient.getDatabase(); const collection = db.collection('masterExchanges'); diff --git a/apps/stock/data-pipeline/src/handlers/exchanges/operations/sync-qm-provider-mappings.operations.ts b/apps/stock/data-pipeline/src/handlers/exchanges/operations/sync-qm-provider-mappings.operations.ts index d37f41b..9e1ed56 100644 --- a/apps/stock/data-pipeline/src/handlers/exchanges/operations/sync-qm-provider-mappings.operations.ts +++ b/apps/stock/data-pipeline/src/handlers/exchanges/operations/sync-qm-provider-mappings.operations.ts @@ -1,5 +1,5 @@ -import { getLogger } from '@stock-bot/logger'; import type { IServiceContainer } from '@stock-bot/handlers'; +import { getLogger } from '@stock-bot/logger'; import type { JobPayload, SyncResult } from '../../../types/job-payloads'; const 
logger = getLogger('enhanced-sync-qm-provider-mappings'); @@ -86,7 +86,6 @@ export async function syncQMProviderMappings( } } - async function createProviderExchangeMapping( provider: string, providerExchangeCode: string, @@ -103,7 +102,11 @@ async function createProviderExchangeMapping( const postgresClient = container.postgres; // Check if mapping already exists - const existingMapping = await findProviderExchangeMapping(provider, providerExchangeCode, container); + const existingMapping = await findProviderExchangeMapping( + provider, + providerExchangeCode, + container + ); if (existingMapping) { // Don't override existing mappings to preserve manual work return; diff --git a/apps/stock/data-pipeline/src/handlers/index.ts b/apps/stock/data-pipeline/src/handlers/index.ts index 754e8d2..f6cadf8 100644 --- a/apps/stock/data-pipeline/src/handlers/index.ts +++ b/apps/stock/data-pipeline/src/handlers/index.ts @@ -1,42 +1,41 @@ -/** - * Handler auto-registration for data pipeline service - * Automatically discovers and registers all handlers - */ - -import type { IServiceContainer } from '@stock-bot/handlers'; -import { autoRegisterHandlers } from '@stock-bot/handlers'; -import { getLogger } from '@stock-bot/logger'; - -// Import handlers for bundling (ensures they're included in the build) -import './exchanges/exchanges.handler'; -import './symbols/symbols.handler'; - -const logger = getLogger('pipeline-handler-init'); - -/** - * Initialize and register all handlers automatically - */ -export async function initializeAllHandlers(container: IServiceContainer): Promise { - logger.info('Initializing data pipeline handlers...'); - - try { - // Auto-register all handlers in this directory - const result = await autoRegisterHandlers(__dirname, container, { - pattern: '.handler.', - exclude: ['test', 'spec', '.old'], - dryRun: false, - }); - - logger.info('Handler auto-registration complete', { - registered: result.registered, - failed: result.failed, - }); - - if 
(result.failed.length > 0) { - logger.error('Some handlers failed to register', { failed: result.failed }); - } - } catch (error) { - logger.error('Handler auto-registration failed', { error }); - throw error; - } -} \ No newline at end of file +/** + * Handler auto-registration for data pipeline service + * Automatically discovers and registers all handlers + */ + +import type { IServiceContainer } from '@stock-bot/handlers'; +import { autoRegisterHandlers } from '@stock-bot/handlers'; +import { getLogger } from '@stock-bot/logger'; +// Import handlers for bundling (ensures they're included in the build) +import './exchanges/exchanges.handler'; +import './symbols/symbols.handler'; + +const logger = getLogger('pipeline-handler-init'); + +/** + * Initialize and register all handlers automatically + */ +export async function initializeAllHandlers(container: IServiceContainer): Promise { + logger.info('Initializing data pipeline handlers...'); + + try { + // Auto-register all handlers in this directory + const result = await autoRegisterHandlers(__dirname, container, { + pattern: '.handler.', + exclude: ['test', 'spec', '.old'], + dryRun: false, + }); + + logger.info('Handler auto-registration complete', { + registered: result.registered, + failed: result.failed, + }); + + if (result.failed.length > 0) { + logger.error('Some handlers failed to register', { failed: result.failed }); + } + } catch (error) { + logger.error('Handler auto-registration failed', { error }); + throw error; + } +} diff --git a/apps/stock/data-pipeline/src/handlers/symbols/operations/qm-symbols.operations.ts b/apps/stock/data-pipeline/src/handlers/symbols/operations/qm-symbols.operations.ts index eedfb21..d4a38db 100644 --- a/apps/stock/data-pipeline/src/handlers/symbols/operations/qm-symbols.operations.ts +++ b/apps/stock/data-pipeline/src/handlers/symbols/operations/qm-symbols.operations.ts @@ -1,5 +1,5 @@ -import { getLogger } from '@stock-bot/logger'; import type { IServiceContainer } from 
'@stock-bot/handlers'; +import { getLogger } from '@stock-bot/logger'; import type { JobPayload } from '../../../types/job-payloads'; const logger = getLogger('sync-qm-symbols'); diff --git a/apps/stock/data-pipeline/src/handlers/symbols/operations/sync-status.operations.ts b/apps/stock/data-pipeline/src/handlers/symbols/operations/sync-status.operations.ts index d9b0719..fe5999b 100644 --- a/apps/stock/data-pipeline/src/handlers/symbols/operations/sync-status.operations.ts +++ b/apps/stock/data-pipeline/src/handlers/symbols/operations/sync-status.operations.ts @@ -1,5 +1,5 @@ -import { getLogger } from '@stock-bot/logger'; import type { IServiceContainer } from '@stock-bot/handlers'; +import { getLogger } from '@stock-bot/logger'; import type { JobPayload } from '../../../types/job-payloads'; const logger = getLogger('sync-status'); diff --git a/apps/stock/data-pipeline/src/handlers/symbols/operations/sync-symbols-from-provider.operations.ts b/apps/stock/data-pipeline/src/handlers/symbols/operations/sync-symbols-from-provider.operations.ts index 7dbba35..5b33275 100644 --- a/apps/stock/data-pipeline/src/handlers/symbols/operations/sync-symbols-from-provider.operations.ts +++ b/apps/stock/data-pipeline/src/handlers/symbols/operations/sync-symbols-from-provider.operations.ts @@ -1,5 +1,5 @@ -import { getLogger } from '@stock-bot/logger'; import type { IServiceContainer } from '@stock-bot/handlers'; +import { getLogger } from '@stock-bot/logger'; import type { JobPayload, SyncResult } from '../../../types/job-payloads'; const logger = getLogger('enhanced-sync-symbols-from-provider'); @@ -104,7 +104,11 @@ async function processSingleSymbol( } // Find active provider exchange mapping - const providerMapping = await findActiveProviderExchangeMapping(provider, exchangeCode, container); + const providerMapping = await findActiveProviderExchangeMapping( + provider, + exchangeCode, + container + ); if (!providerMapping) { result.skipped++; @@ -145,14 +149,22 @@ async 
function findActiveProviderExchangeMapping( return result.rows[0] || null; } -async function findSymbolByCodeAndExchange(symbol: string, exchangeId: string, container: IServiceContainer): Promise { +async function findSymbolByCodeAndExchange( + symbol: string, + exchangeId: string, + container: IServiceContainer +): Promise { const postgresClient = container.postgres; const query = 'SELECT * FROM symbols WHERE symbol = $1 AND exchange_id = $2'; const result = await postgresClient.query(query, [symbol, exchangeId]); return result.rows[0] || null; } -async function createSymbol(symbol: any, exchangeId: string, container: IServiceContainer): Promise { +async function createSymbol( + symbol: any, + exchangeId: string, + container: IServiceContainer +): Promise { const postgresClient = container.postgres; const query = ` INSERT INTO symbols (symbol, exchange_id, company_name, country, currency) @@ -171,7 +183,11 @@ async function createSymbol(symbol: any, exchangeId: string, container: IService return result.rows[0].id; } -async function updateSymbol(symbolId: string, symbol: any, container: IServiceContainer): Promise { +async function updateSymbol( + symbolId: string, + symbol: any, + container: IServiceContainer +): Promise { const postgresClient = container.postgres; const query = ` UPDATE symbols diff --git a/apps/stock/data-pipeline/src/handlers/symbols/symbols.handler.ts b/apps/stock/data-pipeline/src/handlers/symbols/symbols.handler.ts index 79083ca..2c55478 100644 --- a/apps/stock/data-pipeline/src/handlers/symbols/symbols.handler.ts +++ b/apps/stock/data-pipeline/src/handlers/symbols/symbols.handler.ts @@ -1,68 +1,71 @@ -import { - BaseHandler, - Handler, - Operation, - ScheduledOperation, - type IServiceContainer, -} from '@stock-bot/handlers'; -import { syncQMSymbols } from './operations/qm-symbols.operations'; -import { syncSymbolsFromProvider } from './operations/sync-symbols-from-provider.operations'; -import { getSyncStatus } from 
'./operations/sync-status.operations'; - -@Handler('symbols') -class SymbolsHandler extends BaseHandler { - constructor(services: IServiceContainer) { - super(services); - } - - /** - * Sync symbols from QuestionsAndMethods API - */ - @ScheduledOperation('sync-qm-symbols', '0 2 * * *', { - priority: 5, - description: 'Daily sync of QM symbols at 2 AM', - }) - async syncQMSymbols(): Promise<{ processed: number; created: number; updated: number }> { - this.log('info', 'Starting QM symbols sync...'); - return syncQMSymbols({}, this.services); - } - - /** - * Sync symbols from specific provider - */ - @Operation('sync-symbols-qm') - @ScheduledOperation('sync-symbols-qm', '0 4 * * *', { - priority: 5, - description: 'Daily sync of symbols from QM provider at 4 AM', - }) - async syncSymbolsQM(): Promise { - return this.syncSymbolsFromProvider({ provider: 'qm', clearFirst: false }); - } - - @Operation('sync-symbols-eod') - async syncSymbolsEOD(payload: { provider: string; clearFirst?: boolean }): Promise { - return this.syncSymbolsFromProvider({ ...payload, provider: 'eod' }); - } - - @Operation('sync-symbols-ib') - async syncSymbolsIB(payload: { provider: string; clearFirst?: boolean }): Promise { - return this.syncSymbolsFromProvider({ ...payload, provider: 'ib' }); - } - - /** - * Get sync status for symbols - */ - @Operation('sync-status') - async getSyncStatus(): Promise { - this.log('info', 'Getting symbol sync status...'); - return getSyncStatus({}, this.services); - } - - /** - * Internal method to sync symbols from a provider - */ - private async syncSymbolsFromProvider(payload: { provider: string; clearFirst?: boolean }): Promise { - this.log('info', 'Syncing symbols from provider', { provider: payload.provider }); - return syncSymbolsFromProvider(payload, this.services); - } -} \ No newline at end of file +import { + BaseHandler, + Handler, + Operation, + ScheduledOperation, + type IServiceContainer, +} from '@stock-bot/handlers'; +import { syncQMSymbols } from 
'./operations/qm-symbols.operations'; +import { getSyncStatus } from './operations/sync-status.operations'; +import { syncSymbolsFromProvider } from './operations/sync-symbols-from-provider.operations'; + +@Handler('symbols') +class SymbolsHandler extends BaseHandler { + constructor(services: IServiceContainer) { + super(services); + } + + /** + * Sync symbols from QuestionsAndMethods API + */ + @ScheduledOperation('sync-qm-symbols', '0 2 * * *', { + priority: 5, + description: 'Daily sync of QM symbols at 2 AM', + }) + async syncQMSymbols(): Promise<{ processed: number; created: number; updated: number }> { + this.log('info', 'Starting QM symbols sync...'); + return syncQMSymbols({}, this.services); + } + + /** + * Sync symbols from specific provider + */ + @Operation('sync-symbols-qm') + @ScheduledOperation('sync-symbols-qm', '0 4 * * *', { + priority: 5, + description: 'Daily sync of symbols from QM provider at 4 AM', + }) + async syncSymbolsQM(): Promise { + return this.syncSymbolsFromProvider({ provider: 'qm', clearFirst: false }); + } + + @Operation('sync-symbols-eod') + async syncSymbolsEOD(payload: { provider: string; clearFirst?: boolean }): Promise { + return this.syncSymbolsFromProvider({ ...payload, provider: 'eod' }); + } + + @Operation('sync-symbols-ib') + async syncSymbolsIB(payload: { provider: string; clearFirst?: boolean }): Promise { + return this.syncSymbolsFromProvider({ ...payload, provider: 'ib' }); + } + + /** + * Get sync status for symbols + */ + @Operation('sync-status') + async getSyncStatus(): Promise { + this.log('info', 'Getting symbol sync status...'); + return getSyncStatus({}, this.services); + } + + /** + * Internal method to sync symbols from a provider + */ + private async syncSymbolsFromProvider(payload: { + provider: string; + clearFirst?: boolean; + }): Promise { + this.log('info', 'Syncing symbols from provider', { provider: payload.provider }); + return syncSymbolsFromProvider(payload, this.services); + } +} diff --git 
a/apps/stock/data-pipeline/src/index.ts b/apps/stock/data-pipeline/src/index.ts index 36df5e0..df4322f 100644 --- a/apps/stock/data-pipeline/src/index.ts +++ b/apps/stock/data-pipeline/src/index.ts @@ -3,14 +3,13 @@ * Simplified entry point using ServiceApplication framework */ -import { initializeStockConfig } from '@stock-bot/stock-config'; import { ServiceApplication } from '@stock-bot/di'; import { getLogger } from '@stock-bot/logger'; - -// Local imports -import { initializeAllHandlers } from './handlers'; +import { initializeStockConfig } from '@stock-bot/stock-config'; import { createRoutes } from './routes/create-routes'; import { setupServiceContainer } from './container-setup'; +// Local imports +import { initializeAllHandlers } from './handlers'; // Initialize configuration with service-specific overrides const config = initializeStockConfig('dataPipeline'); @@ -43,12 +42,12 @@ const app = new ServiceApplication( }, { // Custom lifecycle hooks - onContainerReady: (container) => { + onContainerReady: container => { // Setup service-specific configuration const enhancedContainer = setupServiceContainer(config, container); return enhancedContainer; }, - onStarted: (_port) => { + onStarted: _port => { const logger = getLogger('data-pipeline'); logger.info('Data pipeline service startup initiated with ServiceApplication framework'); }, @@ -59,7 +58,7 @@ const app = new ServiceApplication( async function createContainer(config: any) { const { ServiceContainerBuilder } = await import('@stock-bot/di'); const builder = new ServiceContainerBuilder(); - + const container = await builder .withConfig(config) .withOptions({ @@ -74,7 +73,7 @@ async function createContainer(config: any) { skipInitialization: false, // Let builder handle initialization }) .build(); - + return container; } @@ -83,4 +82,4 @@ app.start(createContainer, createRoutes, initializeAllHandlers).catch(error => { const logger = getLogger('data-pipeline'); logger.fatal('Failed to start data pipeline 
service', { error }); process.exit(1); -}); \ No newline at end of file +}); diff --git a/apps/stock/data-pipeline/src/routes/create-routes.ts b/apps/stock/data-pipeline/src/routes/create-routes.ts index 13bf479..356ba42 100644 --- a/apps/stock/data-pipeline/src/routes/create-routes.ts +++ b/apps/stock/data-pipeline/src/routes/create-routes.ts @@ -1,29 +1,29 @@ -/** - * Route factory for data pipeline service - * Creates routes with access to the service container - */ - -import { Hono } from 'hono'; -import type { IServiceContainer } from '@stock-bot/handlers'; -import { healthRoutes } from './health.routes'; -import { createSyncRoutes } from './sync.routes'; -import { createEnhancedSyncRoutes } from './enhanced-sync.routes'; -import { createStatsRoutes } from './stats.routes'; - -export function createRoutes(container: IServiceContainer): Hono { - const app = new Hono(); - - // Add container to context for all routes - app.use('*', async (c, next) => { - c.set('container', container); - await next(); - }); - - // Mount routes - app.route('/health', healthRoutes); - app.route('/sync', createSyncRoutes(container)); - app.route('/sync', createEnhancedSyncRoutes(container)); - app.route('/sync/stats', createStatsRoutes(container)); - - return app; -} \ No newline at end of file +/** + * Route factory for data pipeline service + * Creates routes with access to the service container + */ + +import { Hono } from 'hono'; +import type { IServiceContainer } from '@stock-bot/handlers'; +import { createEnhancedSyncRoutes } from './enhanced-sync.routes'; +import { healthRoutes } from './health.routes'; +import { createStatsRoutes } from './stats.routes'; +import { createSyncRoutes } from './sync.routes'; + +export function createRoutes(container: IServiceContainer): Hono { + const app = new Hono(); + + // Add container to context for all routes + app.use('*', async (c, next) => { + c.set('container', container); + await next(); + }); + + // Mount routes + app.route('/health', 
healthRoutes); + app.route('/sync', createSyncRoutes(container)); + app.route('/sync', createEnhancedSyncRoutes(container)); + app.route('/sync/stats', createStatsRoutes(container)); + + return app; +} diff --git a/apps/stock/data-pipeline/src/routes/enhanced-sync.routes.ts b/apps/stock/data-pipeline/src/routes/enhanced-sync.routes.ts index 4b0e6e8..cc6ff28 100644 --- a/apps/stock/data-pipeline/src/routes/enhanced-sync.routes.ts +++ b/apps/stock/data-pipeline/src/routes/enhanced-sync.routes.ts @@ -1,6 +1,6 @@ import { Hono } from 'hono'; -import { getLogger } from '@stock-bot/logger'; import type { IServiceContainer } from '@stock-bot/handlers'; +import { getLogger } from '@stock-bot/logger'; const logger = getLogger('enhanced-sync-routes'); @@ -15,7 +15,7 @@ export function createEnhancedSyncRoutes(container: IServiceContainer) { if (!queueManager) { return c.json({ success: false, error: 'Queue manager not available' }, 503); } - + const exchangesQueue = queueManager.getQueue('exchanges'); const job = await exchangesQueue.addJob('sync-all-exchanges', { @@ -40,7 +40,7 @@ export function createEnhancedSyncRoutes(container: IServiceContainer) { if (!queueManager) { return c.json({ success: false, error: 'Queue manager not available' }, 503); } - + const exchangesQueue = queueManager.getQueue('exchanges'); const job = await exchangesQueue.addJob('sync-qm-provider-mappings', { @@ -69,7 +69,7 @@ export function createEnhancedSyncRoutes(container: IServiceContainer) { if (!queueManager) { return c.json({ success: false, error: 'Queue manager not available' }, 503); } - + const exchangesQueue = queueManager.getQueue('exchanges'); const job = await exchangesQueue.addJob('sync-ib-exchanges', { @@ -98,7 +98,7 @@ export function createEnhancedSyncRoutes(container: IServiceContainer) { if (!queueManager) { return c.json({ success: false, error: 'Queue manager not available' }, 503); } - + const symbolsQueue = queueManager.getQueue('symbols'); const job = await 
symbolsQueue.addJob('sync-status', { @@ -124,7 +124,7 @@ export function createEnhancedSyncRoutes(container: IServiceContainer) { if (!queueManager) { return c.json({ success: false, error: 'Queue manager not available' }, 503); } - + const exchangesQueue = queueManager.getQueue('exchanges'); const job = await exchangesQueue.addJob('clear-postgresql-data', { @@ -148,4 +148,4 @@ export function createEnhancedSyncRoutes(container: IServiceContainer) { }); return enhancedSync; -} \ No newline at end of file +} diff --git a/apps/stock/data-pipeline/src/routes/stats.routes.ts b/apps/stock/data-pipeline/src/routes/stats.routes.ts index 90cc0b2..4340e98 100644 --- a/apps/stock/data-pipeline/src/routes/stats.routes.ts +++ b/apps/stock/data-pipeline/src/routes/stats.routes.ts @@ -1,6 +1,6 @@ import { Hono } from 'hono'; -import { getLogger } from '@stock-bot/logger'; import type { IServiceContainer } from '@stock-bot/handlers'; +import { getLogger } from '@stock-bot/logger'; const logger = getLogger('stats-routes'); @@ -14,7 +14,7 @@ export function createStatsRoutes(container: IServiceContainer) { if (!queueManager) { return c.json({ error: 'Queue manager not available' }, 503); } - + const exchangesQueue = queueManager.getQueue('exchanges'); const job = await exchangesQueue.addJob('get-exchange-stats', { @@ -38,7 +38,7 @@ export function createStatsRoutes(container: IServiceContainer) { if (!queueManager) { return c.json({ error: 'Queue manager not available' }, 503); } - + const exchangesQueue = queueManager.getQueue('exchanges'); const job = await exchangesQueue.addJob('get-provider-mapping-stats', { @@ -57,4 +57,4 @@ export function createStatsRoutes(container: IServiceContainer) { }); return stats; -} \ No newline at end of file +} diff --git a/apps/stock/data-pipeline/src/routes/sync.routes.ts b/apps/stock/data-pipeline/src/routes/sync.routes.ts index 59a6456..99ad839 100644 --- a/apps/stock/data-pipeline/src/routes/sync.routes.ts +++ 
b/apps/stock/data-pipeline/src/routes/sync.routes.ts @@ -1,6 +1,6 @@ import { Hono } from 'hono'; -import { getLogger } from '@stock-bot/logger'; import type { IServiceContainer } from '@stock-bot/handlers'; +import { getLogger } from '@stock-bot/logger'; const logger = getLogger('sync-routes'); @@ -14,7 +14,7 @@ export function createSyncRoutes(container: IServiceContainer) { if (!queueManager) { return c.json({ success: false, error: 'Queue manager not available' }, 503); } - + const symbolsQueue = queueManager.getQueue('symbols'); const job = await symbolsQueue.addJob('sync-qm-symbols', { @@ -39,7 +39,7 @@ export function createSyncRoutes(container: IServiceContainer) { if (!queueManager) { return c.json({ success: false, error: 'Queue manager not available' }, 503); } - + const exchangesQueue = queueManager.getQueue('exchanges'); const job = await exchangesQueue.addJob('sync-qm-exchanges', { @@ -65,7 +65,7 @@ export function createSyncRoutes(container: IServiceContainer) { if (!queueManager) { return c.json({ success: false, error: 'Queue manager not available' }, 503); } - + const symbolsQueue = queueManager.getQueue('symbols'); const job = await symbolsQueue.addJob('sync-symbols-from-provider', { @@ -89,4 +89,4 @@ export function createSyncRoutes(container: IServiceContainer) { }); return sync; -} \ No newline at end of file +} diff --git a/apps/stock/package.json b/apps/stock/package.json index cd8c82c..91ceb42 100644 --- a/apps/stock/package.json +++ b/apps/stock/package.json @@ -1,91 +1,81 @@ -{ - "name": "@stock-bot/stock-app", - "version": "1.0.0", - "private": true, - "description": "Stock trading bot application", - "scripts": { - "dev": "turbo run dev", - "dev:ingestion": "cd data-ingestion && bun run dev", - "dev:pipeline": "cd data-pipeline && bun run dev", - "dev:api": "cd web-api && bun run dev", - "dev:web": "cd web-app && bun run dev", - "dev:backend": "turbo run dev --filter=\"@stock-bot/data-*\" --filter=\"@stock-bot/web-api\"", - 
"dev:frontend": "turbo run dev --filter=\"@stock-bot/web-app\"", - - "build": "turbo run build", - "build:config": "cd config && bun run build", - "build:services": "turbo run build --filter=\"@stock-bot/data-*\" --filter=\"@stock-bot/web-*\"", - "build:ingestion": "cd data-ingestion && bun run build", - "build:pipeline": "cd data-pipeline && bun run build", - "build:api": "cd web-api && bun run build", - "build:web": "cd web-app && bun run build", - - "start": "turbo run start --filter=\"@stock-bot/data-*\" --filter=\"@stock-bot/web-api\"", - "start:all": "turbo run start", - "start:ingestion": "cd data-ingestion && bun start", - "start:pipeline": "cd data-pipeline && bun start", - "start:api": "cd web-api && bun start", - - "clean": "turbo run clean", - "clean:all": "turbo run clean && rm -rf node_modules", - "clean:ingestion": "cd data-ingestion && rm -rf dist node_modules", - "clean:pipeline": "cd data-pipeline && rm -rf dist node_modules", - "clean:api": "cd web-api && rm -rf dist node_modules", - "clean:web": "cd web-app && rm -rf dist node_modules", - "clean:config": "cd config && rm -rf dist node_modules", - - "test": "turbo run test", - "test:all": "turbo run test", - "test:config": "cd config && bun test", - "test:services": "turbo run test --filter=\"@stock-bot/data-*\" --filter=\"@stock-bot/web-*\"", - "test:ingestion": "cd data-ingestion && bun test", - "test:pipeline": "cd data-pipeline && bun test", - "test:api": "cd web-api && bun test", - - "lint": "turbo run lint", - "lint:all": "turbo run lint", - "lint:config": "cd config && bun run lint", - "lint:services": "turbo run lint --filter=\"@stock-bot/data-*\" --filter=\"@stock-bot/web-*\"", - "lint:ingestion": "cd data-ingestion && bun run lint", - "lint:pipeline": "cd data-pipeline && bun run lint", - "lint:api": "cd web-api && bun run lint", - "lint:web": "cd web-app && bun run lint", - - "install:all": "bun install", - - "docker:build": "docker-compose build", - "docker:up": "docker-compose up", - 
"docker:down": "docker-compose down", - - "pm2:start": "pm2 start ecosystem.config.js", - "pm2:stop": "pm2 stop all", - "pm2:restart": "pm2 restart all", - "pm2:logs": "pm2 logs", - "pm2:status": "pm2 status", - - "db:migrate": "cd data-ingestion && bun run db:migrate", - "db:seed": "cd data-ingestion && bun run db:seed", - - "health:check": "bun scripts/health-check.js", - "monitor": "bun run pm2:logs", - "status": "bun run pm2:status" - }, - "devDependencies": { - "pm2": "^5.3.0", - "@types/node": "^20.11.0", - "typescript": "^5.3.3", - "turbo": "^2.5.4" - }, - "workspaces": [ - "config", - "data-ingestion", - "data-pipeline", - "web-api", - "web-app" - ], - "engines": { - "node": ">=18.0.0", - "bun": ">=1.1.0" - }, - "packageManager": "bun@1.1.12" -} \ No newline at end of file +{ + "name": "@stock-bot/stock-app", + "version": "1.0.0", + "private": true, + "description": "Stock trading bot application", + "scripts": { + "dev": "turbo run dev", + "dev:ingestion": "cd data-ingestion && bun run dev", + "dev:pipeline": "cd data-pipeline && bun run dev", + "dev:api": "cd web-api && bun run dev", + "dev:web": "cd web-app && bun run dev", + "dev:backend": "turbo run dev --filter=\"@stock-bot/data-*\" --filter=\"@stock-bot/web-api\"", + "dev:frontend": "turbo run dev --filter=\"@stock-bot/web-app\"", + "build": "echo 'Stock apps built via parent turbo'", + "build:config": "cd config && bun run build", + "build:services": "turbo run build --filter=\"@stock-bot/data-*\" --filter=\"@stock-bot/web-*\"", + "build:ingestion": "cd data-ingestion && bun run build", + "build:pipeline": "cd data-pipeline && bun run build", + "build:api": "cd web-api && bun run build", + "build:web": "cd web-app && bun run build", + "start": "turbo run start --filter=\"@stock-bot/data-*\" --filter=\"@stock-bot/web-api\"", + "start:all": "turbo run start", + "start:ingestion": "cd data-ingestion && bun start", + "start:pipeline": "cd data-pipeline && bun start", + "start:api": "cd web-api && bun 
start", + "clean": "turbo run clean", + "clean:all": "turbo run clean && rm -rf node_modules", + "clean:ingestion": "cd data-ingestion && rm -rf dist node_modules", + "clean:pipeline": "cd data-pipeline && rm -rf dist node_modules", + "clean:api": "cd web-api && rm -rf dist node_modules", + "clean:web": "cd web-app && rm -rf dist node_modules", + "clean:config": "cd config && rm -rf dist node_modules", + "test": "turbo run test", + "test:all": "turbo run test", + "test:config": "cd config && bun test", + "test:services": "turbo run test --filter=\"@stock-bot/data-*\" --filter=\"@stock-bot/web-*\"", + "test:ingestion": "cd data-ingestion && bun test", + "test:pipeline": "cd data-pipeline && bun test", + "test:api": "cd web-api && bun test", + "lint": "turbo run lint", + "lint:all": "turbo run lint", + "lint:config": "cd config && bun run lint", + "lint:services": "turbo run lint --filter=\"@stock-bot/data-*\" --filter=\"@stock-bot/web-*\"", + "lint:ingestion": "cd data-ingestion && bun run lint", + "lint:pipeline": "cd data-pipeline && bun run lint", + "lint:api": "cd web-api && bun run lint", + "lint:web": "cd web-app && bun run lint", + "install:all": "bun install", + "docker:build": "docker-compose build", + "docker:up": "docker-compose up", + "docker:down": "docker-compose down", + "pm2:start": "pm2 start ecosystem.config.js", + "pm2:stop": "pm2 stop all", + "pm2:restart": "pm2 restart all", + "pm2:logs": "pm2 logs", + "pm2:status": "pm2 status", + "db:migrate": "cd data-ingestion && bun run db:migrate", + "db:seed": "cd data-ingestion && bun run db:seed", + "health:check": "bun scripts/health-check.js", + "monitor": "bun run pm2:logs", + "status": "bun run pm2:status" + }, + "devDependencies": { + "pm2": "^5.3.0", + "@types/node": "^20.11.0", + "typescript": "^5.3.3", + "turbo": "^2.5.4" + }, + "workspaces": [ + "config", + "data-ingestion", + "data-pipeline", + "web-api", + "web-app" + ], + "engines": { + "node": ">=18.0.0", + "bun": ">=1.1.0" + }, + 
"packageManager": "bun@1.1.12" +} diff --git a/apps/stock/tsconfig.json b/apps/stock/tsconfig.json index 5d62c04..c8ae20d 100644 --- a/apps/stock/tsconfig.json +++ b/apps/stock/tsconfig.json @@ -1,18 +1,18 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "baseUrl": "../..", - "paths": { - "@stock-bot/*": ["libs/*/src"], - "@stock-bot/stock-config": ["apps/stock/config/src"], - "@stock-bot/stock-config/*": ["apps/stock/config/src/*"] - } - }, - "references": [ - { "path": "./config" }, - { "path": "./data-ingestion" }, - { "path": "./data-pipeline" }, - { "path": "./web-api" }, - { "path": "./web-app" } - ] -} \ No newline at end of file +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "baseUrl": "../..", + "paths": { + "@stock-bot/*": ["libs/*/src"], + "@stock-bot/stock-config": ["apps/stock/config/src"], + "@stock-bot/stock-config/*": ["apps/stock/config/src/*"] + } + }, + "references": [ + { "path": "./config" }, + { "path": "./data-ingestion" }, + { "path": "./data-pipeline" }, + { "path": "./web-api" }, + { "path": "./web-app" } + ] +} diff --git a/apps/stock/web-api/src/container-setup.ts b/apps/stock/web-api/src/container-setup.ts index 2cec315..9e5ff9e 100644 --- a/apps/stock/web-api/src/container-setup.ts +++ b/apps/stock/web-api/src/container-setup.ts @@ -1,34 +1,34 @@ -/** - * Service Container Setup for Web API - * Configures dependency injection for the web API service - */ - -import type { IServiceContainer } from '@stock-bot/handlers'; -import { getLogger } from '@stock-bot/logger'; -import type { AppConfig } from '@stock-bot/config'; - -const logger = getLogger('web-api-container'); - -/** - * Configure the service container for web API workloads - */ -export function setupServiceContainer( - config: AppConfig, - container: IServiceContainer -): IServiceContainer { - logger.info('Configuring web API service container...'); - - // Web API specific configuration - // This service mainly reads data, so smaller pool sizes 
are fine - const poolSizes = { - mongodb: config.environment === 'production' ? 20 : 10, - postgres: config.environment === 'production' ? 30 : 15, - cache: config.environment === 'production' ? 20 : 10, - }; - - logger.info('Web API pool sizes configured', poolSizes); - - // The container is already configured with connections - // Just return it with our logging - return container; -} \ No newline at end of file +/** + * Service Container Setup for Web API + * Configures dependency injection for the web API service + */ + +import type { AppConfig } from '@stock-bot/config'; +import type { IServiceContainer } from '@stock-bot/handlers'; +import { getLogger } from '@stock-bot/logger'; + +const logger = getLogger('web-api-container'); + +/** + * Configure the service container for web API workloads + */ +export function setupServiceContainer( + config: AppConfig, + container: IServiceContainer +): IServiceContainer { + logger.info('Configuring web API service container...'); + + // Web API specific configuration + // This service mainly reads data, so smaller pool sizes are fine + const poolSizes = { + mongodb: config.environment === 'production' ? 20 : 10, + postgres: config.environment === 'production' ? 30 : 15, + cache: config.environment === 'production' ? 
20 : 10, + }; + + logger.info('Web API pool sizes configured', poolSizes); + + // The container is already configured with connections + // Just return it with our logging + return container; +} diff --git a/apps/stock/web-api/src/index.ts b/apps/stock/web-api/src/index.ts index e4a4957..e7778f9 100644 --- a/apps/stock/web-api/src/index.ts +++ b/apps/stock/web-api/src/index.ts @@ -3,10 +3,9 @@ * Simplified entry point using ServiceApplication framework */ -import { initializeStockConfig } from '@stock-bot/stock-config'; import { ServiceApplication } from '@stock-bot/di'; import { getLogger } from '@stock-bot/logger'; - +import { initializeStockConfig } from '@stock-bot/stock-config'; // Local imports import { createRoutes } from './routes/create-routes'; @@ -49,7 +48,7 @@ const app = new ServiceApplication( }, { // Custom lifecycle hooks - onStarted: (_port) => { + onStarted: _port => { const logger = getLogger('web-api'); logger.info('Web API service startup initiated with ServiceApplication framework'); }, @@ -59,7 +58,7 @@ const app = new ServiceApplication( // Container factory function async function createContainer(config: any) { const { ServiceContainerBuilder } = await import('@stock-bot/di'); - + const container = await new ServiceContainerBuilder() .withConfig(config) .withOptions({ @@ -72,7 +71,7 @@ async function createContainer(config: any) { enableProxy: false, // Web API doesn't need proxy }) .build(); // This automatically initializes services - + return container; } @@ -81,4 +80,4 @@ app.start(createContainer, createRoutes).catch(error => { const logger = getLogger('web-api'); logger.fatal('Failed to start web API service', { error }); process.exit(1); -}); \ No newline at end of file +}); diff --git a/apps/stock/web-api/src/routes/create-routes.ts b/apps/stock/web-api/src/routes/create-routes.ts index 6f6eee3..ab2876e 100644 --- a/apps/stock/web-api/src/routes/create-routes.ts +++ b/apps/stock/web-api/src/routes/create-routes.ts @@ -5,8 +5,8 @@ 
import { Hono } from 'hono'; import type { IServiceContainer } from '@stock-bot/handlers'; -import { createHealthRoutes } from './health.routes'; import { createExchangeRoutes } from './exchange.routes'; +import { createHealthRoutes } from './health.routes'; import { createMonitoringRoutes } from './monitoring.routes'; import { createPipelineRoutes } from './pipeline.routes'; @@ -26,4 +26,4 @@ export function createRoutes(container: IServiceContainer): Hono { app.route('/api/pipeline', pipelineRoutes); return app; -} \ No newline at end of file +} diff --git a/apps/stock/web-api/src/routes/exchange.routes.ts b/apps/stock/web-api/src/routes/exchange.routes.ts index fd33cf6..d4b9af0 100644 --- a/apps/stock/web-api/src/routes/exchange.routes.ts +++ b/apps/stock/web-api/src/routes/exchange.routes.ts @@ -2,8 +2,8 @@ * Exchange management routes - Refactored */ import { Hono } from 'hono'; +import type { IServiceContainer } from '@stock-bot/types'; import { getLogger } from '@stock-bot/logger'; -import type { IServiceContainer } from '@stock-bot/handlers'; import { createExchangeService } from '../services/exchange.service'; import { createSuccessResponse, handleError } from '../utils/error-handler'; import { @@ -259,4 +259,4 @@ export function createExchangeRoutes(container: IServiceContainer) { }); return exchangeRoutes; -} \ No newline at end of file +} diff --git a/apps/stock/web-api/src/routes/health.routes.ts b/apps/stock/web-api/src/routes/health.routes.ts index 3398c21..769bf20 100644 --- a/apps/stock/web-api/src/routes/health.routes.ts +++ b/apps/stock/web-api/src/routes/health.routes.ts @@ -2,8 +2,8 @@ * Health check routes factory */ import { Hono } from 'hono'; -import { getLogger } from '@stock-bot/logger'; import type { IServiceContainer } from '@stock-bot/handlers'; +import { getLogger } from '@stock-bot/logger'; const logger = getLogger('health-routes'); @@ -70,7 +70,10 @@ export function createHealthRoutes(container: IServiceContainer) { 
health.checks.postgresql = { status: 'healthy', message: 'Connected and responsive' }; logger.debug('PostgreSQL health check passed'); } else { - health.checks.postgresql = { status: 'unhealthy', message: 'PostgreSQL client not available' }; + health.checks.postgresql = { + status: 'unhealthy', + message: 'PostgreSQL client not available', + }; logger.warn('PostgreSQL health check failed - client not available'); } } catch (error) { @@ -108,4 +111,4 @@ export function createHealthRoutes(container: IServiceContainer) { } // Export legacy routes for backward compatibility during migration -export const healthRoutes = createHealthRoutes({} as IServiceContainer); \ No newline at end of file +export const healthRoutes = createHealthRoutes({} as IServiceContainer); diff --git a/apps/stock/web-api/src/routes/monitoring.routes.ts b/apps/stock/web-api/src/routes/monitoring.routes.ts index 89be314..652bf43 100644 --- a/apps/stock/web-api/src/routes/monitoring.routes.ts +++ b/apps/stock/web-api/src/routes/monitoring.routes.ts @@ -13,167 +13,200 @@ export function createMonitoringRoutes(container: IServiceContainer) { /** * Get overall system health */ - monitoring.get('/', async (c) => { + monitoring.get('/', async c => { try { const health = await monitoringService.getSystemHealth(); - + // Set appropriate status code based on health - const statusCode = health.status === 'healthy' ? 200 : - health.status === 'degraded' ? 503 : 500; - + const statusCode = + health.status === 'healthy' ? 200 : health.status === 'degraded' ? 503 : 500; + return c.json(health, statusCode); } catch (error) { - return c.json({ - status: 'error', - message: 'Failed to retrieve system health', - error: error instanceof Error ? error.message : 'Unknown error', - }, 500); + return c.json( + { + status: 'error', + message: 'Failed to retrieve system health', + error: error instanceof Error ? 
error.message : 'Unknown error', + }, + 500 + ); } }); /** * Get cache/Dragonfly statistics */ - monitoring.get('/cache', async (c) => { + monitoring.get('/cache', async c => { try { const stats = await monitoringService.getCacheStats(); return c.json(stats); } catch (error) { - return c.json({ - error: 'Failed to retrieve cache statistics', - message: error instanceof Error ? error.message : 'Unknown error', - }, 500); + return c.json( + { + error: 'Failed to retrieve cache statistics', + message: error instanceof Error ? error.message : 'Unknown error', + }, + 500 + ); } }); /** * Get queue statistics */ - monitoring.get('/queues', async (c) => { + monitoring.get('/queues', async c => { try { const stats = await monitoringService.getQueueStats(); return c.json({ queues: stats }); } catch (error) { - return c.json({ - error: 'Failed to retrieve queue statistics', - message: error instanceof Error ? error.message : 'Unknown error', - }, 500); + return c.json( + { + error: 'Failed to retrieve queue statistics', + message: error instanceof Error ? error.message : 'Unknown error', + }, + 500 + ); } }); /** * Get specific queue statistics */ - monitoring.get('/queues/:name', async (c) => { + monitoring.get('/queues/:name', async c => { try { const queueName = c.req.param('name'); const stats = await monitoringService.getQueueStats(); const queueStats = stats.find(q => q.name === queueName); - + if (!queueStats) { - return c.json({ - error: 'Queue not found', - message: `Queue '${queueName}' does not exist`, - }, 404); + return c.json( + { + error: 'Queue not found', + message: `Queue '${queueName}' does not exist`, + }, + 404 + ); } - + return c.json(queueStats); } catch (error) { - return c.json({ - error: 'Failed to retrieve queue statistics', - message: error instanceof Error ? error.message : 'Unknown error', - }, 500); + return c.json( + { + error: 'Failed to retrieve queue statistics', + message: error instanceof Error ? 
error.message : 'Unknown error', + }, + 500 + ); } }); /** * Get database statistics */ - monitoring.get('/databases', async (c) => { + monitoring.get('/databases', async c => { try { const stats = await monitoringService.getDatabaseStats(); return c.json({ databases: stats }); } catch (error) { - return c.json({ - error: 'Failed to retrieve database statistics', - message: error instanceof Error ? error.message : 'Unknown error', - }, 500); + return c.json( + { + error: 'Failed to retrieve database statistics', + message: error instanceof Error ? error.message : 'Unknown error', + }, + 500 + ); } }); /** * Get specific database statistics */ - monitoring.get('/databases/:type', async (c) => { + monitoring.get('/databases/:type', async c => { try { const dbType = c.req.param('type') as 'postgres' | 'mongodb' | 'questdb'; const stats = await monitoringService.getDatabaseStats(); const dbStats = stats.find(db => db.type === dbType); - + if (!dbStats) { - return c.json({ - error: 'Database not found', - message: `Database type '${dbType}' not found or not enabled`, - }, 404); + return c.json( + { + error: 'Database not found', + message: `Database type '${dbType}' not found or not enabled`, + }, + 404 + ); } - + return c.json(dbStats); } catch (error) { - return c.json({ - error: 'Failed to retrieve database statistics', - message: error instanceof Error ? error.message : 'Unknown error', - }, 500); + return c.json( + { + error: 'Failed to retrieve database statistics', + message: error instanceof Error ? error.message : 'Unknown error', + }, + 500 + ); } }); /** * Get service metrics */ - monitoring.get('/metrics', async (c) => { + monitoring.get('/metrics', async c => { try { const metrics = await monitoringService.getServiceMetrics(); return c.json(metrics); } catch (error) { - return c.json({ - error: 'Failed to retrieve service metrics', - message: error instanceof Error ? 
error.message : 'Unknown error', - }, 500); + return c.json( + { + error: 'Failed to retrieve service metrics', + message: error instanceof Error ? error.message : 'Unknown error', + }, + 500 + ); } }); /** * Get detailed cache info (Redis INFO command output) */ - monitoring.get('/cache/info', async (c) => { + monitoring.get('/cache/info', async c => { try { if (!container.cache) { - return c.json({ - error: 'Cache not available', - message: 'Cache service is not enabled', - }, 503); + return c.json( + { + error: 'Cache not available', + message: 'Cache service is not enabled', + }, + 503 + ); } - + const info = await container.cache.info(); const stats = await monitoringService.getCacheStats(); - + return c.json({ parsed: stats, raw: info, }); } catch (error) { - return c.json({ - error: 'Failed to retrieve cache info', - message: error instanceof Error ? error.message : 'Unknown error', - }, 500); + return c.json( + { + error: 'Failed to retrieve cache info', + message: error instanceof Error ? error.message : 'Unknown error', + }, + 500 + ); } }); /** * Health check endpoint for monitoring */ - monitoring.get('/ping', (c) => { - return c.json({ - status: 'ok', + monitoring.get('/ping', c => { + return c.json({ + status: 'ok', timestamp: new Date().toISOString(), service: 'monitoring', }); @@ -182,78 +215,90 @@ export function createMonitoringRoutes(container: IServiceContainer) { /** * Get service status for all microservices */ - monitoring.get('/services', async (c) => { + monitoring.get('/services', async c => { try { const services = await monitoringService.getServiceStatus(); return c.json({ services }); } catch (error) { - return c.json({ - error: 'Failed to retrieve service status', - message: error instanceof Error ? error.message : 'Unknown error', - }, 500); + return c.json( + { + error: 'Failed to retrieve service status', + message: error instanceof Error ? 
error.message : 'Unknown error', + }, + 500 + ); } }); /** * Get proxy statistics */ - monitoring.get('/proxies', async (c) => { + monitoring.get('/proxies', async c => { try { const stats = await monitoringService.getProxyStats(); return c.json(stats || { enabled: false }); } catch (error) { - return c.json({ - error: 'Failed to retrieve proxy statistics', - message: error instanceof Error ? error.message : 'Unknown error', - }, 500); + return c.json( + { + error: 'Failed to retrieve proxy statistics', + message: error instanceof Error ? error.message : 'Unknown error', + }, + 500 + ); } }); /** * Get comprehensive system overview */ - monitoring.get('/overview', async (c) => { + monitoring.get('/overview', async c => { try { const overview = await monitoringService.getSystemOverview(); return c.json(overview); } catch (error) { - return c.json({ - error: 'Failed to retrieve system overview', - message: error instanceof Error ? error.message : 'Unknown error', - }, 500); + return c.json( + { + error: 'Failed to retrieve system overview', + message: error instanceof Error ? 
error.message : 'Unknown error', + }, + 500 + ); } }); /** * Test direct BullMQ queue access */ - monitoring.get('/test/queue/:name', async (c) => { + monitoring.get('/test/queue/:name', async c => { const queueName = c.req.param('name'); const { Queue } = await import('bullmq'); - + const connection = { host: 'localhost', port: 6379, - db: 0, // All queues in DB 0 + db: 0, // All queues in DB 0 }; - + const queue = new Queue(queueName, { connection }); - + try { const counts = await queue.getJobCounts(); await queue.close(); - return c.json({ + return c.json({ queueName, - counts + counts, }); } catch (error: any) { await queue.close(); - return c.json({ - queueName, - error: error.message - }, 500); + return c.json( + { + queueName, + error: error.message, + }, + 500 + ); } }); return monitoring; -} \ No newline at end of file +} diff --git a/apps/stock/web-api/src/routes/pipeline.routes.ts b/apps/stock/web-api/src/routes/pipeline.routes.ts index 1e19fc2..202fb19 100644 --- a/apps/stock/web-api/src/routes/pipeline.routes.ts +++ b/apps/stock/web-api/src/routes/pipeline.routes.ts @@ -132,4 +132,4 @@ export function createPipelineRoutes(container: IServiceContainer) { }); return pipeline; -} \ No newline at end of file +} diff --git a/apps/stock/web-api/src/services/exchange.service.ts b/apps/stock/web-api/src/services/exchange.service.ts index cb48694..328cce4 100644 --- a/apps/stock/web-api/src/services/exchange.service.ts +++ b/apps/stock/web-api/src/services/exchange.service.ts @@ -1,5 +1,5 @@ +import type { IServiceContainer } from '@stock-bot/types'; import { getLogger } from '@stock-bot/logger'; -import type { IServiceContainer } from '@stock-bot/handlers'; import { CreateExchangeRequest, CreateProviderMappingRequest, @@ -380,4 +380,4 @@ export class ExchangeService { // Export function to create service instance with container export function createExchangeService(container: IServiceContainer): ExchangeService { return new ExchangeService(container); -} \ No 
newline at end of file +} diff --git a/apps/stock/web-api/src/services/monitoring.service.ts b/apps/stock/web-api/src/services/monitoring.service.ts index bf42e57..4864019 100644 --- a/apps/stock/web-api/src/services/monitoring.service.ts +++ b/apps/stock/web-api/src/services/monitoring.service.ts @@ -3,19 +3,19 @@ * Collects health and performance metrics from all system components */ +import * as os from 'os'; import type { IServiceContainer } from '@stock-bot/handlers'; import { getLogger } from '@stock-bot/logger'; -import type { - CacheStats, - QueueStats, - DatabaseStats, - SystemHealth, +import type { + CacheStats, + DatabaseStats, + ProxyStats, + QueueStats, ServiceMetrics, ServiceStatus, - ProxyStats, - SystemOverview + SystemHealth, + SystemOverview, } from '../types/monitoring.types'; -import * as os from 'os'; export class MonitoringService { private readonly logger = getLogger('monitoring-service'); @@ -46,7 +46,7 @@ export class MonitoringService { // Get cache stats from the provider const cacheStats = this.container.cache.getStats(); - + // Since we can't access Redis info directly, we'll use what's available return { provider: 'dragonfly', @@ -74,7 +74,7 @@ export class MonitoringService { */ async getQueueStats(): Promise { const stats: QueueStats[] = []; - + try { if (!this.container.queue) { this.logger.warn('No queue manager available'); @@ -83,27 +83,27 @@ export class MonitoringService { // Get all queue names from the SmartQueueManager const queueManager = this.container.queue as any; - this.logger.debug('Queue manager type:', { + this.logger.debug('Queue manager type:', { type: queueManager.constructor.name, hasGetAllQueues: typeof queueManager.getAllQueues === 'function', hasQueues: !!queueManager.queues, - hasGetQueue: typeof queueManager.getQueue === 'function' + hasGetQueue: typeof queueManager.getQueue === 'function', }); - + // Always use the known queue names since web-api doesn't create worker queues const handlerMapping = { - 
'proxy': 'data-ingestion', - 'qm': 'data-ingestion', - 'ib': 'data-ingestion', - 'ceo': 'data-ingestion', - 'webshare': 'data-ingestion', - 'exchanges': 'data-pipeline', - 'symbols': 'data-pipeline', + proxy: 'data-ingestion', + qm: 'data-ingestion', + ib: 'data-ingestion', + ceo: 'data-ingestion', + webshare: 'data-ingestion', + exchanges: 'data-pipeline', + symbols: 'data-pipeline', }; - + const queueNames = Object.keys(handlerMapping); this.logger.debug('Using known queue names', { count: queueNames.length, names: queueNames }); - + // Create BullMQ queues directly with the correct format for (const handlerName of queueNames) { try { @@ -114,17 +114,17 @@ export class MonitoringService { port: 6379, db: 0, // All queues now in DB 0 }; - + // Get the service that owns this handler const serviceName = handlerMapping[handlerName as keyof typeof handlerMapping]; - + // Create BullMQ queue with the new naming format {service_handler} const fullQueueName = `{${serviceName}_${handlerName}}`; const bullQueue = new BullMQQueue(fullQueueName, { connection }); - + // Get stats directly from BullMQ const queueStats = await this.getQueueStatsForBullQueue(bullQueue, handlerName); - + stats.push({ name: handlerName, connected: true, @@ -134,7 +134,7 @@ export class MonitoringService { concurrency: 1, }, }); - + // Close the queue connection after getting stats await bullQueue.close(); } catch (error) { @@ -167,7 +167,7 @@ export class MonitoringService { try { // BullMQ provides getJobCounts which returns all counts at once const counts = await bullQueue.getJobCounts(); - + return { waiting: counts.waiting || 0, active: counts.active || 0, @@ -184,11 +184,11 @@ export class MonitoringService { try { const [waiting, active, completed, failed, delayed, paused] = await Promise.all([ bullQueue.getWaitingCount(), - bullQueue.getActiveCount(), + bullQueue.getActiveCount(), bullQueue.getCompletedCount(), bullQueue.getFailedCount(), bullQueue.getDelayedCount(), - 
bullQueue.getPausedCount ? bullQueue.getPausedCount() : 0 + bullQueue.getPausedCount ? bullQueue.getPausedCount() : 0, ]); return { @@ -222,7 +222,7 @@ export class MonitoringService { paused: stats.paused || 0, }; } - + // Try individual count methods const [waiting, active, completed, failed, delayed] = await Promise.all([ this.safeGetCount(queue, 'getWaitingCount', 'getWaiting'), @@ -252,7 +252,7 @@ export class MonitoringService { if (queue[methodName] && typeof queue[methodName] === 'function') { try { const result = await queue[methodName](); - return Array.isArray(result) ? result.length : (result || 0); + return Array.isArray(result) ? result.length : result || 0; } catch (_e) { // Continue to next method } @@ -291,7 +291,7 @@ export class MonitoringService { concurrency: queue.workers[0]?.concurrency || 1, }; } - + // Check queue manager for worker config if (queueManager.config?.defaultQueueOptions) { const options = queueManager.config.defaultQueueOptions; @@ -300,7 +300,7 @@ export class MonitoringService { concurrency: options.concurrency || 1, }; } - + // Check for getWorkerCount method if (queue.getWorkerCount && typeof queue.getWorkerCount === 'function') { const count = queue.getWorkerCount(); @@ -312,7 +312,7 @@ export class MonitoringService { } catch (_e) { // Ignore } - + return undefined; } @@ -331,12 +331,14 @@ export class MonitoringService { // Get pool stats const pool = (this.container.postgres as any).pool; - const poolStats = pool ? { - size: pool.totalCount || 0, - active: pool.idleCount || 0, - idle: pool.waitingCount || 0, - max: pool.options?.max || 0, - } : undefined; + const poolStats = pool + ? 
{ + size: pool.totalCount || 0, + active: pool.idleCount || 0, + idle: pool.waitingCount || 0, + max: pool.options?.max || 0, + } + : undefined; stats.push({ type: 'postgres', @@ -365,7 +367,7 @@ export class MonitoringService { const latency = Date.now() - startTime; const serverStatus = await db.admin().serverStatus(); - + stats.push({ type: 'mongodb', name: 'MongoDB', @@ -393,9 +395,11 @@ export class MonitoringService { try { const startTime = Date.now(); // QuestDB health check - const response = await fetch(`http://${process.env.QUESTDB_HOST || 'localhost'}:9000/exec?query=SELECT%201`); + const response = await fetch( + `http://${process.env.QUESTDB_HOST || 'localhost'}:9000/exec?query=SELECT%201` + ); const latency = Date.now() - startTime; - + stats.push({ type: 'questdb', name: 'QuestDB', @@ -432,23 +436,22 @@ export class MonitoringService { // Determine overall health status const errors: string[] = []; - + if (!cacheStats.connected) { errors.push('Cache service is disconnected'); } - + const disconnectedQueues = queueStats.filter(q => !q.connected); if (disconnectedQueues.length > 0) { errors.push(`${disconnectedQueues.length} queue(s) are disconnected`); } - + const disconnectedDbs = databaseStats.filter(db => !db.connected); if (disconnectedDbs.length > 0) { errors.push(`${disconnectedDbs.length} database(s) are disconnected`); } - const status = errors.length === 0 ? 'healthy' : - errors.length < 3 ? 'degraded' : 'unhealthy'; + const status = errors.length === 0 ? 'healthy' : errors.length < 3 ? 
'degraded' : 'unhealthy'; return { status, @@ -478,7 +481,7 @@ export class MonitoringService { */ async getServiceMetrics(): Promise { const now = new Date().toISOString(); - + return { requestsPerSecond: { timestamp: now, @@ -517,12 +520,12 @@ export class MonitoringService { private parseRedisInfo(info: string): Record { const result: Record = {}; const sections = info.split('\r\n\r\n'); - + for (const section of sections) { const lines = section.split('\r\n'); const sectionName = lines[0]?.replace('# ', '') || 'general'; result[sectionName] = {}; - + for (let i = 1; i < lines.length; i++) { const [key, value] = lines[i].split(':'); if (key && value) { @@ -530,7 +533,7 @@ export class MonitoringService { } } } - + return result; } @@ -539,7 +542,7 @@ export class MonitoringService { */ async getServiceStatus(): Promise { const services: ServiceStatus[] = []; - + // Define service endpoints const serviceEndpoints = [ { name: 'data-ingestion', port: 2001, path: '/health' }, @@ -562,13 +565,13 @@ export class MonitoringService { }); continue; } - + const startTime = Date.now(); const response = await fetch(`http://localhost:${service.port}${service.path}`, { signal: AbortSignal.timeout(5000), // 5 second timeout }); const _latency = Date.now() - startTime; - + if (response.ok) { const data = await response.json(); services.push({ @@ -629,28 +632,28 @@ export class MonitoringService { // Get proxy data from cache using getRaw method // The proxy manager uses cache:proxy: prefix, but web-api cache uses cache:api: const cacheProvider = this.container.cache; - + if (cacheProvider.getRaw) { // Use getRaw to access data with different cache prefix // The proxy manager now uses a global cache:proxy: prefix this.logger.debug('Attempting to fetch proxy data from cache'); - + const [cachedProxies, lastUpdateStr] = await Promise.all([ cacheProvider.getRaw('cache:proxy:active'), - cacheProvider.getRaw('cache:proxy:last-update') + 
cacheProvider.getRaw('cache:proxy:last-update'), ]); - - this.logger.debug('Proxy cache data retrieved', { + + this.logger.debug('Proxy cache data retrieved', { hasProxies: !!cachedProxies, isArray: Array.isArray(cachedProxies), proxyCount: cachedProxies ? cachedProxies.length : 0, - lastUpdate: lastUpdateStr + lastUpdate: lastUpdateStr, }); - + if (cachedProxies && Array.isArray(cachedProxies)) { const workingCount = cachedProxies.filter((p: any) => p.isWorking !== false).length; const failedCount = cachedProxies.filter((p: any) => p.isWorking === false).length; - + return { enabled: true, totalProxies: cachedProxies.length, @@ -662,7 +665,7 @@ export class MonitoringService { } else { this.logger.debug('Cache provider does not support getRaw method'); } - + // No cached data found - proxies might not be initialized yet return { enabled: true, @@ -672,7 +675,7 @@ export class MonitoringService { }; } catch (cacheError) { this.logger.debug('Could not retrieve proxy data from cache', { error: cacheError }); - + // Return basic stats if cache query fails return { enabled: true, @@ -727,7 +730,7 @@ export class MonitoringService { const idle = totalIdle / cpus.length; const total = totalTick / cpus.length; - const usage = 100 - ~~(100 * idle / total); + const usage = 100 - ~~((100 * idle) / total); return { usage, @@ -742,21 +745,21 @@ export class MonitoringService { private getSystemMemory() { const totalMem = os.totalmem(); const freeMem = os.freemem(); - + // On Linux, freeMem includes buffers/cache, but we want "available" memory // which better represents memory that can be used by applications let availableMem = freeMem; - + // Try to read from /proc/meminfo for more accurate memory stats on Linux if (os.platform() === 'linux') { try { const fs = require('fs'); const meminfo = fs.readFileSync('/proc/meminfo', 'utf8'); const lines = meminfo.split('\n'); - + let memAvailable = 0; let _memTotal = 0; - + for (const line of lines) { if 
(line.startsWith('MemAvailable:')) { memAvailable = parseInt(line.split(/\s+/)[1], 10) * 1024; // Convert from KB to bytes @@ -764,7 +767,7 @@ export class MonitoringService { _memTotal = parseInt(line.split(/\s+/)[1], 10) * 1024; } } - + if (memAvailable > 0) { availableMem = memAvailable; } @@ -773,7 +776,7 @@ export class MonitoringService { this.logger.debug('Could not read /proc/meminfo', { error }); } } - + const usedMem = totalMem - availableMem; return { @@ -784,4 +787,4 @@ export class MonitoringService { percentage: (usedMem / totalMem) * 100, }; } -} \ No newline at end of file +} diff --git a/apps/stock/web-api/src/services/pipeline.service.ts b/apps/stock/web-api/src/services/pipeline.service.ts index f95906f..4162e4a 100644 --- a/apps/stock/web-api/src/services/pipeline.service.ts +++ b/apps/stock/web-api/src/services/pipeline.service.ts @@ -332,4 +332,4 @@ export class PipelineService { }; } } -} \ No newline at end of file +} diff --git a/apps/stock/web-api/src/types/monitoring.types.ts b/apps/stock/web-api/src/types/monitoring.types.ts index 8d41532..6560050 100644 --- a/apps/stock/web-api/src/types/monitoring.types.ts +++ b/apps/stock/web-api/src/types/monitoring.types.ts @@ -124,4 +124,4 @@ export interface SystemOverview { architecture: string; hostname: string; }; -} \ No newline at end of file +} diff --git a/apps/stock/web-app/src/components/ui/index.ts b/apps/stock/web-app/src/components/ui/index.ts index e065fee..2bc576a 100644 --- a/apps/stock/web-app/src/components/ui/index.ts +++ b/apps/stock/web-app/src/components/ui/index.ts @@ -3,4 +3,3 @@ export { Card, CardContent, CardHeader } from './Card'; export { DataTable } from './DataTable'; export { Dialog, DialogContent, DialogHeader, DialogTitle } from './Dialog'; export { StatCard } from './StatCard'; - diff --git a/apps/stock/web-app/src/features/exchanges/components/index.ts b/apps/stock/web-app/src/features/exchanges/components/index.ts index 2f6b5a7..b6984b4 100644 --- 
a/apps/stock/web-app/src/features/exchanges/components/index.ts +++ b/apps/stock/web-app/src/features/exchanges/components/index.ts @@ -1,4 +1,3 @@ - export { AddProviderMappingDialog } from './AddProviderMappingDialog'; export { AddExchangeDialog } from './AddExchangeDialog'; export { DeleteExchangeDialog } from './DeleteExchangeDialog'; diff --git a/apps/stock/web-app/src/features/exchanges/types/index.ts b/apps/stock/web-app/src/features/exchanges/types/index.ts index ad05828..f402c81 100644 --- a/apps/stock/web-app/src/features/exchanges/types/index.ts +++ b/apps/stock/web-app/src/features/exchanges/types/index.ts @@ -133,4 +133,4 @@ interface BaseDialogProps { export interface AddExchangeDialogProps extends BaseDialogProps { onCreateExchange: (request: CreateExchangeRequest) => Promise; -} \ No newline at end of file +} diff --git a/apps/stock/web-app/src/features/monitoring/components/index.ts b/apps/stock/web-app/src/features/monitoring/components/index.ts index c94ddfe..29e3ac2 100644 --- a/apps/stock/web-app/src/features/monitoring/components/index.ts +++ b/apps/stock/web-app/src/features/monitoring/components/index.ts @@ -11,4 +11,4 @@ export { ProxyStatsCard } from './ProxyStatsCard'; export { StatusBadge, ConnectionStatus, HealthStatus, ServiceStatusIndicator } from './StatusBadge'; export { MetricCard } from './MetricCard'; export { ServiceCard } from './ServiceCard'; -export { DatabaseCard } from './DatabaseCard'; \ No newline at end of file +export { DatabaseCard } from './DatabaseCard'; diff --git a/apps/stock/web-app/src/features/monitoring/hooks/index.ts b/apps/stock/web-app/src/features/monitoring/hooks/index.ts index 22327de..6c141b2 100644 --- a/apps/stock/web-app/src/features/monitoring/hooks/index.ts +++ b/apps/stock/web-app/src/features/monitoring/hooks/index.ts @@ -2,4 +2,4 @@ * Monitoring hooks exports */ -export * from './useMonitoring'; \ No newline at end of file +export * from './useMonitoring'; diff --git 
a/apps/stock/web-app/src/features/monitoring/hooks/useMonitoring.ts b/apps/stock/web-app/src/features/monitoring/hooks/useMonitoring.ts index 22ae4f0..0da703f 100644 --- a/apps/stock/web-app/src/features/monitoring/hooks/useMonitoring.ts +++ b/apps/stock/web-app/src/features/monitoring/hooks/useMonitoring.ts @@ -2,16 +2,16 @@ * Custom hook for monitoring data */ -import { useState, useEffect, useCallback } from 'react'; +import { useCallback, useEffect, useState } from 'react'; import { monitoringApi } from '../services/monitoringApi'; -import type { - SystemHealth, - CacheStats, - QueueStats, +import type { + CacheStats, DatabaseStats, - ServiceStatus, ProxyStats, - SystemOverview + QueueStats, + ServiceStatus, + SystemHealth, + SystemOverview, } from '../types'; export function useSystemHealth(refreshInterval: number = 5000) { @@ -33,7 +33,7 @@ export function useSystemHealth(refreshInterval: number = 5000) { useEffect(() => { fetchData(); - + if (refreshInterval > 0) { const interval = setInterval(fetchData, refreshInterval); return () => clearInterval(interval); @@ -62,7 +62,7 @@ export function useCacheStats(refreshInterval: number = 5000) { useEffect(() => { fetchData(); - + if (refreshInterval > 0) { const interval = setInterval(fetchData, refreshInterval); return () => clearInterval(interval); @@ -91,7 +91,7 @@ export function useQueueStats(refreshInterval: number = 5000) { useEffect(() => { fetchData(); - + if (refreshInterval > 0) { const interval = setInterval(fetchData, refreshInterval); return () => clearInterval(interval); @@ -120,7 +120,7 @@ export function useDatabaseStats(refreshInterval: number = 5000) { useEffect(() => { fetchData(); - + if (refreshInterval > 0) { const interval = setInterval(fetchData, refreshInterval); return () => clearInterval(interval); @@ -149,7 +149,7 @@ export function useServiceStatus(refreshInterval: number = 5000) { useEffect(() => { fetchData(); - + if (refreshInterval > 0) { const interval = setInterval(fetchData, 
refreshInterval); return () => clearInterval(interval); @@ -178,7 +178,7 @@ export function useProxyStats(refreshInterval: number = 5000) { useEffect(() => { fetchData(); - + if (refreshInterval > 0) { const interval = setInterval(fetchData, refreshInterval); return () => clearInterval(interval); @@ -207,7 +207,7 @@ export function useSystemOverview(refreshInterval: number = 5000) { useEffect(() => { fetchData(); - + if (refreshInterval > 0) { const interval = setInterval(fetchData, refreshInterval); return () => clearInterval(interval); @@ -215,4 +215,4 @@ export function useSystemOverview(refreshInterval: number = 5000) { }, [fetchData, refreshInterval]); return { data, loading, error, refetch: fetchData }; -} \ No newline at end of file +} diff --git a/apps/stock/web-app/src/features/monitoring/index.ts b/apps/stock/web-app/src/features/monitoring/index.ts index f56a62a..8998553 100644 --- a/apps/stock/web-app/src/features/monitoring/index.ts +++ b/apps/stock/web-app/src/features/monitoring/index.ts @@ -5,4 +5,4 @@ export { MonitoringPage } from './MonitoringPage'; export * from './types'; export * from './hooks/useMonitoring'; -export * from './services/monitoringApi'; \ No newline at end of file +export * from './services/monitoringApi'; diff --git a/apps/stock/web-app/src/features/monitoring/services/monitoringApi.ts b/apps/stock/web-app/src/features/monitoring/services/monitoringApi.ts index 2e3e210..df2488e 100644 --- a/apps/stock/web-app/src/features/monitoring/services/monitoringApi.ts +++ b/apps/stock/web-app/src/features/monitoring/services/monitoringApi.ts @@ -2,14 +2,14 @@ * Monitoring API Service */ -import type { - SystemHealth, - CacheStats, - QueueStats, +import type { + CacheStats, DatabaseStats, - ServiceStatus, ProxyStats, - SystemOverview + QueueStats, + ServiceStatus, + SystemHealth, + SystemOverview, } from '../types'; const API_BASE_URL = import.meta.env.VITE_API_BASE_URL || 'http://localhost:2003'; @@ -125,4 +125,4 @@ export const 
monitoringApi = { } return response.json(); }, -}; \ No newline at end of file +}; diff --git a/apps/stock/web-app/src/features/monitoring/types/index.ts b/apps/stock/web-app/src/features/monitoring/types/index.ts index e22a419..efb1000 100644 --- a/apps/stock/web-app/src/features/monitoring/types/index.ts +++ b/apps/stock/web-app/src/features/monitoring/types/index.ts @@ -117,4 +117,4 @@ export interface SystemOverview { architecture: string; hostname: string; }; -} \ No newline at end of file +} diff --git a/apps/stock/web-app/src/features/monitoring/utils/formatters.ts b/apps/stock/web-app/src/features/monitoring/utils/formatters.ts index a07878a..631ae6c 100644 --- a/apps/stock/web-app/src/features/monitoring/utils/formatters.ts +++ b/apps/stock/web-app/src/features/monitoring/utils/formatters.ts @@ -1,42 +1,48 @@ -/** - * Common formatting utilities for monitoring components - */ - -export function formatUptime(ms: number): string { - const seconds = Math.floor(ms / 1000); - const minutes = Math.floor(seconds / 60); - const hours = Math.floor(minutes / 60); - const days = Math.floor(hours / 24); - - if (days > 0) {return `${days}d ${hours % 24}h`;} - if (hours > 0) {return `${hours}h ${minutes % 60}m`;} - if (minutes > 0) {return `${minutes}m ${seconds % 60}s`;} - return `${seconds}s`; -} - -export function formatBytes(bytes: number): string { - const gb = bytes / 1024 / 1024 / 1024; - if (gb >= 1) { - return gb.toFixed(2) + ' GB'; - } - - const mb = bytes / 1024 / 1024; - if (mb >= 1) { - return mb.toFixed(2) + ' MB'; - } - - const kb = bytes / 1024; - if (kb >= 1) { - return kb.toFixed(2) + ' KB'; - } - - return bytes + ' B'; -} - -export function formatNumber(num: number): string { - return num.toLocaleString(); -} - -export function formatPercentage(value: number, decimals: number = 1): string { - return `${value.toFixed(decimals)}%`; -} \ No newline at end of file +/** + * Common formatting utilities for monitoring components + */ + +export function 
formatUptime(ms: number): string { + const seconds = Math.floor(ms / 1000); + const minutes = Math.floor(seconds / 60); + const hours = Math.floor(minutes / 60); + const days = Math.floor(hours / 24); + + if (days > 0) { + return `${days}d ${hours % 24}h`; + } + if (hours > 0) { + return `${hours}h ${minutes % 60}m`; + } + if (minutes > 0) { + return `${minutes}m ${seconds % 60}s`; + } + return `${seconds}s`; +} + +export function formatBytes(bytes: number): string { + const gb = bytes / 1024 / 1024 / 1024; + if (gb >= 1) { + return gb.toFixed(2) + ' GB'; + } + + const mb = bytes / 1024 / 1024; + if (mb >= 1) { + return mb.toFixed(2) + ' MB'; + } + + const kb = bytes / 1024; + if (kb >= 1) { + return kb.toFixed(2) + ' KB'; + } + + return bytes + ' B'; +} + +export function formatNumber(num: number): string { + return num.toLocaleString(); +} + +export function formatPercentage(value: number, decimals: number = 1): string { + return `${value.toFixed(decimals)}%`; +} diff --git a/apps/stock/web-app/src/features/pipeline/hooks/usePipeline.ts b/apps/stock/web-app/src/features/pipeline/hooks/usePipeline.ts index 9a14810..e9878bd 100644 --- a/apps/stock/web-app/src/features/pipeline/hooks/usePipeline.ts +++ b/apps/stock/web-app/src/features/pipeline/hooks/usePipeline.ts @@ -12,28 +12,29 @@ export function usePipeline() { const [error, setError] = useState(null); const [lastJobResult, setLastJobResult] = useState(null); - const executeOperation = useCallback(async ( - operation: () => Promise - ): Promise => { - try { - setLoading(true); - setError(null); - const result = await operation(); - setLastJobResult(result); - if (!result.success) { - setError(result.error || 'Operation failed'); + const executeOperation = useCallback( + async (operation: () => Promise): Promise => { + try { + setLoading(true); + setError(null); + const result = await operation(); + setLastJobResult(result); + if (!result.success) { + setError(result.error || 'Operation failed'); + return false; 
+ } + return true; + } catch (err) { + const errorMessage = err instanceof Error ? err.message : 'Unknown error occurred'; + setError(errorMessage); + setLastJobResult({ success: false, error: errorMessage }); return false; + } finally { + setLoading(false); } - return true; - } catch (err) { - const errorMessage = err instanceof Error ? err.message : 'Unknown error occurred'; - setError(errorMessage); - setLastJobResult({ success: false, error: errorMessage }); - return false; - } finally { - setLoading(false); - } - }, []); + }, + [] + ); // Symbol sync operations const syncQMSymbols = useCallback( @@ -53,7 +54,7 @@ export function usePipeline() { ); const syncAllExchanges = useCallback( - (clearFirst: boolean = false) => + (clearFirst: boolean = false) => executeOperation(() => pipelineApi.syncAllExchanges(clearFirst)), [executeOperation] ); @@ -71,7 +72,7 @@ export function usePipeline() { // Maintenance operations const clearPostgreSQLData = useCallback( - (dataType: DataClearType = 'all') => + (dataType: DataClearType = 'all') => executeOperation(() => pipelineApi.clearPostgreSQLData(dataType)), [executeOperation] ); @@ -122,7 +123,8 @@ export function usePipeline() { setError(result.error || 'Failed to get provider mapping stats'); return null; } catch (err) { - const errorMessage = err instanceof Error ? err.message : 'Failed to get provider mapping stats'; + const errorMessage = + err instanceof Error ? 
err.message : 'Failed to get provider mapping stats'; setError(errorMessage); return null; } finally { @@ -156,4 +158,4 @@ export function usePipeline() { getExchangeStats, getProviderMappingStats, }; -} \ No newline at end of file +} diff --git a/apps/stock/web-app/src/features/pipeline/index.ts b/apps/stock/web-app/src/features/pipeline/index.ts index c4040e8..c9aaaf4 100644 --- a/apps/stock/web-app/src/features/pipeline/index.ts +++ b/apps/stock/web-app/src/features/pipeline/index.ts @@ -1,3 +1,3 @@ export { PipelinePage } from './PipelinePage'; export * from './hooks/usePipeline'; -export * from './types'; \ No newline at end of file +export * from './types'; diff --git a/apps/stock/web-app/src/features/pipeline/services/pipelineApi.ts b/apps/stock/web-app/src/features/pipeline/services/pipelineApi.ts index cfb97fe..4c9f266 100644 --- a/apps/stock/web-app/src/features/pipeline/services/pipelineApi.ts +++ b/apps/stock/web-app/src/features/pipeline/services/pipelineApi.ts @@ -1,16 +1,9 @@ -import type { - DataClearType, - PipelineJobResult, - PipelineStatsResult, -} from '../types'; +import type { DataClearType, PipelineJobResult, PipelineStatsResult } from '../types'; const API_BASE_URL = import.meta.env.VITE_API_BASE_URL || 'http://localhost:2003'; class PipelineApiService { - private async request( - endpoint: string, - options?: RequestInit - ): Promise { + private async request(endpoint: string, options?: RequestInit): Promise { const url = `${API_BASE_URL}/pipeline${endpoint}`; const response = await fetch(url, { @@ -79,4 +72,4 @@ class PipelineApiService { } // Export singleton instance -export const pipelineApi = new PipelineApiService(); \ No newline at end of file +export const pipelineApi = new PipelineApiService(); diff --git a/apps/stock/web-app/src/features/pipeline/types/index.ts b/apps/stock/web-app/src/features/pipeline/types/index.ts index edf1c0c..8ee2c63 100644 --- a/apps/stock/web-app/src/features/pipeline/types/index.ts +++ 
b/apps/stock/web-app/src/features/pipeline/types/index.ts @@ -32,7 +32,6 @@ export interface ProviderMappingStats { coveragePercentage: number; } - export type DataClearType = 'exchanges' | 'provider_mappings' | 'all'; export interface PipelineOperation { @@ -44,4 +43,4 @@ export interface PipelineOperation { category: 'sync' | 'stats' | 'maintenance'; dangerous?: boolean; params?: Record; -} \ No newline at end of file +} diff --git a/apps/stock/web-app/src/lib/constants.ts b/apps/stock/web-app/src/lib/constants.ts index 7501445..d6fc1d5 100644 --- a/apps/stock/web-app/src/lib/constants.ts +++ b/apps/stock/web-app/src/lib/constants.ts @@ -1,13 +1,13 @@ import { BuildingLibraryIcon, ChartBarIcon, + ChartPieIcon, + CircleStackIcon, CogIcon, DocumentTextIcon, HomeIcon, PresentationChartLineIcon, ServerStackIcon, - CircleStackIcon, - ChartPieIcon, } from '@heroicons/react/24/outline'; export interface NavigationItem { @@ -23,13 +23,13 @@ export const navigation: NavigationItem[] = [ { name: 'Portfolio', href: '/portfolio', icon: ChartBarIcon }, { name: 'Strategies', href: '/strategies', icon: DocumentTextIcon }, { name: 'Analytics', href: '/analytics', icon: PresentationChartLineIcon }, - { - name: 'System', + { + name: 'System', icon: ServerStackIcon, children: [ { name: 'Monitoring', href: '/system/monitoring', icon: ChartPieIcon }, { name: 'Pipeline', href: '/system/pipeline', icon: CircleStackIcon }, - ] + ], }, { name: 'Settings', href: '/settings', icon: CogIcon }, ]; diff --git a/bun.lock b/bun.lock index 201cb96..6824beb 100644 --- a/bun.lock +++ b/bun.lock @@ -189,6 +189,7 @@ "@stock-bot/browser": "workspace:*", "@stock-bot/cache": "workspace:*", "@stock-bot/config": "workspace:*", + "@stock-bot/handler-registry": "workspace:*", "@stock-bot/handlers": "workspace:*", "@stock-bot/logger": "workspace:*", "@stock-bot/mongodb": "workspace:*", @@ -199,6 +200,7 @@ "@stock-bot/shutdown": "workspace:*", "@stock-bot/types": "workspace:*", "awilix": "^12.0.5", + "glob": 
"^10.0.0", "hono": "^4.0.0", "zod": "^3.23.8", }, @@ -220,12 +222,24 @@ "typescript": "^5.3.0", }, }, + "libs/core/handler-registry": { + "name": "@stock-bot/handler-registry", + "version": "1.0.0", + "dependencies": { + "@stock-bot/types": "workspace:*", + }, + "devDependencies": { + "@types/bun": "*", + "typescript": "*", + }, + }, "libs/core/handlers": { "name": "@stock-bot/handlers", "version": "1.0.0", "dependencies": { "@stock-bot/cache": "workspace:*", "@stock-bot/config": "workspace:*", + "@stock-bot/handler-registry": "workspace:*", "@stock-bot/logger": "workspace:*", "@stock-bot/types": "workspace:*", "@stock-bot/utils": "workspace:*", @@ -257,7 +271,7 @@ "version": "1.0.0", "dependencies": { "@stock-bot/cache": "*", - "@stock-bot/handlers": "*", + "@stock-bot/handler-registry": "*", "@stock-bot/logger": "*", "@stock-bot/types": "*", "bullmq": "^5.0.0", @@ -820,6 +834,8 @@ "@stock-bot/event-bus": ["@stock-bot/event-bus@workspace:libs/core/event-bus"], + "@stock-bot/handler-registry": ["@stock-bot/handler-registry@workspace:libs/core/handler-registry"], + "@stock-bot/handlers": ["@stock-bot/handlers@workspace:libs/core/handlers"], "@stock-bot/logger": ["@stock-bot/logger@workspace:libs/core/logger"], @@ -2144,7 +2160,7 @@ "side-channel-weakmap": ["side-channel-weakmap@1.0.2", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3", "side-channel-map": "^1.0.1" } }, "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A=="], - "signal-exit": ["signal-exit@3.0.7", "", {}, "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ=="], + "signal-exit": ["signal-exit@4.1.0", "", {}, "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw=="], "simple-concat": ["simple-concat@1.0.1", "", {}, 
"sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q=="], @@ -2364,7 +2380,7 @@ "word-wrap": ["word-wrap@1.2.5", "", {}, "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA=="], - "wrap-ansi": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="], + "wrap-ansi": ["wrap-ansi@8.1.0", "", { "dependencies": { "ansi-styles": "^6.1.0", "string-width": "^5.0.1", "strip-ansi": "^7.0.1" } }, "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ=="], "wrap-ansi-cjs": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="], @@ -2430,8 +2446,6 @@ "@isaacs/cliui/strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="], - "@isaacs/cliui/wrap-ansi": ["wrap-ansi@8.1.0", "", { "dependencies": { "ansi-styles": "^6.1.0", "string-width": "^5.0.1", "strip-ansi": "^7.0.1" } }, "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ=="], - "@mongodb-js/oidc-plugin/express": ["express@4.21.2", "", { "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", "body-parser": "1.20.3", "content-disposition": "0.5.4", "content-type": "~1.0.4", "cookie": "0.7.1", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "etag": "~1.8.1", "finalhandler": "1.3.1", "fresh": "0.5.2", "http-errors": "2.0.0", "merge-descriptors": "1.0.3", "methods": "~1.1.2", "on-finished": "2.4.1", "parseurl": "~1.3.3", "path-to-regexp": 
"0.1.12", "proxy-addr": "~2.0.7", "qs": "6.13.0", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", "send": "0.19.0", "serve-static": "1.16.2", "setprototypeof": "1.2.0", "statuses": "2.0.1", "type-is": "~1.6.18", "utils-merge": "1.0.1", "vary": "~1.1.2" } }, "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA=="], "@pm2/agent/chalk": ["chalk@3.0.0", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg=="], @@ -2450,6 +2464,8 @@ "@pm2/io/semver": ["semver@7.5.4", "", { "dependencies": { "lru-cache": "^6.0.0" }, "bin": { "semver": "bin/semver.js" } }, "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA=="], + "@pm2/io/signal-exit": ["signal-exit@3.0.7", "", {}, "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ=="], + "@pm2/io/tslib": ["tslib@1.9.3", "", {}, "sha512-4krF8scpejhaOgqzBEcGM7yDIEfi0/8+8zDRZhNZZ2kjmHJ4hv3zCbQWxoJGz1iw5U0Jl0nma13xzHXcncMavQ=="], "@pm2/js-api/async": ["async@2.6.4", "", { "dependencies": { "lodash": "^4.17.14" } }, "sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA=="], @@ -2516,6 +2532,8 @@ "cli-tableau/chalk": ["chalk@3.0.0", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg=="], + "cliui/wrap-ansi": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="], + "compress-commons/is-stream": ["is-stream@2.0.1", "", {}, "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg=="], "decompress-response/mimic-response": 
["mimic-response@3.1.0", "", {}, "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ=="], @@ -2550,14 +2568,14 @@ "execa/is-stream": ["is-stream@3.0.0", "", {}, "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA=="], + "execa/signal-exit": ["signal-exit@3.0.7", "", {}, "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ=="], + "express/cookie": ["cookie@0.7.2", "", {}, "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w=="], "express/mime-types": ["mime-types@3.0.1", "", { "dependencies": { "mime-db": "^1.54.0" } }, "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA=="], "fast-glob/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="], - "foreground-child/signal-exit": ["signal-exit@4.1.0", "", {}, "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw=="], - "get-uri/data-uri-to-buffer": ["data-uri-to-buffer@6.0.2", "", {}, "sha512-7hvf7/GW8e86rW0ptuwS3OcBGDjIi6SZva7hCyWC0yYry2cOPmLIjXAUHI6DK2HsnwJd9ifmt57i8eV2n4YNpw=="], "glob/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], @@ -2620,6 +2638,8 @@ "prebuild-install/tar-fs": ["tar-fs@2.1.3", "", { "dependencies": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", "pump": "^3.0.0", "tar-stream": "^2.1.4" } }, "sha512-090nwYJDmlhwFwEW3QQl+vaNnxsO2yVsd45eTKRBzSzu+hlb1w2K9inVq5b0ngXuLVqQ4ApvsUHHnu/zQNkWAg=="], + "proper-lockfile/signal-exit": ["signal-exit@3.0.7", "", {}, "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ=="], + "protobufjs/@types/node": 
["@types/node@22.15.32", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-3jigKqgSjsH6gYZv2nEsqdXfZqIFGAV36XYYjf9KGZ3PSG+IhLecqPnI310RvjutyMwifE2hhhNEklOUrvx/wA=="], "proxy-agent/lru-cache": ["lru-cache@7.18.3", "", {}, "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA=="], @@ -2648,6 +2668,12 @@ "win-export-certificate-and-key/node-addon-api": ["node-addon-api@3.2.1", "", {}, "sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A=="], + "wrap-ansi/ansi-styles": ["ansi-styles@6.2.1", "", {}, "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug=="], + + "wrap-ansi/string-width": ["string-width@5.1.2", "", { "dependencies": { "eastasianwidth": "^0.2.0", "emoji-regex": "^9.2.2", "strip-ansi": "^7.0.1" } }, "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA=="], + + "wrap-ansi/strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="], + "yauzl/buffer-crc32": ["buffer-crc32@0.2.13", "", {}, "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ=="], "@aws-crypto/sha256-browser/@smithy/util-utf8/@smithy/util-buffer-from": ["@smithy/util-buffer-from@2.2.0", "", { "dependencies": { "@smithy/is-array-buffer": "^2.2.0", "tslib": "^2.6.2" } }, "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA=="], @@ -2660,8 +2686,6 @@ "@isaacs/cliui/strip-ansi/ansi-regex": ["ansi-regex@6.1.0", "", {}, "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA=="], - "@isaacs/cliui/wrap-ansi/ansi-styles": ["ansi-styles@6.2.1", "", {}, "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug=="], - 
"@mongodb-js/oidc-plugin/express/accepts": ["accepts@1.3.8", "", { "dependencies": { "mime-types": "~2.1.34", "negotiator": "0.6.3" } }, "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw=="], "@mongodb-js/oidc-plugin/express/body-parser": ["body-parser@1.20.3", "", { "dependencies": { "bytes": "3.1.2", "content-type": "~1.0.5", "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", "qs": "6.13.0", "raw-body": "2.5.2", "type-is": "~1.6.18", "unpipe": "1.0.0" } }, "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g=="], @@ -2880,12 +2904,18 @@ "run-applescript/execa/onetime": ["onetime@5.1.2", "", { "dependencies": { "mimic-fn": "^2.1.0" } }, "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg=="], + "run-applescript/execa/signal-exit": ["signal-exit@3.0.7", "", {}, "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ=="], + "run-applescript/execa/strip-final-newline": ["strip-final-newline@2.0.0", "", {}, "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA=="], "send/mime-types/mime-db": ["mime-db@1.54.0", "", {}, "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ=="], "type-is/mime-types/mime-db": ["mime-db@1.54.0", "", {}, "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ=="], + "wrap-ansi/string-width/emoji-regex": ["emoji-regex@9.2.2", "", {}, "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg=="], + + "wrap-ansi/strip-ansi/ansi-regex": ["ansi-regex@6.1.0", "", {}, "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA=="], + 
"@aws-crypto/sha256-browser/@smithy/util-utf8/@smithy/util-buffer-from/@smithy/is-array-buffer": ["@smithy/is-array-buffer@2.2.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA=="], "@aws-crypto/util/@smithy/util-utf8/@smithy/util-buffer-from/@smithy/is-array-buffer": ["@smithy/is-array-buffer@2.2.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA=="], diff --git a/knip.json b/knip.json index 14f7f53..b0c336f 100644 --- a/knip.json +++ b/knip.json @@ -1,4 +1,4 @@ { "entry": ["src/index.ts"], "project": ["src/**/*.ts"] -} \ No newline at end of file +} diff --git a/libs/core/cache/src/cache-factory.ts b/libs/core/cache/src/cache-factory.ts index f778c0e..f82caa9 100644 --- a/libs/core/cache/src/cache-factory.ts +++ b/libs/core/cache/src/cache-factory.ts @@ -1,23 +1,23 @@ -import { NamespacedCache } from './namespaced-cache'; -import type { CacheProvider } from './types'; - -/** - * Factory function to create namespaced caches - * Provides a clean API for services to get their own namespaced cache - */ -export function createNamespacedCache( - cache: CacheProvider | null | undefined, - namespace: string -): CacheProvider | null { - if (!cache) { - return null; - } - return new NamespacedCache(cache, namespace); -} - -/** - * Type guard to check if cache is available - */ -export function isCacheAvailable(cache: any): cache is CacheProvider { - return cache !== null && cache !== undefined && typeof cache.get === 'function'; -} \ No newline at end of file +import { NamespacedCache } from './namespaced-cache'; +import type { CacheProvider } from './types'; + +/** + * Factory function to create namespaced caches + * Provides a clean API for services to get their own namespaced cache + */ +export function createNamespacedCache( + cache: CacheProvider | null | undefined, + namespace: 
string +): CacheProvider | null { + if (!cache) { + return null; + } + return new NamespacedCache(cache, namespace); +} + +/** + * Type guard to check if cache is available + */ +export function isCacheAvailable(cache: any): cache is CacheProvider { + return cache !== null && cache !== undefined && typeof cache.get === 'function'; +} diff --git a/libs/core/cache/src/connection-manager.ts b/libs/core/cache/src/connection-manager.ts index 9339f67..2c48bda 100644 --- a/libs/core/cache/src/connection-manager.ts +++ b/libs/core/cache/src/connection-manager.ts @@ -88,7 +88,7 @@ export class RedisConnectionManager { }; const redis = new Redis(redisOptions); - + // Use the provided logger or fall back to instance logger const log = logger || this.logger; diff --git a/libs/core/cache/src/namespaced-cache.ts b/libs/core/cache/src/namespaced-cache.ts index c42ed63..13a399d 100644 --- a/libs/core/cache/src/namespaced-cache.ts +++ b/libs/core/cache/src/namespaced-cache.ts @@ -1,101 +1,100 @@ -import type { CacheProvider } from './types'; - -/** - * A cache wrapper that automatically prefixes all keys with a namespace - * Used to provide isolated cache spaces for different services - */ -export class NamespacedCache implements CacheProvider { - private readonly prefix: string; - - constructor( - private readonly cache: CacheProvider, - private readonly namespace: string - ) { - this.prefix = `cache:${namespace}:`; - } - - async get(key: string): Promise { - return this.cache.get(`${this.prefix}${key}`); - } - - async set( - key: string, - value: T, - options?: - | number - | { - ttl?: number; - preserveTTL?: boolean; - onlyIfExists?: boolean; - onlyIfNotExists?: boolean; - getOldValue?: boolean; - } - ): Promise { - return this.cache.set(`${this.prefix}${key}`, value, options); - } - - async del(key: string): Promise { - return this.cache.del(`${this.prefix}${key}`); - } - - async exists(key: string): Promise { - return this.cache.exists(`${this.prefix}${key}`); - } - - async 
keys(pattern: string = '*'): Promise { - const fullPattern = `${this.prefix}${pattern}`; - const keys = await this.cache.keys(fullPattern); - // Remove the prefix from returned keys for cleaner API - return keys.map(k => k.substring(this.prefix.length)); - } - - async clear(): Promise { - // Clear only keys with this namespace prefix - const keys = await this.cache.keys(`${this.prefix}*`); - if (keys.length > 0) { - await Promise.all(keys.map(key => this.cache.del(key))); - } - } - - - getStats() { - return this.cache.getStats(); - } - - async health(): Promise { - return this.cache.health(); - } - - isReady(): boolean { - return this.cache.isReady(); - } - - async waitForReady(timeout?: number): Promise { - return this.cache.waitForReady(timeout); - } - - async close(): Promise { - // Namespaced cache doesn't own the connection, so we don't close it - // The underlying cache instance should be closed by its owner - } - - getNamespace(): string { - return this.namespace; - } - - getFullPrefix(): string { - return this.prefix; - } - - /** - * Get a value using a raw Redis key (bypassing the namespace prefix) - * Delegates to the underlying cache's getRaw method if available - */ - async getRaw(key: string): Promise { - if (this.cache.getRaw) { - return this.cache.getRaw(key); - } - // Fallback for caches that don't implement getRaw - return null; - } -} \ No newline at end of file +import type { CacheProvider } from './types'; + +/** + * A cache wrapper that automatically prefixes all keys with a namespace + * Used to provide isolated cache spaces for different services + */ +export class NamespacedCache implements CacheProvider { + private readonly prefix: string; + + constructor( + private readonly cache: CacheProvider, + private readonly namespace: string + ) { + this.prefix = `cache:${namespace}:`; + } + + async get(key: string): Promise { + return this.cache.get(`${this.prefix}${key}`); + } + + async set( + key: string, + value: T, + options?: + | number + | { 
+ ttl?: number; + preserveTTL?: boolean; + onlyIfExists?: boolean; + onlyIfNotExists?: boolean; + getOldValue?: boolean; + } + ): Promise { + return this.cache.set(`${this.prefix}${key}`, value, options); + } + + async del(key: string): Promise { + return this.cache.del(`${this.prefix}${key}`); + } + + async exists(key: string): Promise { + return this.cache.exists(`${this.prefix}${key}`); + } + + async keys(pattern: string = '*'): Promise { + const fullPattern = `${this.prefix}${pattern}`; + const keys = await this.cache.keys(fullPattern); + // Remove the prefix from returned keys for cleaner API + return keys.map(k => k.substring(this.prefix.length)); + } + + async clear(): Promise { + // Clear only keys with this namespace prefix + const keys = await this.cache.keys(`${this.prefix}*`); + if (keys.length > 0) { + await Promise.all(keys.map(key => this.cache.del(key))); + } + } + + getStats() { + return this.cache.getStats(); + } + + async health(): Promise { + return this.cache.health(); + } + + isReady(): boolean { + return this.cache.isReady(); + } + + async waitForReady(timeout?: number): Promise { + return this.cache.waitForReady(timeout); + } + + async close(): Promise { + // Namespaced cache doesn't own the connection, so we don't close it + // The underlying cache instance should be closed by its owner + } + + getNamespace(): string { + return this.namespace; + } + + getFullPrefix(): string { + return this.prefix; + } + + /** + * Get a value using a raw Redis key (bypassing the namespace prefix) + * Delegates to the underlying cache's getRaw method if available + */ + async getRaw(key: string): Promise { + if (this.cache.getRaw) { + return this.cache.getRaw(key); + } + // Fallback for caches that don't implement getRaw + return null; + } +} diff --git a/libs/core/config/src/config-manager.ts b/libs/core/config/src/config-manager.ts index 5da0b44..76ee474 100644 --- a/libs/core/config/src/config-manager.ts +++ b/libs/core/config/src/config-manager.ts @@ 
-1,9 +1,9 @@ import { join } from 'path'; import { z } from 'zod'; +import { getLogger } from '@stock-bot/logger'; import { EnvLoader } from './loaders/env.loader'; import { FileLoader } from './loaders/file.loader'; import { ConfigError, ConfigValidationError } from './errors'; -import { getLogger } from '@stock-bot/logger'; import type { ConfigLoader, ConfigManagerOptions, @@ -82,9 +82,9 @@ export class ConfigManager> { expected: (err as any).expected, received: (err as any).received, })); - + this.logger.error('Configuration validation failed:', errorDetails); - + throw new ConfigValidationError('Configuration validation failed', error.errors); } throw error; diff --git a/libs/core/config/src/index.ts b/libs/core/config/src/index.ts index ecf9268..80d6511 100644 --- a/libs/core/config/src/index.ts +++ b/libs/core/config/src/index.ts @@ -1,10 +1,10 @@ // Import necessary types +import { z } from 'zod'; import { EnvLoader } from './loaders/env.loader'; import { FileLoader } from './loaders/file.loader'; import { ConfigManager } from './config-manager'; import type { BaseAppConfig } from './schemas'; import { baseAppSchema } from './schemas'; -import { z } from 'zod'; // Legacy singleton instance for backward compatibility let configInstance: ConfigManager | null = null; @@ -56,7 +56,6 @@ function loadCriticalEnvVarsSync(): void { // Load critical env vars immediately loadCriticalEnvVarsSync(); - /** * Initialize configuration for a service in a monorepo. 
* Automatically loads configs from: @@ -121,8 +120,6 @@ export function getLogConfig() { return getConfig().log; } - - export function getQueueConfig() { return getConfig().queue; } diff --git a/libs/core/config/src/schemas/__tests__/unified-app.test.ts b/libs/core/config/src/schemas/__tests__/unified-app.test.ts index aed96fa..a294037 100644 --- a/libs/core/config/src/schemas/__tests__/unified-app.test.ts +++ b/libs/core/config/src/schemas/__tests__/unified-app.test.ts @@ -1,5 +1,5 @@ import { describe, expect, it } from 'bun:test'; -import { unifiedAppSchema, toUnifiedConfig, getStandardServiceName } from '../unified-app.schema'; +import { getStandardServiceName, toUnifiedConfig, unifiedAppSchema } from '../unified-app.schema'; describe('UnifiedAppConfig', () => { describe('getStandardServiceName', () => { @@ -74,13 +74,13 @@ describe('UnifiedAppConfig', () => { }; const result = unifiedAppSchema.parse(config); - + // Should have both nested and flat structure expect(result.postgres).toBeDefined(); expect(result.mongodb).toBeDefined(); expect(result.database?.postgres).toBeDefined(); expect(result.database?.mongodb).toBeDefined(); - + // Values should match expect(result.postgres?.host).toBe('localhost'); expect(result.postgres?.port).toBe(5432); @@ -144,7 +144,7 @@ describe('UnifiedAppConfig', () => { }; const unified = toUnifiedConfig(stockBotConfig); - + expect(unified.service.serviceName).toBe('data-ingestion'); expect(unified.redis).toBeDefined(); expect(unified.redis?.host).toBe('localhost'); @@ -152,4 +152,4 @@ describe('UnifiedAppConfig', () => { expect(unified.postgres?.host).toBe('localhost'); }); }); -}); \ No newline at end of file +}); diff --git a/libs/core/config/src/schemas/base-app.schema.ts b/libs/core/config/src/schemas/base-app.schema.ts index 0167e35..1e516b4 100644 --- a/libs/core/config/src/schemas/base-app.schema.ts +++ b/libs/core/config/src/schemas/base-app.schema.ts @@ -1,61 +1,63 @@ -import { z } from 'zod'; -import { environmentSchema 
} from './base.schema'; -import { - postgresConfigSchema, - mongodbConfigSchema, - questdbConfigSchema, - dragonflyConfigSchema -} from './database.schema'; -import { - serviceConfigSchema, - loggingConfigSchema, - queueConfigSchema, - httpConfigSchema, - webshareConfigSchema, - browserConfigSchema, - proxyConfigSchema -} from './service.schema'; - -/** - * Generic base application schema that can be extended by specific apps - */ -export const baseAppSchema = z.object({ - // Basic app info - name: z.string(), - version: z.string(), - environment: environmentSchema.default('development'), - - // Service configuration - service: serviceConfigSchema, - - // Logging configuration - log: loggingConfigSchema, - - // Database configuration - apps can choose which databases they need - database: z.object({ - postgres: postgresConfigSchema.optional(), - mongodb: mongodbConfigSchema.optional(), - questdb: questdbConfigSchema.optional(), - dragonfly: dragonflyConfigSchema.optional(), - }).optional(), - - // Redis configuration (used for cache and queue) - redis: dragonflyConfigSchema.optional(), - - // Queue configuration - queue: queueConfigSchema.optional(), - - // HTTP client configuration - http: httpConfigSchema.optional(), - - // WebShare proxy configuration - webshare: webshareConfigSchema.optional(), - - // Browser configuration - browser: browserConfigSchema.optional(), - - // Proxy manager configuration - proxy: proxyConfigSchema.optional(), -}); - -export type BaseAppConfig = z.infer; \ No newline at end of file +import { z } from 'zod'; +import { environmentSchema } from './base.schema'; +import { + dragonflyConfigSchema, + mongodbConfigSchema, + postgresConfigSchema, + questdbConfigSchema, +} from './database.schema'; +import { + browserConfigSchema, + httpConfigSchema, + loggingConfigSchema, + proxyConfigSchema, + queueConfigSchema, + serviceConfigSchema, + webshareConfigSchema, +} from './service.schema'; + +/** + * Generic base application schema that can be 
extended by specific apps + */ +export const baseAppSchema = z.object({ + // Basic app info + name: z.string(), + version: z.string(), + environment: environmentSchema.default('development'), + + // Service configuration + service: serviceConfigSchema, + + // Logging configuration + log: loggingConfigSchema, + + // Database configuration - apps can choose which databases they need + database: z + .object({ + postgres: postgresConfigSchema.optional(), + mongodb: mongodbConfigSchema.optional(), + questdb: questdbConfigSchema.optional(), + dragonfly: dragonflyConfigSchema.optional(), + }) + .optional(), + + // Redis configuration (used for cache and queue) + redis: dragonflyConfigSchema.optional(), + + // Queue configuration + queue: queueConfigSchema.optional(), + + // HTTP client configuration + http: httpConfigSchema.optional(), + + // WebShare proxy configuration + webshare: webshareConfigSchema.optional(), + + // Browser configuration + browser: browserConfigSchema.optional(), + + // Proxy manager configuration + proxy: proxyConfigSchema.optional(), +}); + +export type BaseAppConfig = z.infer; diff --git a/libs/core/config/src/schemas/index.ts b/libs/core/config/src/schemas/index.ts index 23cf90d..0b486d4 100644 --- a/libs/core/config/src/schemas/index.ts +++ b/libs/core/config/src/schemas/index.ts @@ -15,4 +15,3 @@ export type { BaseAppConfig } from './base-app.schema'; // Export unified schema for standardized configuration export { unifiedAppSchema, toUnifiedConfig, getStandardServiceName } from './unified-app.schema'; export type { UnifiedAppConfig } from './unified-app.schema'; - diff --git a/libs/core/config/src/schemas/service.schema.ts b/libs/core/config/src/schemas/service.schema.ts index 5ff474e..004725e 100644 --- a/libs/core/config/src/schemas/service.schema.ts +++ b/libs/core/config/src/schemas/service.schema.ts @@ -100,8 +100,10 @@ export const proxyConfigSchema = z.object({ enabled: z.boolean().default(false), cachePrefix: 
z.string().default('proxy:'), ttl: z.number().default(3600), - webshare: z.object({ - apiKey: z.string(), - apiUrl: z.string().default('https://proxy.webshare.io/api/v2/'), - }).optional(), + webshare: z + .object({ + apiKey: z.string(), + apiUrl: z.string().default('https://proxy.webshare.io/api/v2/'), + }) + .optional(), }); diff --git a/libs/core/config/src/schemas/unified-app.schema.ts b/libs/core/config/src/schemas/unified-app.schema.ts index 9fcf40c..3fbfb45 100644 --- a/libs/core/config/src/schemas/unified-app.schema.ts +++ b/libs/core/config/src/schemas/unified-app.schema.ts @@ -1,62 +1,67 @@ import { z } from 'zod'; import { baseAppSchema } from './base-app.schema'; -import { - postgresConfigSchema, - mongodbConfigSchema, - questdbConfigSchema, - dragonflyConfigSchema +import { + dragonflyConfigSchema, + mongodbConfigSchema, + postgresConfigSchema, + questdbConfigSchema, } from './database.schema'; /** * Unified application configuration schema that provides both nested and flat access * to database configurations for backward compatibility while maintaining a clean structure */ -export const unifiedAppSchema = baseAppSchema.extend({ - // Flat database configs for DI system (these take precedence) - redis: dragonflyConfigSchema.optional(), - mongodb: mongodbConfigSchema.optional(), - postgres: postgresConfigSchema.optional(), - questdb: questdbConfigSchema.optional(), -}).transform((data) => { - // Ensure service.serviceName is set from service.name if not provided - if (data.service && !data.service.serviceName) { - data.service.serviceName = data.service.name.replace(/([A-Z])/g, '-$1').toLowerCase().replace(/^-/, ''); - } +export const unifiedAppSchema = baseAppSchema + .extend({ + // Flat database configs for DI system (these take precedence) + redis: dragonflyConfigSchema.optional(), + mongodb: mongodbConfigSchema.optional(), + postgres: postgresConfigSchema.optional(), + questdb: questdbConfigSchema.optional(), + }) + .transform(data => { + // Ensure 
service.serviceName is set from service.name if not provided + if (data.service && !data.service.serviceName) { + data.service.serviceName = data.service.name + .replace(/([A-Z])/g, '-$1') + .toLowerCase() + .replace(/^-/, ''); + } - // If flat configs exist, ensure they're also in the nested database object - if (data.redis || data.mongodb || data.postgres || data.questdb) { - data.database = { - ...data.database, - dragonfly: data.redis || data.database?.dragonfly, - mongodb: data.mongodb || data.database?.mongodb, - postgres: data.postgres || data.database?.postgres, - questdb: data.questdb || data.database?.questdb, - }; - } + // If flat configs exist, ensure they're also in the nested database object + if (data.redis || data.mongodb || data.postgres || data.questdb) { + data.database = { + ...data.database, + dragonfly: data.redis || data.database?.dragonfly, + mongodb: data.mongodb || data.database?.mongodb, + postgres: data.postgres || data.database?.postgres, + questdb: data.questdb || data.database?.questdb, + }; + } - // If nested configs exist but flat ones don't, copy them to flat structure - if (data.database) { - if (data.database.dragonfly && !data.redis) { - data.redis = data.database.dragonfly; - } - if (data.database.mongodb && !data.mongodb) { - data.mongodb = data.database.mongodb; - } - if (data.database.postgres && !data.postgres) { - data.postgres = data.database.postgres; - } - if (data.database.questdb && !data.questdb) { - // Handle the ilpPort -> influxPort mapping for DI system - const questdbConfig = { ...data.database.questdb }; - if ('ilpPort' in questdbConfig && !('influxPort' in questdbConfig)) { - (questdbConfig as any).influxPort = questdbConfig.ilpPort; + // If nested configs exist but flat ones don't, copy them to flat structure + if (data.database) { + if (data.database.dragonfly && !data.redis) { + data.redis = data.database.dragonfly; + } + if (data.database.mongodb && !data.mongodb) { + data.mongodb = data.database.mongodb; 
+ } + if (data.database.postgres && !data.postgres) { + data.postgres = data.database.postgres; + } + if (data.database.questdb && !data.questdb) { + // Handle the ilpPort -> influxPort mapping for DI system + const questdbConfig = { ...data.database.questdb }; + if ('ilpPort' in questdbConfig && !('influxPort' in questdbConfig)) { + (questdbConfig as any).influxPort = questdbConfig.ilpPort; + } + data.questdb = questdbConfig; } - data.questdb = questdbConfig; } - } - return data; -}); + return data; + }); export type UnifiedAppConfig = z.infer; @@ -72,5 +77,8 @@ export function toUnifiedConfig(config: any): UnifiedAppConfig { */ export function getStandardServiceName(serviceName: string): string { // Convert camelCase to kebab-case - return serviceName.replace(/([A-Z])/g, '-$1').toLowerCase().replace(/^-/, ''); -} \ No newline at end of file + return serviceName + .replace(/([A-Z])/g, '-$1') + .toLowerCase() + .replace(/^-/, ''); +} diff --git a/libs/core/di/package.json b/libs/core/di/package.json index c070f8c..fe66baf 100644 --- a/libs/core/di/package.json +++ b/libs/core/di/package.json @@ -20,6 +20,8 @@ "@stock-bot/queue": "workspace:*", "@stock-bot/shutdown": "workspace:*", "@stock-bot/handlers": "workspace:*", + "@stock-bot/handler-registry": "workspace:*", + "glob": "^10.0.0", "zod": "^3.23.8", "hono": "^4.0.0", "awilix": "^12.0.5" diff --git a/libs/core/di/src/awilix-container.ts b/libs/core/di/src/awilix-container.ts index 610d63b..1c779d3 100644 --- a/libs/core/di/src/awilix-container.ts +++ b/libs/core/di/src/awilix-container.ts @@ -3,16 +3,16 @@ * Creates a decoupled, reusable dependency injection container */ +import { type AwilixContainer } from 'awilix'; import type { Browser } from '@stock-bot/browser'; import type { CacheProvider } from '@stock-bot/cache'; -import type { IServiceContainer } from '@stock-bot/types'; import type { Logger } from '@stock-bot/logger'; import type { MongoDBClient } from '@stock-bot/mongodb'; import type { 
PostgreSQLClient } from '@stock-bot/postgres'; import type { ProxyManager } from '@stock-bot/proxy'; import type { QuestDBClient } from '@stock-bot/questdb'; import type { QueueManager } from '@stock-bot/queue'; -import { type AwilixContainer } from 'awilix'; +import type { IServiceContainer } from '@stock-bot/types'; import type { AppConfig } from './config/schemas'; // Re-export for backward compatibility @@ -41,8 +41,6 @@ export interface ServiceDefinitions { serviceContainer: IServiceContainer; } - - // Export typed container export type ServiceContainer = AwilixContainer; export type ServiceCradle = ServiceDefinitions; @@ -59,5 +57,3 @@ export interface ServiceContainerOptions { enableBrowser?: boolean; enableProxy?: boolean; } - - diff --git a/libs/core/di/src/config/schemas/index.ts b/libs/core/di/src/config/schemas/index.ts index bb6f6e6..c1e1a29 100644 --- a/libs/core/di/src/config/schemas/index.ts +++ b/libs/core/di/src/config/schemas/index.ts @@ -1,9 +1,9 @@ import { z } from 'zod'; -import { redisConfigSchema } from './redis.schema'; import { mongodbConfigSchema } from './mongodb.schema'; import { postgresConfigSchema } from './postgres.schema'; import { questdbConfigSchema } from './questdb.schema'; -import { proxyConfigSchema, browserConfigSchema, queueConfigSchema } from './service.schema'; +import { redisConfigSchema } from './redis.schema'; +import { browserConfigSchema, proxyConfigSchema, queueConfigSchema } from './service.schema'; export const appConfigSchema = z.object({ redis: redisConfigSchema, @@ -13,11 +13,13 @@ export const appConfigSchema = z.object({ proxy: proxyConfigSchema.optional(), browser: browserConfigSchema.optional(), queue: queueConfigSchema.optional(), - service: z.object({ - name: z.string(), - serviceName: z.string().optional(), // Standard kebab-case service name - port: z.number().optional(), - }).optional(), + service: z + .object({ + name: z.string(), + serviceName: z.string().optional(), // Standard kebab-case service 
name + port: z.number().optional(), + }) + .optional(), }); export type AppConfig = z.infer; @@ -27,4 +29,4 @@ export * from './redis.schema'; export * from './mongodb.schema'; export * from './postgres.schema'; export * from './questdb.schema'; -export * from './service.schema'; \ No newline at end of file +export * from './service.schema'; diff --git a/libs/core/di/src/config/schemas/mongodb.schema.ts b/libs/core/di/src/config/schemas/mongodb.schema.ts index b05cee5..e9832d0 100644 --- a/libs/core/di/src/config/schemas/mongodb.schema.ts +++ b/libs/core/di/src/config/schemas/mongodb.schema.ts @@ -1,9 +1,9 @@ -import { z } from 'zod'; - -export const mongodbConfigSchema = z.object({ - enabled: z.boolean().optional().default(true), - uri: z.string(), - database: z.string(), -}); - -export type MongoDBConfig = z.infer; \ No newline at end of file +import { z } from 'zod'; + +export const mongodbConfigSchema = z.object({ + enabled: z.boolean().optional().default(true), + uri: z.string(), + database: z.string(), +}); + +export type MongoDBConfig = z.infer; diff --git a/libs/core/di/src/config/schemas/postgres.schema.ts b/libs/core/di/src/config/schemas/postgres.schema.ts index ecb3e93..6052193 100644 --- a/libs/core/di/src/config/schemas/postgres.schema.ts +++ b/libs/core/di/src/config/schemas/postgres.schema.ts @@ -1,12 +1,12 @@ -import { z } from 'zod'; - -export const postgresConfigSchema = z.object({ - enabled: z.boolean().optional().default(true), - host: z.string().default('localhost'), - port: z.number().default(5432), - database: z.string(), - user: z.string(), - password: z.string(), -}); - -export type PostgresConfig = z.infer; \ No newline at end of file +import { z } from 'zod'; + +export const postgresConfigSchema = z.object({ + enabled: z.boolean().optional().default(true), + host: z.string().default('localhost'), + port: z.number().default(5432), + database: z.string(), + user: z.string(), + password: z.string(), +}); + +export type PostgresConfig = 
z.infer; diff --git a/libs/core/di/src/config/schemas/questdb.schema.ts b/libs/core/di/src/config/schemas/questdb.schema.ts index cff9160..1c9f8d5 100644 --- a/libs/core/di/src/config/schemas/questdb.schema.ts +++ b/libs/core/di/src/config/schemas/questdb.schema.ts @@ -1,12 +1,12 @@ -import { z } from 'zod'; - -export const questdbConfigSchema = z.object({ - enabled: z.boolean().optional().default(true), - host: z.string().default('localhost'), - httpPort: z.number().optional().default(9000), - pgPort: z.number().optional().default(8812), - influxPort: z.number().optional().default(9009), - database: z.string().optional().default('questdb'), -}); - -export type QuestDBConfig = z.infer; \ No newline at end of file +import { z } from 'zod'; + +export const questdbConfigSchema = z.object({ + enabled: z.boolean().optional().default(true), + host: z.string().default('localhost'), + httpPort: z.number().optional().default(9000), + pgPort: z.number().optional().default(8812), + influxPort: z.number().optional().default(9009), + database: z.string().optional().default('questdb'), +}); + +export type QuestDBConfig = z.infer; diff --git a/libs/core/di/src/config/schemas/redis.schema.ts b/libs/core/di/src/config/schemas/redis.schema.ts index 79b057f..fd08b8a 100644 --- a/libs/core/di/src/config/schemas/redis.schema.ts +++ b/libs/core/di/src/config/schemas/redis.schema.ts @@ -1,12 +1,12 @@ -import { z } from 'zod'; - -export const redisConfigSchema = z.object({ - enabled: z.boolean().optional().default(true), - host: z.string().default('localhost'), - port: z.number().default(6379), - password: z.string().optional(), - username: z.string().optional(), - db: z.number().optional().default(0), -}); - -export type RedisConfig = z.infer; \ No newline at end of file +import { z } from 'zod'; + +export const redisConfigSchema = z.object({ + enabled: z.boolean().optional().default(true), + host: z.string().default('localhost'), + port: z.number().default(6379), + password: 
z.string().optional(), + username: z.string().optional(), + db: z.number().optional().default(0), +}); + +export type RedisConfig = z.infer; diff --git a/libs/core/di/src/config/schemas/service.schema.ts b/libs/core/di/src/config/schemas/service.schema.ts index 47420af..b16fe53 100644 --- a/libs/core/di/src/config/schemas/service.schema.ts +++ b/libs/core/di/src/config/schemas/service.schema.ts @@ -4,10 +4,12 @@ export const proxyConfigSchema = z.object({ enabled: z.boolean().default(false), cachePrefix: z.string().optional().default('proxy:'), ttl: z.number().optional().default(3600), - webshare: z.object({ - apiKey: z.string(), - apiUrl: z.string().default('https://proxy.webshare.io/api/v2/'), - }).optional(), + webshare: z + .object({ + apiKey: z.string(), + apiUrl: z.string().default('https://proxy.webshare.io/api/v2/'), + }) + .optional(), }); export const browserConfigSchema = z.object({ @@ -21,18 +23,23 @@ export const queueConfigSchema = z.object({ concurrency: z.number().optional().default(1), enableScheduledJobs: z.boolean().optional().default(true), delayWorkerStart: z.boolean().optional().default(false), - defaultJobOptions: z.object({ - attempts: z.number().default(3), - backoff: z.object({ - type: z.enum(['exponential', 'fixed']).default('exponential'), - delay: z.number().default(1000), - }).default({}), - removeOnComplete: z.number().default(100), - removeOnFail: z.number().default(50), - timeout: z.number().optional(), - }).optional().default({}), + defaultJobOptions: z + .object({ + attempts: z.number().default(3), + backoff: z + .object({ + type: z.enum(['exponential', 'fixed']).default('exponential'), + delay: z.number().default(1000), + }) + .default({}), + removeOnComplete: z.number().default(100), + removeOnFail: z.number().default(50), + timeout: z.number().optional(), + }) + .optional() + .default({}), }); export type ProxyConfig = z.infer; export type BrowserConfig = z.infer; -export type QueueConfig = z.infer; \ No newline at end of file 
+export type QueueConfig = z.infer; diff --git a/libs/core/di/src/container/builder.ts b/libs/core/di/src/container/builder.ts index abb20e3..9dffb6a 100644 --- a/libs/core/di/src/container/builder.ts +++ b/libs/core/di/src/container/builder.ts @@ -1,15 +1,17 @@ -import { createContainer, InjectionMode, asFunction, type AwilixContainer } from 'awilix'; +import { asClass, asFunction, createContainer, InjectionMode, type AwilixContainer } from 'awilix'; import type { BaseAppConfig as StockBotAppConfig, UnifiedAppConfig } from '@stock-bot/config'; -import { appConfigSchema, type AppConfig } from '../config/schemas'; import { toUnifiedConfig } from '@stock-bot/config'; -import { - registerCoreServices, +import { HandlerRegistry } from '@stock-bot/handler-registry'; +import { appConfigSchema, type AppConfig } from '../config/schemas'; +import { + registerApplicationServices, registerCacheServices, + registerCoreServices, registerDatabaseServices, - registerApplicationServices } from '../registrations'; +import { HandlerScanner } from '../scanner'; import { ServiceLifecycleManager } from '../utils/lifecycle'; -import type { ServiceDefinitions, ContainerBuildOptions } from './types'; +import type { ContainerBuildOptions, ServiceDefinitions } from './types'; export class ServiceContainerBuilder { private config: Partial = {}; @@ -38,7 +40,10 @@ export class ServiceContainerBuilder { return this; } - enableService(service: keyof Omit, enabled = true): this { + enableService( + service: keyof Omit, + enabled = true + ): this { this.options[service] = enabled; return this; } @@ -51,7 +56,7 @@ export class ServiceContainerBuilder { async build(): Promise> { // Validate and prepare config const validatedConfig = this.prepareConfig(); - + // Create container const container = createContainer({ injectionMode: InjectionMode.PROXY, @@ -77,17 +82,19 @@ export class ServiceContainerBuilder { private applyServiceOptions(config: Partial): AppConfig { // Ensure questdb config has the 
right field names for DI - const questdbConfig = config.questdb ? { - ...config.questdb, - influxPort: (config.questdb as any).influxPort || (config.questdb as any).ilpPort || 9009, - } : { - enabled: true, - host: 'localhost', - httpPort: 9000, - pgPort: 8812, - influxPort: 9009, - database: 'questdb', - }; + const questdbConfig = config.questdb + ? { + ...config.questdb, + influxPort: (config.questdb as any).influxPort || (config.questdb as any).ilpPort || 9009, + } + : { + enabled: true, + host: 'localhost', + httpPort: 9000, + pgPort: 8812, + influxPort: 9009, + database: 'questdb', + }; return { redis: config.redis || { @@ -110,61 +117,88 @@ export class ServiceContainerBuilder { password: 'postgres', }, questdb: this.options.enableQuestDB ? questdbConfig : undefined, - proxy: this.options.enableProxy ? (config.proxy || { enabled: false, cachePrefix: 'proxy:', ttl: 3600 }) : undefined, - browser: this.options.enableBrowser ? (config.browser || { headless: true, timeout: 30000 }) : undefined, - queue: this.options.enableQueue ? (config.queue || { - enabled: true, - workers: 1, - concurrency: 1, - enableScheduledJobs: true, - delayWorkerStart: false, - defaultJobOptions: { - attempts: 3, - backoff: { type: 'exponential' as const, delay: 1000 }, - removeOnComplete: 100, - removeOnFail: 50, - } - }) : undefined, + proxy: this.options.enableProxy + ? config.proxy || { enabled: false, cachePrefix: 'proxy:', ttl: 3600 } + : undefined, + browser: this.options.enableBrowser + ? config.browser || { headless: true, timeout: 30000 } + : undefined, + queue: this.options.enableQueue + ? 
config.queue || { + enabled: true, + workers: 1, + concurrency: 1, + enableScheduledJobs: true, + delayWorkerStart: false, + defaultJobOptions: { + attempts: 3, + backoff: { type: 'exponential' as const, delay: 1000 }, + removeOnComplete: 100, + removeOnFail: 50, + }, + } + : undefined, service: config.service, }; } - private registerServices(container: AwilixContainer, config: AppConfig): void { + private registerServices( + container: AwilixContainer, + config: AppConfig + ): void { + // Register handler infrastructure first + container.register({ + handlerRegistry: asClass(HandlerRegistry).singleton(), + handlerScanner: asClass(HandlerScanner).singleton(), + }); + registerCoreServices(container, config); registerCacheServices(container, config); registerDatabaseServices(container, config); registerApplicationServices(container, config); - + // Register service container aggregate container.register({ - serviceContainer: asFunction(({ - config: _config, logger, cache, globalCache, proxyManager, browser, - queueManager, mongoClient, postgresClient, questdbClient - }) => ({ - logger, - cache, - globalCache, - proxy: proxyManager, // Map proxyManager to proxy - browser, - queue: queueManager, // Map queueManager to queue - mongodb: mongoClient, // Map mongoClient to mongodb - postgres: postgresClient, // Map postgresClient to postgres - questdb: questdbClient, // Map questdbClient to questdb - })).singleton(), + serviceContainer: asFunction( + ({ + config: _config, + logger, + cache, + globalCache, + proxyManager, + browser, + queueManager, + mongoClient, + postgresClient, + questdbClient, + }) => ({ + logger, + cache, + globalCache, + proxy: proxyManager, // Map proxyManager to proxy + browser, + queue: queueManager, // Map queueManager to queue + mongodb: mongoClient, // Map mongoClient to mongodb + postgres: postgresClient, // Map postgresClient to postgres + questdb: questdbClient, // Map questdbClient to questdb + }) + ).singleton(), }); } private 
transformStockBotConfig(config: UnifiedAppConfig): Partial { // Unified config already has flat structure, just extract what we need // Handle questdb field name mapping - const questdb = config.questdb ? { - enabled: config.questdb.enabled || true, - host: config.questdb.host || 'localhost', - httpPort: config.questdb.httpPort || 9000, - pgPort: config.questdb.pgPort || 8812, - influxPort: (config.questdb as any).influxPort || (config.questdb as any).ilpPort || 9009, - database: config.questdb.database || 'questdb', - } : undefined; + const questdb = config.questdb + ? { + enabled: config.questdb.enabled ?? true, + host: config.questdb.host || 'localhost', + httpPort: config.questdb.httpPort || 9000, + pgPort: config.questdb.pgPort || 8812, + influxPort: (config.questdb as any).influxPort || (config.questdb as any).ilpPort || 9009, + database: config.questdb.database || 'questdb', + } + : undefined; return { redis: config.redis, @@ -177,4 +211,4 @@ export class ServiceContainerBuilder { service: config.service, }; } -} \ No newline at end of file +} diff --git a/libs/core/di/src/container/types.ts b/libs/core/di/src/container/types.ts index 70fe371..5d273be 100644 --- a/libs/core/di/src/container/types.ts +++ b/libs/core/di/src/container/types.ts @@ -1,48 +1,54 @@ -import type { Browser } from '@stock-bot/browser'; -import type { CacheProvider } from '@stock-bot/cache'; -import type { IServiceContainer } from '@stock-bot/types'; -import type { Logger } from '@stock-bot/logger'; -import type { MongoDBClient } from '@stock-bot/mongodb'; -import type { PostgreSQLClient } from '@stock-bot/postgres'; -import type { ProxyManager } from '@stock-bot/proxy'; -import type { QuestDBClient } from '@stock-bot/questdb'; -import type { SmartQueueManager } from '@stock-bot/queue'; -import type { AppConfig } from '../config/schemas'; - -export interface ServiceDefinitions { - // Configuration - config: AppConfig; - logger: Logger; - - // Core services - cache: CacheProvider | 
null; - globalCache: CacheProvider | null; - proxyManager: ProxyManager | null; - browser: Browser; - queueManager: SmartQueueManager | null; - - // Database clients - mongoClient: MongoDBClient | null; - postgresClient: PostgreSQLClient | null; - questdbClient: QuestDBClient | null; - - // Aggregate service container - serviceContainer: IServiceContainer; -} - -export type ServiceCradle = ServiceDefinitions; - -export interface ServiceContainerOptions { - enableQuestDB?: boolean; - enableMongoDB?: boolean; - enablePostgres?: boolean; - enableCache?: boolean; - enableQueue?: boolean; - enableBrowser?: boolean; - enableProxy?: boolean; -} - -export interface ContainerBuildOptions extends ServiceContainerOptions { - skipInitialization?: boolean; - initializationTimeout?: number; -} \ No newline at end of file +import type { Browser } from '@stock-bot/browser'; +import type { CacheProvider } from '@stock-bot/cache'; +import type { HandlerRegistry } from '@stock-bot/handler-registry'; +import type { Logger } from '@stock-bot/logger'; +import type { MongoDBClient } from '@stock-bot/mongodb'; +import type { PostgreSQLClient } from '@stock-bot/postgres'; +import type { ProxyManager } from '@stock-bot/proxy'; +import type { QuestDBClient } from '@stock-bot/questdb'; +import type { SmartQueueManager } from '@stock-bot/queue'; +import type { IServiceContainer } from '@stock-bot/types'; +import type { AppConfig } from '../config/schemas'; +import type { HandlerScanner } from '../scanner'; + +export interface ServiceDefinitions { + // Configuration + config: AppConfig; + logger: Logger; + + // Handler infrastructure + handlerRegistry: HandlerRegistry; + handlerScanner: HandlerScanner; + + // Core services + cache: CacheProvider | null; + globalCache: CacheProvider | null; + proxyManager: ProxyManager | null; + browser: Browser; + queueManager: SmartQueueManager | null; + + // Database clients + mongoClient: MongoDBClient | null; + postgresClient: PostgreSQLClient | null; + 
questdbClient: QuestDBClient | null; + + // Aggregate service container + serviceContainer: IServiceContainer; +} + +export type ServiceCradle = ServiceDefinitions; + +export interface ServiceContainerOptions { + enableQuestDB?: boolean; + enableMongoDB?: boolean; + enablePostgres?: boolean; + enableCache?: boolean; + enableQueue?: boolean; + enableBrowser?: boolean; + enableProxy?: boolean; +} + +export interface ContainerBuildOptions extends ServiceContainerOptions { + skipInitialization?: boolean; + initializationTimeout?: number; +} diff --git a/libs/core/di/src/factories/cache.factory.ts b/libs/core/di/src/factories/cache.factory.ts index 819bfb3..c649ec9 100644 --- a/libs/core/di/src/factories/cache.factory.ts +++ b/libs/core/di/src/factories/cache.factory.ts @@ -3,10 +3,7 @@ import { NamespacedCache, type CacheProvider } from '@stock-bot/cache'; import type { ServiceDefinitions } from '../container/types'; export class CacheFactory { - static createNamespacedCache( - baseCache: CacheProvider, - namespace: string - ): NamespacedCache { + static createNamespacedCache(baseCache: CacheProvider, namespace: string): NamespacedCache { return new NamespacedCache(baseCache, namespace); } @@ -15,8 +12,10 @@ export class CacheFactory { serviceName: string ): CacheProvider | null { const baseCache = container.cradle.cache; - if (!baseCache) {return null;} - + if (!baseCache) { + return null; + } + return this.createNamespacedCache(baseCache, serviceName); } @@ -25,8 +24,10 @@ export class CacheFactory { handlerName: string ): CacheProvider | null { const baseCache = container.cradle.cache; - if (!baseCache) {return null;} - + if (!baseCache) { + return null; + } + return this.createNamespacedCache(baseCache, `handler:${handlerName}`); } @@ -35,10 +36,12 @@ export class CacheFactory { prefix: string ): CacheProvider | null { const baseCache = container.cradle.cache; - if (!baseCache) {return null;} - + if (!baseCache) { + return null; + } + // Remove 'cache:' prefix if 
already included const cleanPrefix = prefix.replace(/^cache:/, ''); return this.createNamespacedCache(baseCache, cleanPrefix); } -} \ No newline at end of file +} diff --git a/libs/core/di/src/factories/index.ts b/libs/core/di/src/factories/index.ts index 83df8b5..1d5232e 100644 --- a/libs/core/di/src/factories/index.ts +++ b/libs/core/di/src/factories/index.ts @@ -1 +1 @@ -export { CacheFactory } from './cache.factory'; \ No newline at end of file +export { CacheFactory } from './cache.factory'; diff --git a/libs/core/di/src/index.ts b/libs/core/di/src/index.ts index 27f5881..37ec6a1 100644 --- a/libs/core/di/src/index.ts +++ b/libs/core/di/src/index.ts @@ -33,3 +33,7 @@ export { type ServiceApplicationConfig, type ServiceLifecycleHooks, } from './service-application'; + +// Handler scanner +export { HandlerScanner } from './scanner'; +export type { HandlerScannerOptions } from './scanner'; diff --git a/libs/core/di/src/registrations/cache.registration.ts b/libs/core/di/src/registrations/cache.registration.ts index aa9c44c..4becde0 100644 --- a/libs/core/di/src/registrations/cache.registration.ts +++ b/libs/core/di/src/registrations/cache.registration.ts @@ -12,26 +12,34 @@ export function registerCacheServices( const { createServiceCache } = require('@stock-bot/queue'); // Get standardized service name from config const serviceName = config.service?.serviceName || config.service?.name || 'unknown'; - + // Create service-specific cache that uses the service's Redis DB - return createServiceCache(serviceName, { - host: config.redis.host, - port: config.redis.port, - password: config.redis.password, - db: config.redis.db, // This will be overridden by ServiceCache - }, { logger }); + return createServiceCache( + serviceName, + { + host: config.redis.host, + port: config.redis.port, + password: config.redis.password, + db: config.redis.db, // This will be overridden by ServiceCache + }, + { logger } + ); }).singleton(), - + // Also provide global cache for shared 
data globalCache: asFunction(({ logger }) => { const { createServiceCache } = require('@stock-bot/queue'); const serviceName = config.service?.serviceName || config.service?.name || 'unknown'; - - return createServiceCache(serviceName, { - host: config.redis.host, - port: config.redis.port, - password: config.redis.password, - }, { global: true, logger }); + + return createServiceCache( + serviceName, + { + host: config.redis.host, + port: config.redis.port, + password: config.redis.password, + }, + { global: true, logger } + ); }).singleton(), }); } else { @@ -40,4 +48,4 @@ export function registerCacheServices( globalCache: asValue(null), }); } -} \ No newline at end of file +} diff --git a/libs/core/di/src/registrations/core.registration.ts b/libs/core/di/src/registrations/core.registration.ts index 26600c8..19269ff 100644 --- a/libs/core/di/src/registrations/core.registration.ts +++ b/libs/core/di/src/registrations/core.registration.ts @@ -11,4 +11,4 @@ export function registerCoreServices( config: asValue(config), logger: asValue(getLogger('di-container')), }); -} \ No newline at end of file +} diff --git a/libs/core/di/src/registrations/database.registration.ts b/libs/core/di/src/registrations/database.registration.ts index e479213..610da3b 100644 --- a/libs/core/di/src/registrations/database.registration.ts +++ b/libs/core/di/src/registrations/database.registration.ts @@ -1,7 +1,7 @@ +import { asFunction, asValue, type AwilixContainer } from 'awilix'; import { MongoDBClient } from '@stock-bot/mongodb'; import { PostgreSQLClient } from '@stock-bot/postgres'; import { QuestDBClient } from '@stock-bot/questdb'; -import { asFunction, asValue, type AwilixContainer } from 'awilix'; import type { AppConfig } from '../config/schemas'; import type { ServiceDefinitions } from '../container/types'; @@ -14,7 +14,9 @@ export function registerDatabaseServices( container.register({ mongoClient: asFunction(({ logger }) => { // Parse MongoDB URI to extract components - const 
uriMatch = config.mongodb.uri.match(/mongodb:\/\/(?:([^:]+):([^@]+)@)?([^:/]+):(\d+)\/([^?]+)(?:\?authSource=(.+))?/); + const uriMatch = config.mongodb.uri.match( + /mongodb:\/\/(?:([^:]+):([^@]+)@)?([^:/]+):(\d+)\/([^?]+)(?:\?authSource=(.+))?/ + ); const mongoConfig = { host: uriMatch?.[3] || 'localhost', port: parseInt(uriMatch?.[4] || '27017'), @@ -44,9 +46,9 @@ export function registerDatabaseServices( username: config.postgres.user, password: String(config.postgres.password), // Ensure password is a string }; - - logger.debug('PostgreSQL config:', { - ...pgConfig, + + logger.debug('PostgreSQL config:', { + ...pgConfig, password: pgConfig.password ? '***' : 'NO_PASSWORD', }); return new PostgreSQLClient(pgConfig, logger); @@ -79,4 +81,4 @@ export function registerDatabaseServices( questdbClient: asValue(null), }); } -} \ No newline at end of file +} diff --git a/libs/core/di/src/registrations/index.ts b/libs/core/di/src/registrations/index.ts index db37593..610a68d 100644 --- a/libs/core/di/src/registrations/index.ts +++ b/libs/core/di/src/registrations/index.ts @@ -1,4 +1,4 @@ -export { registerCoreServices } from './core.registration'; -export { registerCacheServices } from './cache.registration'; -export { registerDatabaseServices } from './database.registration'; -export { registerApplicationServices } from './service.registration'; \ No newline at end of file +export { registerCoreServices } from './core.registration'; +export { registerCacheServices } from './cache.registration'; +export { registerDatabaseServices } from './database.registration'; +export { registerApplicationServices } from './service.registration'; diff --git a/libs/core/di/src/registrations/service.registration.ts b/libs/core/di/src/registrations/service.registration.ts index a450395..ed2d588 100644 --- a/libs/core/di/src/registrations/service.registration.ts +++ b/libs/core/di/src/registrations/service.registration.ts @@ -44,9 +44,9 @@ export function registerApplicationServices( 
enableMetrics: true, logger, }); - + const proxyManager = new ProxyManager(proxyCache, config.proxy, logger); - + // Note: Initialization will be handled by the lifecycle manager return proxyManager; }).singleton(), @@ -60,7 +60,7 @@ export function registerApplicationServices( // Queue Manager if (config.queue?.enabled && config.redis.enabled) { container.register({ - queueManager: asFunction(({ logger }) => { + queueManager: asFunction(({ logger, handlerRegistry }) => { const { SmartQueueManager } = require('@stock-bot/queue'); const queueConfig = { serviceName: config.service?.serviceName || config.service?.name || 'unknown', @@ -79,7 +79,7 @@ export function registerApplicationServices( delayWorkerStart: config.queue!.delayWorkerStart ?? false, autoDiscoverHandlers: true, }; - return new SmartQueueManager(queueConfig, logger); + return new SmartQueueManager(queueConfig, handlerRegistry, logger); }).singleton(), }); } else { @@ -87,4 +87,4 @@ export function registerApplicationServices( queueManager: asValue(null), }); } -} \ No newline at end of file +} diff --git a/libs/core/di/src/scanner/handler-scanner.ts b/libs/core/di/src/scanner/handler-scanner.ts new file mode 100644 index 0000000..40bc994 --- /dev/null +++ b/libs/core/di/src/scanner/handler-scanner.ts @@ -0,0 +1,201 @@ +/** + * Handler Scanner + * Discovers and registers handlers with the DI container + */ + +import { asClass, type AwilixContainer } from 'awilix'; +import { glob } from 'glob'; +import type { + HandlerConfiguration, + HandlerMetadata, + HandlerRegistry, +} from '@stock-bot/handler-registry'; +import { createJobHandler } from '@stock-bot/handlers'; +import { getLogger } from '@stock-bot/logger'; +import type { ExecutionContext, IHandler } from '@stock-bot/types'; + +export interface HandlerScannerOptions { + serviceName?: string; + autoRegister?: boolean; + patterns?: string[]; +} + +export class HandlerScanner { + private logger = getLogger('handler-scanner'); + private 
discoveredHandlers = new Map(); + + constructor( + private registry: HandlerRegistry, + private container: AwilixContainer, + private options: HandlerScannerOptions = {} + ) {} + + /** + * Scan for handlers matching the given patterns + */ + async scanHandlers(patterns: string[] = this.options.patterns || []): Promise { + this.logger.info('Starting handler scan', { patterns }); + + for (const pattern of patterns) { + const files = await glob(pattern, { absolute: true }); + this.logger.debug(`Found ${files.length} files for pattern: ${pattern}`); + + for (const file of files) { + try { + await this.scanFile(file); + } catch (error) { + this.logger.error('Failed to scan file', { file, error }); + } + } + } + + this.logger.info('Handler scan complete', { + discovered: this.discoveredHandlers.size, + patterns, + }); + } + + /** + * Scan a single file for handlers + */ + private async scanFile(filePath: string): Promise { + try { + const module = await import(filePath); + this.registerHandlersFromModule(module, filePath); + } catch (error) { + this.logger.error('Failed to import module', { filePath, error }); + } + } + + /** + * Register handlers found in a module + */ + private registerHandlersFromModule(module: any, filePath: string): void { + for (const [exportName, exported] of Object.entries(module)) { + if (this.isHandler(exported)) { + this.registerHandler(exported, exportName, filePath); + } + } + } + + /** + * Check if an exported value is a handler + */ + private isHandler(exported: any): boolean { + if (typeof exported !== 'function') return false; + + // Check for handler metadata added by decorators + const hasHandlerName = !!(exported as any).__handlerName; + const hasOperations = Array.isArray((exported as any).__operations); + + return hasHandlerName && hasOperations; + } + + /** + * Register a handler with the registry and DI container + */ + private registerHandler(HandlerClass: any, exportName: string, filePath: string): void { + const handlerName = 
HandlerClass.__handlerName; + const operations = HandlerClass.__operations || []; + const schedules = HandlerClass.__schedules || []; + const isDisabled = HandlerClass.__disabled || false; + + if (isDisabled) { + this.logger.debug('Skipping disabled handler', { handlerName }); + return; + } + + // Build metadata + const metadata: HandlerMetadata = { + name: handlerName, + service: this.options.serviceName, + operations: operations.map((op: any) => ({ + name: op.name, + method: op.method, + })), + schedules: schedules.map((schedule: any) => ({ + operation: schedule.operation, + cronPattern: schedule.cronPattern, + priority: schedule.priority, + immediately: schedule.immediately, + description: schedule.description, + })), + }; + + // Build configuration with operation handlers + const operationHandlers: Record = {}; + for (const op of operations) { + operationHandlers[op.name] = createJobHandler(async payload => { + const handler = this.container.resolve(handlerName); + const context: ExecutionContext = { + type: 'queue', + metadata: { source: 'queue', timestamp: Date.now() }, + }; + return await handler.execute(op.name, payload, context); + }); + } + + const configuration: HandlerConfiguration = { + name: handlerName, + operations: operationHandlers, + scheduledJobs: schedules.map((schedule: any) => { + const operation = operations.find((op: any) => op.method === schedule.operation); + return { + type: `${handlerName}-${schedule.operation}`, + operation: operation?.name || schedule.operation, + cronPattern: schedule.cronPattern, + priority: schedule.priority || 5, + immediately: schedule.immediately || false, + description: schedule.description || `${handlerName} ${schedule.operation}`, + }; + }), + }; + + // Register with registry + this.registry.register(metadata, configuration); + + // Register with DI container if auto-register is enabled + if (this.options.autoRegister !== false) { + this.container.register({ + [handlerName]: asClass(HandlerClass).singleton(), 
+ }); + } + + // Track discovered handler + this.discoveredHandlers.set(handlerName, HandlerClass); + + this.logger.info('Registered handler', { + handlerName, + exportName, + filePath, + operations: operations.length, + schedules: schedules.length, + service: this.options.serviceName, + }); + } + + /** + * Get all discovered handlers + */ + getDiscoveredHandlers(): Map { + return new Map(this.discoveredHandlers); + } + + /** + * Manually register a handler class + */ + registerHandlerClass(HandlerClass: any, options: { serviceName?: string } = {}): void { + const serviceName = options.serviceName || this.options.serviceName; + const originalServiceName = this.options.serviceName; + + // Temporarily override service name if provided + if (serviceName) { + this.options.serviceName = serviceName; + } + + this.registerHandler(HandlerClass, HandlerClass.name, 'manual'); + + // Restore original service name + this.options.serviceName = originalServiceName; + } +} diff --git a/libs/core/di/src/scanner/index.ts b/libs/core/di/src/scanner/index.ts new file mode 100644 index 0000000..26306d7 --- /dev/null +++ b/libs/core/di/src/scanner/index.ts @@ -0,0 +1,2 @@ +export { HandlerScanner } from './handler-scanner'; +export type { HandlerScannerOptions } from './handler-scanner'; diff --git a/libs/core/di/src/service-application.ts b/libs/core/di/src/service-application.ts index e10c2a2..74b841e 100644 --- a/libs/core/di/src/service-application.ts +++ b/libs/core/di/src/service-application.ts @@ -5,12 +5,14 @@ import { Hono } from 'hono'; import { cors } from 'hono/cors'; -import { getLogger, setLoggerConfig, shutdownLoggers, type Logger } from '@stock-bot/logger'; -import { Shutdown } from '@stock-bot/shutdown'; import type { BaseAppConfig, UnifiedAppConfig } from '@stock-bot/config'; import { toUnifiedConfig } from '@stock-bot/config'; +import { getLogger, setLoggerConfig, shutdownLoggers, type Logger } from '@stock-bot/logger'; +import { Shutdown } from 
'@stock-bot/shutdown'; import type { IServiceContainer } from '@stock-bot/types'; -import type { ServiceContainer } from './awilix-container'; +import type { HandlerRegistry } from '@stock-bot/handler-registry'; +import type { ServiceDefinitions } from './container/types'; +import type { AwilixContainer } from 'awilix'; /** * Configuration for ServiceApplication @@ -18,26 +20,26 @@ import type { ServiceContainer } from './awilix-container'; export interface ServiceApplicationConfig { /** Service name for logging and identification */ serviceName: string; - + /** CORS configuration - if not provided, uses permissive defaults */ corsConfig?: Parameters[0]; - + /** Whether to enable handler initialization */ enableHandlers?: boolean; - + /** Whether to enable scheduled job creation */ enableScheduledJobs?: boolean; - + /** Custom shutdown timeout in milliseconds */ shutdownTimeout?: number; - + /** Service metadata for info endpoint */ serviceMetadata?: { version?: string; description?: string; endpoints?: Record; }; - + /** Whether to add a basic info endpoint at root */ addInfoEndpoint?: boolean; } @@ -48,16 +50,16 @@ export interface ServiceApplicationConfig { export interface ServiceLifecycleHooks { /** Called after container is created but before routes */ onContainerReady?: (container: IServiceContainer) => Promise | void; - + /** Called after app is created but before routes are mounted */ onAppReady?: (app: Hono, container: IServiceContainer) => Promise | void; - + /** Called after routes are mounted but before server starts */ onBeforeStart?: (app: Hono, container: IServiceContainer) => Promise | void; - + /** Called after successful server startup */ onStarted?: (port: number) => Promise | void; - + /** Called during shutdown before cleanup */ onBeforeShutdown?: () => Promise | void; } @@ -70,13 +72,13 @@ export class ServiceApplication { private serviceConfig: ServiceApplicationConfig; private hooks: ServiceLifecycleHooks; private logger: Logger; - - 
private container: ServiceContainer | null = null; + + private container: AwilixContainer | null = null; private serviceContainer: IServiceContainer | null = null; private app: Hono | null = null; private server: ReturnType | null = null; private shutdown: Shutdown; - + constructor( config: BaseAppConfig | UnifiedAppConfig, serviceConfig: ServiceApplicationConfig, @@ -84,12 +86,12 @@ export class ServiceApplication { ) { // Convert to unified config this.config = toUnifiedConfig(config); - + // Ensure service name is set in config if (!this.config.service.serviceName) { this.config.service.serviceName = serviceConfig.serviceName; } - + this.serviceConfig = { shutdownTimeout: 15000, enableHandlers: false, @@ -98,17 +100,17 @@ export class ServiceApplication { ...serviceConfig, }; this.hooks = hooks; - + // Initialize logger configuration this.configureLogger(); this.logger = getLogger(this.serviceConfig.serviceName); - + // Initialize shutdown manager - this.shutdown = Shutdown.getInstance({ - timeout: this.serviceConfig.shutdownTimeout + this.shutdown = Shutdown.getInstance({ + timeout: this.serviceConfig.shutdownTimeout, }); } - + /** * Configure logger based on application config */ @@ -123,13 +125,13 @@ export class ServiceApplication { }); } } - + /** * Create and configure Hono application with CORS */ private createApp(): Hono { const app = new Hono(); - + // Add CORS middleware with service-specific or default configuration const corsConfig = this.serviceConfig.corsConfig || { origin: '*', @@ -137,9 +139,9 @@ export class ServiceApplication { allowHeaders: ['Content-Type', 'Authorization'], credentials: false, }; - + app.use('*', cors(corsConfig)); - + // Add basic info endpoint if enabled if (this.serviceConfig.addInfoEndpoint) { const metadata = this.serviceConfig.serviceMetadata || {}; @@ -154,10 +156,10 @@ export class ServiceApplication { }); }); } - + return app; } - + /** * Register graceful shutdown handlers */ @@ -177,7 +179,7 @@ export class 
ServiceApplication { } }, 'Queue System'); } - + // Priority 1: HTTP Server (high priority) this.shutdown.onShutdownHigh(async () => { if (this.server) { @@ -190,7 +192,7 @@ export class ServiceApplication { } } }, 'HTTP Server'); - + // Custom shutdown hook if (this.hooks.onBeforeShutdown) { this.shutdown.onShutdownHigh(async () => { @@ -201,7 +203,7 @@ export class ServiceApplication { } }, 'Custom Shutdown'); } - + // Priority 2: Services and connections (medium priority) this.shutdown.onShutdownMedium(async () => { this.logger.info('Disposing services and connections...'); @@ -212,24 +214,24 @@ export class ServiceApplication { if (mongoClient?.disconnect) { await mongoClient.disconnect(); } - + const postgresClient = this.container.resolve('postgresClient'); if (postgresClient?.disconnect) { await postgresClient.disconnect(); } - + const questdbClient = this.container.resolve('questdbClient'); if (questdbClient?.disconnect) { await questdbClient.disconnect(); } - + this.logger.info('All services disposed successfully'); } } catch (error) { this.logger.error('Error disposing services', { error }); } }, 'Services'); - + // Priority 3: Logger shutdown (lowest priority - runs last) this.shutdown.onShutdownLow(async () => { try { @@ -241,62 +243,62 @@ export class ServiceApplication { } }, 'Loggers'); } - + /** * Start the service with full initialization */ async start( - containerFactory: (config: UnifiedAppConfig) => Promise, + containerFactory: (config: UnifiedAppConfig) => Promise>, routeFactory: (container: IServiceContainer) => Hono, handlerInitializer?: (container: IServiceContainer) => Promise ): Promise { this.logger.info(`Initializing ${this.serviceConfig.serviceName} service...`); - + try { // Create and initialize container this.logger.debug('Creating DI container...'); // Config already has service name from constructor this.container = await containerFactory(this.config); - this.serviceContainer = this.container.resolve('serviceContainer'); + 
this.serviceContainer = this.container!.resolve('serviceContainer'); this.logger.info('DI container created and initialized'); - + // Call container ready hook if (this.hooks.onContainerReady) { await this.hooks.onContainerReady(this.serviceContainer); } - + // Create Hono application this.app = this.createApp(); - + // Call app ready hook if (this.hooks.onAppReady) { await this.hooks.onAppReady(this.app, this.serviceContainer); } - + // Initialize handlers if enabled if (this.serviceConfig.enableHandlers && handlerInitializer) { this.logger.debug('Initializing handlers...'); await handlerInitializer(this.serviceContainer); this.logger.info('Handlers initialized'); } - + // Create and mount routes const routes = routeFactory(this.serviceContainer); this.app.route('/', routes); - + // Initialize scheduled jobs if enabled if (this.serviceConfig.enableScheduledJobs) { await this.initializeScheduledJobs(); } - + // Call before start hook if (this.hooks.onBeforeStart) { await this.hooks.onBeforeStart(this.app, this.serviceContainer); } - + // Register shutdown handlers this.registerShutdownHandlers(); - + // Start HTTP server const port = this.config.service.port; this.server = Bun.serve({ @@ -304,14 +306,13 @@ export class ServiceApplication { fetch: this.app.fetch, development: this.config.environment === 'development', }); - + this.logger.info(`${this.serviceConfig.serviceName} service started on port ${port}`); - + // Call started hook if (this.hooks.onStarted) { await this.hooks.onStarted(port); } - } catch (error) { this.logger.error('DETAILED ERROR:', error); this.logger.error('Failed to start service', { @@ -322,7 +323,7 @@ export class ServiceApplication { throw error; } } - + /** * Initialize scheduled jobs from handler registry */ @@ -330,17 +331,17 @@ export class ServiceApplication { if (!this.container) { throw new Error('Container not initialized'); } - + this.logger.debug('Creating scheduled jobs from registered handlers...'); - const { handlerRegistry } 
= await import('@stock-bot/handlers'); + const handlerRegistry = this.container.resolve('handlerRegistry'); const allHandlers = handlerRegistry.getAllHandlersWithSchedule(); - + let totalScheduledJobs = 0; for (const [handlerName, config] of allHandlers) { if (config.scheduledJobs && config.scheduledJobs.length > 0) { // Check if this handler belongs to the current service const ownerService = handlerRegistry.getHandlerService(handlerName); - + if (ownerService !== this.config.service.serviceName) { this.logger.trace('Skipping scheduled jobs for handler from different service', { handler: handlerName, @@ -349,14 +350,14 @@ export class ServiceApplication { }); continue; } - + const queueManager = this.container.resolve('queueManager'); if (!queueManager) { this.logger.error('Queue manager is not initialized, cannot create scheduled jobs'); continue; } const queue = queueManager.getQueue(handlerName); - + for (const scheduledJob of config.scheduledJobs) { // Include handler and operation info in job data const jobData = { @@ -364,7 +365,7 @@ export class ServiceApplication { operation: scheduledJob.operation, payload: scheduledJob.payload, }; - + // Build job options from scheduled job config const jobOptions = { priority: scheduledJob.priority, @@ -373,7 +374,7 @@ export class ServiceApplication { immediately: scheduledJob.immediately, }, }; - + await queue.addScheduledJob( scheduledJob.operation, jobData, @@ -392,7 +393,7 @@ export class ServiceApplication { } } this.logger.info('Scheduled jobs created', { totalJobs: totalScheduledJobs }); - + // Start queue workers this.logger.debug('Starting queue workers...'); const queueManager = this.container.resolve('queueManager'); @@ -401,7 +402,7 @@ export class ServiceApplication { this.logger.info('Queue workers started'); } } - + /** * Stop the service gracefully */ @@ -409,18 +410,18 @@ export class ServiceApplication { this.logger.info(`Stopping ${this.serviceConfig.serviceName} service...`); await 
this.shutdown.shutdown(); } - + /** * Get the service container (for testing or advanced use cases) */ getServiceContainer(): IServiceContainer | null { return this.serviceContainer; } - + /** * Get the Hono app (for testing or advanced use cases) */ getApp(): Hono | null { return this.app; } -} \ No newline at end of file +} diff --git a/libs/core/di/src/utils/lifecycle.ts b/libs/core/di/src/utils/lifecycle.ts index b415b85..35ee7f7 100644 --- a/libs/core/di/src/utils/lifecycle.ts +++ b/libs/core/di/src/utils/lifecycle.ts @@ -1,6 +1,6 @@ import type { AwilixContainer } from 'awilix'; -import type { ServiceDefinitions } from '../container/types'; import { getLogger } from '@stock-bot/logger'; +import type { ServiceDefinitions } from '../container/types'; interface ServiceWithLifecycle { connect?: () => Promise; @@ -29,13 +29,16 @@ export class ServiceLifecycleManager { for (const { name, key } of this.services) { const service = container.cradle[key] as ServiceWithLifecycle | null; - + if (service) { const initPromise = this.initializeService(name, service); initPromises.push( Promise.race([ initPromise, - this.createTimeoutPromise(timeout, `${name} initialization timed out after ${timeout}ms`), + this.createTimeoutPromise( + timeout, + `${name} initialization timed out after ${timeout}ms` + ), ]) ); } @@ -51,7 +54,7 @@ export class ServiceLifecycleManager { // Shutdown in reverse order for (const { name, key } of [...this.services].reverse()) { const service = container.cradle[key] as ServiceWithLifecycle | null; - + if (service) { shutdownPromises.push(this.shutdownService(name, service)); } diff --git a/libs/core/handler-registry/package.json b/libs/core/handler-registry/package.json new file mode 100644 index 0000000..3baa662 --- /dev/null +++ b/libs/core/handler-registry/package.json @@ -0,0 +1,27 @@ +{ + "name": "@stock-bot/handler-registry", + "version": "1.0.0", + "type": "module", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "exports": { 
+ ".": { + "import": "./dist/index.js", + "types": "./dist/index.d.ts" + } + }, + "scripts": { + "build": "bun run build:clean && bun run build:tsc", + "build:clean": "rm -rf dist", + "build:tsc": "tsc", + "test": "bun test", + "clean": "rm -rf dist node_modules .turbo" + }, + "dependencies": { + "@stock-bot/types": "workspace:*" + }, + "devDependencies": { + "@types/bun": "*", + "typescript": "*" + } +} diff --git a/libs/core/handler-registry/src/index.ts b/libs/core/handler-registry/src/index.ts new file mode 100644 index 0000000..924a74c --- /dev/null +++ b/libs/core/handler-registry/src/index.ts @@ -0,0 +1,14 @@ +/** + * Handler Registry Package + * Provides centralized handler registration without circular dependencies + */ + +export { HandlerRegistry } from './registry'; +export type { + HandlerMetadata, + OperationMetadata, + ScheduleMetadata, + HandlerConfiguration, + RegistryStats, + HandlerDiscoveryResult, +} from './types'; diff --git a/libs/core/handler-registry/src/registry.ts b/libs/core/handler-registry/src/registry.ts new file mode 100644 index 0000000..6f65b94 --- /dev/null +++ b/libs/core/handler-registry/src/registry.ts @@ -0,0 +1,226 @@ +/** + * Handler Registry Implementation + * Manages handler metadata and configuration without circular dependencies + */ + +import type { JobHandler, ScheduledJob } from '@stock-bot/types'; +import type { + HandlerConfiguration, + HandlerMetadata, + OperationMetadata, + RegistryStats, + ScheduleMetadata, +} from './types'; + +export class HandlerRegistry { + private handlers = new Map(); + private configurations = new Map(); + private handlerServices = new Map(); + + /** + * Register handler metadata + */ + registerMetadata(metadata: HandlerMetadata): void { + this.handlers.set(metadata.name, metadata); + + if (metadata.service) { + this.handlerServices.set(metadata.name, metadata.service); + } + } + + /** + * Register handler configuration with operation implementations + */ + registerConfiguration(config: 
HandlerConfiguration): void { + this.configurations.set(config.name, config); + } + + /** + * Register both metadata and configuration + */ + register(metadata: HandlerMetadata, config: HandlerConfiguration): void { + this.registerMetadata(metadata); + this.registerConfiguration(config); + } + + /** + * Get handler metadata + */ + getMetadata(handlerName: string): HandlerMetadata | undefined { + return this.handlers.get(handlerName); + } + + /** + * Get handler configuration + */ + getConfiguration(handlerName: string): HandlerConfiguration | undefined { + return this.configurations.get(handlerName); + } + + /** + * Get a specific operation handler + */ + getOperation(handlerName: string, operationName: string): JobHandler | undefined { + const config = this.configurations.get(handlerName); + return config?.operations[operationName]; + } + + /** + * Get all handler metadata + */ + getAllMetadata(): Map { + return new Map(this.handlers); + } + + /** + * Get all handler names + */ + getHandlerNames(): string[] { + return Array.from(this.handlers.keys()); + } + + /** + * Check if a handler is registered + */ + hasHandler(handlerName: string): boolean { + return this.handlers.has(handlerName); + } + + /** + * Get handlers for a specific service + */ + getServiceHandlers(serviceName: string): HandlerMetadata[] { + const handlers: HandlerMetadata[] = []; + + for (const [handlerName, service] of this.handlerServices) { + if (service === serviceName) { + const metadata = this.handlers.get(handlerName); + if (metadata) { + handlers.push(metadata); + } + } + } + + return handlers; + } + + /** + * Set service ownership for a handler + */ + setHandlerService(handlerName: string, serviceName: string): void { + this.handlerServices.set(handlerName, serviceName); + + // Update metadata if it exists + const metadata = this.handlers.get(handlerName); + if (metadata) { + metadata.service = serviceName; + } + } + + /** + * Get the service that owns a handler + */ + 
getHandlerService(handlerName: string): string | undefined { + return this.handlerServices.get(handlerName); + } + + /** + * Get scheduled jobs for a handler + */ + getScheduledJobs(handlerName: string): ScheduledJob[] { + const config = this.configurations.get(handlerName); + return config?.scheduledJobs || []; + } + + /** + * Get all handlers with their scheduled jobs + */ + getAllHandlersWithSchedule(): Map< + string, + { metadata: HandlerMetadata; scheduledJobs: ScheduledJob[] } + > { + const result = new Map(); + + for (const [name, metadata] of this.handlers) { + const config = this.configurations.get(name); + result.set(name, { + metadata, + scheduledJobs: config?.scheduledJobs || [], + }); + } + + return result; + } + + /** + * Get registry statistics + */ + getStats(): RegistryStats { + let operationCount = 0; + let scheduledJobCount = 0; + const services = new Set(); + + for (const metadata of this.handlers.values()) { + operationCount += metadata.operations.length; + scheduledJobCount += metadata.schedules?.length || 0; + + if (metadata.service) { + services.add(metadata.service); + } + } + + return { + handlers: this.handlers.size, + operations: operationCount, + scheduledJobs: scheduledJobCount, + services: services.size, + }; + } + + /** + * Clear all registrations (useful for testing) + */ + clear(): void { + this.handlers.clear(); + this.configurations.clear(); + this.handlerServices.clear(); + } + + /** + * Export registry data for debugging or persistence + */ + export(): { + handlers: Array<[string, HandlerMetadata]>; + configurations: Array<[string, HandlerConfiguration]>; + services: Array<[string, string]>; + } { + return { + handlers: Array.from(this.handlers.entries()), + configurations: Array.from(this.configurations.entries()), + services: Array.from(this.handlerServices.entries()), + }; + } + + /** + * Import registry data + */ + import(data: { + handlers: Array<[string, HandlerMetadata]>; + configurations: Array<[string, 
HandlerConfiguration]>; + services: Array<[string, string]>; + }): void { + this.clear(); + + for (const [name, metadata] of data.handlers) { + this.handlers.set(name, metadata); + } + + for (const [name, config] of data.configurations) { + this.configurations.set(name, config); + } + + for (const [handler, service] of data.services) { + this.handlerServices.set(handler, service); + } + } +} diff --git a/libs/core/handler-registry/src/types.ts b/libs/core/handler-registry/src/types.ts new file mode 100644 index 0000000..24544de --- /dev/null +++ b/libs/core/handler-registry/src/types.ts @@ -0,0 +1,66 @@ +/** + * Handler Registry Types + * Pure types for handler metadata and registration + */ + +import type { JobHandler, ScheduledJob } from '@stock-bot/types'; + +/** + * Metadata for a single operation within a handler + */ +export interface OperationMetadata { + name: string; + method: string; + description?: string; +} + +/** + * Metadata for a scheduled operation + */ +export interface ScheduleMetadata { + operation: string; + cronPattern: string; + priority?: number; + immediately?: boolean; + description?: string; +} + +/** + * Complete metadata for a handler + */ +export interface HandlerMetadata { + name: string; + service?: string; + operations: OperationMetadata[]; + schedules?: ScheduleMetadata[]; + version?: string; + description?: string; +} + +/** + * Handler configuration with operation implementations + */ +export interface HandlerConfiguration { + name: string; + operations: Record; + scheduledJobs?: ScheduledJob[]; +} + +/** + * Registry statistics + */ +export interface RegistryStats { + handlers: number; + operations: number; + scheduledJobs: number; + services: number; +} + +/** + * Handler discovery result + */ +export interface HandlerDiscoveryResult { + handler: HandlerMetadata; + constructor: any; + filePath?: string; +} diff --git a/libs/core/handler-registry/tsconfig.json b/libs/core/handler-registry/tsconfig.json new file mode 100644 index 
0000000..8460633 --- /dev/null +++ b/libs/core/handler-registry/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../../../tsconfig.lib.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist", "test/**/*", "**/*.test.ts", "**/*.spec.ts"] +} diff --git a/libs/core/handlers/package.json b/libs/core/handlers/package.json index 5ea2226..3a29904 100644 --- a/libs/core/handlers/package.json +++ b/libs/core/handlers/package.json @@ -10,10 +10,11 @@ "test": "bun test" }, "dependencies": { + "@stock-bot/cache": "workspace:*", "@stock-bot/config": "workspace:*", + "@stock-bot/handler-registry": "workspace:*", "@stock-bot/logger": "workspace:*", "@stock-bot/types": "workspace:*", - "@stock-bot/cache": "workspace:*", "@stock-bot/utils": "workspace:*", "mongodb": "^6.12.0" }, diff --git a/libs/core/handlers/src/base/BaseHandler.ts b/libs/core/handlers/src/base/BaseHandler.ts index 015ef92..b9f3273 100644 --- a/libs/core/handlers/src/base/BaseHandler.ts +++ b/libs/core/handlers/src/base/BaseHandler.ts @@ -1,14 +1,17 @@ import type { Collection } from 'mongodb'; +import { createNamespacedCache } from '@stock-bot/cache'; import { getLogger } from '@stock-bot/logger'; -import type { - HandlerConfigWithSchedule, - IServiceContainer, +import type { ExecutionContext, - IHandler + HandlerConfigWithSchedule, + HandlerMetadata, + IHandler, + IServiceContainer, + JobHandler, + ServiceTypes, } from '@stock-bot/types'; import { fetch } from '@stock-bot/utils'; -import { createNamespacedCache } from '@stock-bot/cache'; -import { handlerRegistry } from '../registry/handler-registry'; +// Handler registry is now injected, not imported import { createJobHandler } from '../utils/create-job-handler'; /** @@ -38,16 +41,16 @@ export interface JobScheduleOptions { * Provides common functionality and structure for queue/event operations */ export abstract class BaseHandler implements IHandler { - // Direct service 
properties - flattened for cleaner access - readonly logger; - readonly cache; - readonly globalCache; - readonly queue; - readonly proxy; - readonly browser; - readonly mongodb; - readonly postgres; - readonly questdb; + // Direct service properties - flattened for cleaner access with proper types + readonly logger: ServiceTypes['logger']; + readonly cache: ServiceTypes['cache']; + readonly globalCache: ServiceTypes['globalCache']; + readonly queue: ServiceTypes['queue']; + readonly proxy: ServiceTypes['proxy']; + readonly browser: ServiceTypes['browser']; + readonly mongodb: ServiceTypes['mongodb']; + readonly postgres: ServiceTypes['postgres']; + readonly questdb: ServiceTypes['questdb']; private handlerName: string; @@ -109,8 +112,8 @@ export abstract class BaseHandler implements IHandler { } async scheduleOperation( - operation: string, - payload: unknown, + operation: string, + payload: unknown, options?: JobScheduleOptions ): Promise { if (!this.queue) { @@ -122,7 +125,7 @@ export abstract class BaseHandler implements IHandler { operation, payload, }; - + await queue.add(operation, jobData, options || {}); } @@ -162,7 +165,7 @@ export abstract class BaseHandler implements IHandler { * Example: handler 'webshare' creates namespace 'webshare:api' -> keys will be 'cache:data-ingestion:webshare:api:*' */ protected createNamespacedCache(subNamespace: string) { - return createNamespacedCache(this.cache, `${this.handlerName}:${subNamespace}`); + return createNamespacedCache(this.cache || null, `${this.handlerName}:${subNamespace}`); } /** @@ -197,36 +200,36 @@ export abstract class BaseHandler implements IHandler { // Don't add 'cache:' prefix since the cache already has its own prefix return this.cache.del(`${this.handlerName}:${key}`); } - - /** - * Set global cache with key - */ - protected async globalCacheSet(key: string, value: any, ttl?: number): Promise { - if (!this.globalCache) { - return; - } - return this.globalCache.set(key, value, ttl); + + /** + * 
Set global cache with key + */ + protected async globalCacheSet(key: string, value: any, ttl?: number): Promise { + if (!this.globalCache) { + return; } - - /** - * Get global cache with key - */ - protected async globalCacheGet(key: string): Promise { - if (!this.globalCache) { - return null; - } - return this.globalCache.get(key); + return this.globalCache.set(key, value, ttl); + } + + /** + * Get global cache with key + */ + protected async globalCacheGet(key: string): Promise { + if (!this.globalCache) { + return null; } - - /** - * Delete global cache with key - */ - protected async globalCacheDel(key: string): Promise { - if (!this.globalCache) { - return; - } - return this.globalCache.del(key); + return this.globalCache.get(key); + } + + /** + * Delete global cache with key + */ + protected async globalCacheDel(key: string): Promise { + if (!this.globalCache) { + return; } + return this.globalCache.del(key); + } /** * Schedule operation with delay in seconds */ @@ -238,7 +241,7 @@ export abstract class BaseHandler implements IHandler { ): Promise { return this.scheduleOperation(operation, payload, { delay: delaySeconds * 1000, - ...additionalOptions + ...additionalOptions, }); } @@ -294,27 +297,45 @@ export abstract class BaseHandler implements IHandler { // } /** - * Register this handler using decorator metadata - * Automatically reads @Handler, @Operation, and @QueueSchedule decorators + * Create handler configuration with job handlers + * This is used by the scanner to create the actual handler configuration */ - register(serviceName?: string): void { - const constructor = this.constructor as any; - const handlerName = constructor.__handlerName || this.handlerName; - const operations = constructor.__operations || []; - const schedules = constructor.__schedules || []; + createHandlerConfig(): HandlerConfigWithSchedule { + const metadata = (this.constructor as typeof BaseHandler).extractMetadata(); + if (!metadata) { + throw new Error('Handler metadata not 
found'); + } - // Create operation handlers from decorator metadata - const operationHandlers: Record = {}; - for (const op of operations) { - operationHandlers[op.name] = createJobHandler(async payload => { + const operationHandlers: Record = {}; + for (const opName of metadata.operations) { + operationHandlers[opName] = createJobHandler(async (payload: any) => { const context: ExecutionContext = { type: 'queue', metadata: { source: 'queue', timestamp: Date.now() }, }; - return await this.execute(op.name, payload, context); + return await this.execute(opName, payload, context); }); } + return { + name: metadata.name, + operations: operationHandlers, + scheduledJobs: metadata.scheduledJobs, + }; + } + + /** + * Extract handler metadata from decorators + * This returns metadata only - actual handler instances are created by the scanner + */ + static extractMetadata(): HandlerMetadata | null { + const constructor = this as any; + const handlerName = constructor.__handlerName; + if (!handlerName) return null; + + const operations = constructor.__operations || []; + const schedules = constructor.__schedules || []; + // Create scheduled jobs from decorator metadata const scheduledJobs = schedules.map((schedule: any) => { // Find the operation name from the method name @@ -326,27 +347,15 @@ export abstract class BaseHandler implements IHandler { priority: schedule.priority || 5, immediately: schedule.immediately || false, description: schedule.description || `${handlerName} ${schedule.operation}`, - payload: this.getScheduledJobPayload?.(schedule.operation), }; }); - const config: HandlerConfigWithSchedule = { + return { name: handlerName, - operations: operationHandlers, + operations: operations.map((op: any) => op.name), scheduledJobs, + description: constructor.__description, }; - - handlerRegistry.registerWithSchedule(config, serviceName); - this.logger.info('Handler registered using decorator metadata', { - handlerName, - service: serviceName, - operations: 
operations.map((op: any) => ({ name: op.name, method: op.method })), - scheduledJobs: scheduledJobs.map((job: any) => ({ - operation: job.operation, - cronPattern: job.cronPattern, - immediately: job.immediately, - })), - }); } /** diff --git a/libs/core/handlers/src/index.ts b/libs/core/handlers/src/index.ts index f531f38..d49f9b2 100644 --- a/libs/core/handlers/src/index.ts +++ b/libs/core/handlers/src/index.ts @@ -2,8 +2,7 @@ export { BaseHandler, ScheduledHandler } from './base/BaseHandler'; export type { JobScheduleOptions } from './base/BaseHandler'; -// Handler registry -export { handlerRegistry } from './registry/handler-registry'; +// Handler registry is now in a separate package // Utilities export { createJobHandler } from './utils/create-job-handler'; diff --git a/libs/core/handlers/src/registry/auto-register.ts b/libs/core/handlers/src/registry/auto-register.ts index 7337875..3566284 100644 --- a/libs/core/handlers/src/registry/auto-register.ts +++ b/libs/core/handlers/src/registry/auto-register.ts @@ -6,8 +6,8 @@ import { readdirSync, statSync } from 'fs'; import { join, relative } from 'path'; import { getLogger } from '@stock-bot/logger'; -import { BaseHandler } from '../base/BaseHandler'; import type { IServiceContainer } from '@stock-bot/types'; +import { BaseHandler } from '../base/BaseHandler'; const logger = getLogger('handler-auto-register'); @@ -123,14 +123,13 @@ export async function autoRegisterHandlers( } else { logger.info(`Registering handler: ${handlerName} from ${relativePath}`); - // Create instance and register + // Create instance - handlers now auto-register via decorators const handler = new HandlerClass(services); - handler.register(serviceName); - - // No need to set service ownership separately - it's done in register() registered.push(handlerName); - logger.info(`Successfully registered handler: ${handlerName}`, { service: serviceName }); + logger.info(`Successfully registered handler: ${handlerName}`, { + service: 
serviceName, + }); } } } catch (error) { diff --git a/libs/core/handlers/src/registry/handler-registry.ts b/libs/core/handlers/src/registry/handler-registry.ts.old similarity index 100% rename from libs/core/handlers/src/registry/handler-registry.ts rename to libs/core/handlers/src/registry/handler-registry.ts.old diff --git a/libs/core/handlers/src/utils/create-job-handler.ts b/libs/core/handlers/src/utils/create-job-handler.ts index 7f5012e..3a73913 100644 --- a/libs/core/handlers/src/utils/create-job-handler.ts +++ b/libs/core/handlers/src/utils/create-job-handler.ts @@ -1,16 +1,16 @@ -/** - * Utility for creating typed job handlers - */ - -import type { JobHandler, TypedJobHandler } from '@stock-bot/types'; - -/** - * Create a typed job handler with validation - */ -export function createJobHandler( - handler: TypedJobHandler -): JobHandler { - return async (payload: unknown): Promise => { - return handler(payload as TPayload); - }; -} \ No newline at end of file +/** + * Utility for creating typed job handlers + */ + +import type { JobHandler, TypedJobHandler } from '@stock-bot/types'; + +/** + * Create a typed job handler with validation + */ +export function createJobHandler( + handler: TypedJobHandler +): JobHandler { + return async (payload: unknown): Promise => { + return handler(payload as TPayload); + }; +} diff --git a/libs/core/queue/package.json b/libs/core/queue/package.json index db2c01e..afee7d0 100644 --- a/libs/core/queue/package.json +++ b/libs/core/queue/package.json @@ -14,9 +14,9 @@ "ioredis": "^5.3.0", "rate-limiter-flexible": "^3.0.0", "@stock-bot/cache": "*", + "@stock-bot/handler-registry": "*", "@stock-bot/logger": "*", - "@stock-bot/types": "*", - "@stock-bot/handlers": "*" + "@stock-bot/types": "*" }, "devDependencies": { "typescript": "^5.3.0", diff --git a/libs/core/queue/src/batch-processor.ts b/libs/core/queue/src/batch-processor.ts index a69116e..f504142 100644 --- a/libs/core/queue/src/batch-processor.ts +++ 
b/libs/core/queue/src/batch-processor.ts @@ -171,7 +171,11 @@ async function processBatched( /** * Process a batch job - loads items and creates individual jobs */ -export async function processBatchJob(jobData: BatchJobData, queueName: string, queueManager: QueueManager): Promise { +export async function processBatchJob( + jobData: BatchJobData, + queueName: string, + queueManager: QueueManager +): Promise { const queue = queueManager.getQueue(queueName); const logger = queue.createChildLogger('batch-job', { queueName, @@ -304,7 +308,11 @@ async function loadPayload( } | null; } -async function cleanupPayload(key: string, queueName: string, queueManager: QueueManager): Promise { +async function cleanupPayload( + key: string, + queueName: string, + queueManager: QueueManager +): Promise { const cache = queueManager.getCache(queueName); await cache.del(key); } diff --git a/libs/core/queue/src/index.ts b/libs/core/queue/src/index.ts index 2f986af..39e70df 100644 --- a/libs/core/queue/src/index.ts +++ b/libs/core/queue/src/index.ts @@ -4,15 +4,15 @@ export { QueueManager } from './queue-manager'; export { SmartQueueManager } from './smart-queue-manager'; export { ServiceCache, createServiceCache } from './service-cache'; // Service utilities -export { +export { normalizeServiceName, generateCachePrefix, getFullQueueName, - parseQueueName + parseQueueName, } from './service-utils'; -// Re-export handler registry and utilities from handlers package -export { handlerRegistry, createJobHandler } from '@stock-bot/handlers'; +// Re-export utilities from handlers package +export { createJobHandler } from '@stock-bot/handlers'; // Batch processing export { processBatchJob, processItems } from './batch-processor'; @@ -64,10 +64,8 @@ export type { // Scheduled job types ScheduledJob, ScheduleConfig, - + // Smart Queue types SmartQueueConfig, QueueRoute, - } from './types'; - diff --git a/libs/core/queue/src/queue-manager.ts b/libs/core/queue/src/queue-manager.ts index 
2e46d1e..264d823 100644 --- a/libs/core/queue/src/queue-manager.ts +++ b/libs/core/queue/src/queue-manager.ts @@ -76,8 +76,9 @@ export class QueueManager { // Prepare queue configuration const workers = mergedOptions.workers ?? this.config.defaultQueueOptions?.workers ?? 1; - const concurrency = mergedOptions.concurrency ?? this.config.defaultQueueOptions?.concurrency ?? 1; - + const concurrency = + mergedOptions.concurrency ?? this.config.defaultQueueOptions?.concurrency ?? 1; + const queueConfig: QueueWorkerConfig = { workers, concurrency, @@ -180,7 +181,6 @@ export class QueueManager { return this.queues; } - /** * Get statistics for all queues */ @@ -449,4 +449,4 @@ export class QueueManager { getConfig(): Readonly { return { ...this.config }; } -} \ No newline at end of file +} diff --git a/libs/core/queue/src/queue.ts b/libs/core/queue/src/queue.ts index e8abd24..0328310 100644 --- a/libs/core/queue/src/queue.ts +++ b/libs/core/queue/src/queue.ts @@ -1,6 +1,7 @@ import { Queue as BullQueue, QueueEvents, Worker, type Job } from 'bullmq'; -import { handlerRegistry } from '@stock-bot/handlers'; -import type { JobData, JobOptions, ExtendedJobOptions, QueueStats, RedisConfig } from './types'; +// Handler registry will be injected +import type { HandlerRegistry } from '@stock-bot/handler-registry'; +import type { ExtendedJobOptions, JobData, JobOptions, QueueStats, RedisConfig } from './types'; import { getRedisConnection } from './utils'; // Logger interface for type safety @@ -17,6 +18,7 @@ export interface QueueWorkerConfig { workers?: number; concurrency?: number; startWorker?: boolean; + handlerRegistry?: HandlerRegistry; } /** @@ -30,6 +32,7 @@ export class Queue { private queueName: string; private redisConfig: RedisConfig; private readonly logger: Logger; + private readonly handlerRegistry?: HandlerRegistry; constructor( queueName: string, @@ -41,6 +44,7 @@ export class Queue { this.queueName = queueName; this.redisConfig = redisConfig; this.logger = logger 
|| console; + this.handlerRegistry = config.handlerRegistry; const connection = getRedisConnection(redisConfig); @@ -338,7 +342,10 @@ export class Queue { try { // Look up handler in registry - const jobHandler = handlerRegistry.getOperation(handler, operation); + if (!this.handlerRegistry) { + throw new Error('Handler registry not configured for worker processing'); + } + const jobHandler = this.handlerRegistry.getOperation(handler, operation); if (!jobHandler) { throw new Error(`No handler found for ${handler}:${operation}`); @@ -390,5 +397,4 @@ export class Queue { getWorkerCount(): number { return this.workers.length; } - } diff --git a/libs/core/queue/src/rate-limiter.ts b/libs/core/queue/src/rate-limiter.ts index 06ba222..413a62f 100644 --- a/libs/core/queue/src/rate-limiter.ts +++ b/libs/core/queue/src/rate-limiter.ts @@ -271,7 +271,12 @@ export class QueueRateLimiter { limit, }; } catch (error) { - this.logger.error('Failed to get rate limit status', { queueName, handler, operation, error }); + this.logger.error('Failed to get rate limit status', { + queueName, + handler, + operation, + error, + }); return { queueName, handler, diff --git a/libs/core/queue/src/service-cache.ts b/libs/core/queue/src/service-cache.ts index 4ffb329..980ef45 100644 --- a/libs/core/queue/src/service-cache.ts +++ b/libs/core/queue/src/service-cache.ts @@ -1,6 +1,6 @@ import { createCache, type CacheProvider, type CacheStats } from '@stock-bot/cache'; -import type { RedisConfig } from './types'; import { generateCachePrefix } from './service-utils'; +import type { RedisConfig } from './types'; /** * Service-aware cache that uses the service's Redis DB @@ -132,7 +132,11 @@ export class ServiceCache implements CacheProvider { return this.cache.set(key, value, ttl); } - async updateField(key: string, updater: (current: T | null) => T, ttl?: number): Promise { + async updateField( + key: string, + updater: (current: T | null) => T, + ttl?: number + ): Promise { if 
(this.cache.updateField) { return this.cache.updateField(key, updater, ttl); } @@ -162,7 +166,6 @@ export class ServiceCache implements CacheProvider { } } - /** * Factory function to create service cache */ @@ -172,4 +175,4 @@ export function createServiceCache( options: { global?: boolean; logger?: any } = {} ): ServiceCache { return new ServiceCache(serviceName, redisConfig, options.global, options.logger); -} \ No newline at end of file +} diff --git a/libs/core/queue/src/service-utils.ts b/libs/core/queue/src/service-utils.ts index d6b3a5e..e29f9c5 100644 --- a/libs/core/queue/src/service-utils.ts +++ b/libs/core/queue/src/service-utils.ts @@ -1,53 +1,51 @@ -/** - * Service utilities for name normalization and auto-discovery - */ - -/** - * Normalize service name to kebab-case format - * Examples: - * - webApi -> web-api - * - dataIngestion -> data-ingestion - * - data-pipeline -> data-pipeline (unchanged) - */ -export function normalizeServiceName(serviceName: string): string { - // Handle camelCase to kebab-case conversion - const kebabCase = serviceName - .replace(/([a-z])([A-Z])/g, '$1-$2') - .toLowerCase(); - - return kebabCase; -} - -/** - * Generate cache prefix for a service - */ -export function generateCachePrefix(serviceName: string): string { - const normalized = normalizeServiceName(serviceName); - return `cache:${normalized}`; -} - -/** - * Generate full queue name with service namespace - */ -export function getFullQueueName(serviceName: string, handlerName: string): string { - const normalized = normalizeServiceName(serviceName); - // Use {service_handler} format for Dragonfly optimization and BullMQ compatibility - return `{${normalized}_${handlerName}}`; -} - -/** - * Parse a full queue name into service and handler - */ -export function parseQueueName(fullQueueName: string): { service: string; handler: string } | null { - // Match pattern {service_handler} - const match = fullQueueName.match(/^\{([^_]+)_([^}]+)\}$/); - - if (!match || 
!match[1] || !match[2]) { - return null; - } - - return { - service: match[1], - handler: match[2], - }; -} \ No newline at end of file +/** + * Service utilities for name normalization and auto-discovery + */ + +/** + * Normalize service name to kebab-case format + * Examples: + * - webApi -> web-api + * - dataIngestion -> data-ingestion + * - data-pipeline -> data-pipeline (unchanged) + */ +export function normalizeServiceName(serviceName: string): string { + // Handle camelCase to kebab-case conversion + const kebabCase = serviceName.replace(/([a-z])([A-Z])/g, '$1-$2').toLowerCase(); + + return kebabCase; +} + +/** + * Generate cache prefix for a service + */ +export function generateCachePrefix(serviceName: string): string { + const normalized = normalizeServiceName(serviceName); + return `cache:${normalized}`; +} + +/** + * Generate full queue name with service namespace + */ +export function getFullQueueName(serviceName: string, handlerName: string): string { + const normalized = normalizeServiceName(serviceName); + // Use {service_handler} format for Dragonfly optimization and BullMQ compatibility + return `{${normalized}_${handlerName}}`; +} + +/** + * Parse a full queue name into service and handler + */ +export function parseQueueName(fullQueueName: string): { service: string; handler: string } | null { + // Match pattern {service_handler} + const match = fullQueueName.match(/^\{([^_]+)_([^}]+)\}$/); + + if (!match || !match[1] || !match[2]) { + return null; + } + + return { + service: match[1], + handler: match[2], + }; +} diff --git a/libs/core/queue/src/smart-queue-manager.ts b/libs/core/queue/src/smart-queue-manager.ts index 29cd599..d760706 100644 --- a/libs/core/queue/src/smart-queue-manager.ts +++ b/libs/core/queue/src/smart-queue-manager.ts @@ -1,16 +1,10 @@ import { Queue as BullQueue, type Job } from 'bullmq'; -import { handlerRegistry } from '@stock-bot/handlers'; +import type { HandlerRegistry } from '@stock-bot/handler-registry'; import { 
getLogger, type Logger } from '@stock-bot/logger'; -import { QueueManager } from './queue-manager'; import { Queue } from './queue'; -import type { - SmartQueueConfig, - QueueRoute, - JobData, - JobOptions, - RedisConfig -} from './types'; +import { QueueManager } from './queue-manager'; import { getFullQueueName, parseQueueName } from './service-utils'; +import type { JobData, JobOptions, QueueRoute, RedisConfig, SmartQueueConfig } from './types'; import { getRedisConnection } from './utils'; /** @@ -23,30 +17,33 @@ export class SmartQueueManager extends QueueManager { private connections = new Map(); // Redis connections by DB private producerQueues = new Map(); // For cross-service sending private _logger: Logger; + private handlerRegistry?: HandlerRegistry; - constructor(config: SmartQueueConfig, logger?: Logger) { + constructor(config: SmartQueueConfig, handlerRegistry?: HandlerRegistry, logger?: Logger) { // Always use DB 0 for queues (unified queue database) const modifiedConfig = { ...config, redis: { ...config.redis, - db: 0, // All queues in DB 0 + db: 0, // All queues in DB 0 }, }; super(modifiedConfig, logger); - + this.serviceName = config.serviceName; + this.handlerRegistry = handlerRegistry; this._logger = logger || getLogger('SmartQueueManager'); - // Auto-discover routes if enabled - if (config.autoDiscoverHandlers !== false) { + // Auto-discover routes if enabled and registry provided + if (config.autoDiscoverHandlers !== false && handlerRegistry) { this.discoverQueueRoutes(); } this._logger.info('SmartQueueManager initialized', { service: this.serviceName, discoveredRoutes: this.queueRoutes.size, + hasRegistry: !!handlerRegistry, }); } @@ -54,26 +51,31 @@ export class SmartQueueManager extends QueueManager { * Discover all available queue routes from handler registry */ private discoverQueueRoutes(): void { + if (!this.handlerRegistry) { + this._logger.warn('No handler registry provided, skipping route discovery'); + return; + } + try { - const 
handlers = handlerRegistry.getAllHandlers(); - for (const [handlerName, handlerConfig] of handlers) { + const handlers = this.handlerRegistry.getAllMetadata(); + for (const [handlerName, metadata] of handlers) { // Get the service that registered this handler - const ownerService = handlerRegistry.getHandlerService(handlerName); + const ownerService = metadata.service; if (ownerService) { const fullName = getFullQueueName(ownerService, handlerName); - + this.queueRoutes.set(handlerName, { fullName, service: ownerService, handler: handlerName, - db: 0, // All queues in DB 0 - operations: Object.keys(handlerConfig.operations || {}), + db: 0, // All queues in DB 0 + operations: metadata.operations.map((op: any) => op.name), }); this._logger.trace('Discovered queue route', { handler: handlerName, service: ownerService, - operations: Object.keys(handlerConfig.operations || {}).length, + operations: metadata.operations.length, }); } else { this._logger.warn('Handler has no service ownership', { handlerName }); @@ -81,24 +83,25 @@ export class SmartQueueManager extends QueueManager { } // Also discover handlers registered by the current service - const myHandlers = handlerRegistry.getServiceHandlers(this.serviceName); - for (const handlerName of myHandlers) { + const myHandlers = this.handlerRegistry.getServiceHandlers(this.serviceName); + for (const metadata of myHandlers) { + const handlerName = metadata.name; if (!this.queueRoutes.has(handlerName)) { const fullName = getFullQueueName(this.serviceName, handlerName); this.queueRoutes.set(handlerName, { fullName, service: this.serviceName, handler: handlerName, - db: 0, // All queues in DB 0 + db: 0, // All queues in DB 0 }); } } this._logger.info('Queue routes discovered', { totalRoutes: this.queueRoutes.size, - routes: Array.from(this.queueRoutes.values()).map(r => ({ - handler: r.handler, - service: r.service + routes: Array.from(this.queueRoutes.values()).map(r => ({ + handler: r.handler, + service: r.service, })), 
}); } catch (error) { @@ -129,10 +132,10 @@ export class SmartQueueManager extends QueueManager { override getQueue(queueName: string, options = {}): Queue { // Check if this is already a full queue name (service:handler format) const parsed = parseQueueName(queueName); - + let fullQueueName: string; let isOwnQueue: boolean; - + if (parsed) { // Already in service:handler format fullQueueName = queueName; @@ -142,20 +145,19 @@ export class SmartQueueManager extends QueueManager { fullQueueName = getFullQueueName(this.serviceName, queueName); isOwnQueue = true; } - + // For cross-service queues, create without workers (producer-only) if (!isOwnQueue) { return super.getQueue(fullQueueName, { ...options, - workers: 0, // No workers for other services' queues + workers: 0, // No workers for other services' queues }); } - + // For own service queues, use configured workers return super.getQueue(fullQueueName, options); } - /** * Send a job to any queue (local or remote) * This is the main method for cross-service communication @@ -236,7 +238,7 @@ export class SmartQueueManager extends QueueManager { fullName: queueName, service: parsed.service, handler: parsed.handler, - db: 0, // All queues in DB 0 + db: 0, // All queues in DB 0 }; } @@ -247,13 +249,13 @@ export class SmartQueueManager extends QueueManager { } // Try to find in handler registry - const ownerService = handlerRegistry.getHandlerService(queueName); + const ownerService = this.handlerRegistry?.getHandlerService(queueName); if (ownerService) { return { fullName: getFullQueueName(ownerService, queueName), service: ownerService, handler: queueName, - db: 0, // All queues in DB 0 + db: 0, // All queues in DB 0 }; } @@ -281,7 +283,7 @@ export class SmartQueueManager extends QueueManager { */ getAllQueues(): Record { const allQueues: Record = {}; - + // Get all worker queues using public API const workerQueueNames = this.getQueueNames(); for (const name of workerQueueNames) { @@ -296,7 +298,7 @@ export class 
SmartQueueManager extends QueueManager { } } } - + // Add producer queues for (const [name, queue] of this.producerQueues) { // Use the simple handler name without service prefix for display @@ -306,7 +308,7 @@ export class SmartQueueManager extends QueueManager { allQueues[simpleName] = queue; } } - + // If no queues found, create from discovered routes if (Object.keys(allQueues).length === 0) { for (const [handlerName, route] of this.queueRoutes) { @@ -317,7 +319,7 @@ export class SmartQueueManager extends QueueManager { }); } } - + return allQueues; } @@ -350,11 +352,11 @@ export class SmartQueueManager extends QueueManager { let workersStarted = 0; const queues = this.getQueues(); - + for (const [queueName, queue] of queues) { // Parse queue name to check if it belongs to this service const parsed = parseQueueName(queueName); - + // Skip if not our service's queue if (parsed && parsed.service !== this.serviceName) { this._logger.trace('Skipping workers for cross-service queue', { @@ -364,7 +366,7 @@ export class SmartQueueManager extends QueueManager { }); continue; } - + const workerCount = this.getConfig().defaultQueueOptions?.workers || 1; const concurrency = this.getConfig().defaultQueueOptions?.concurrency || 1; @@ -399,7 +401,8 @@ export class SmartQueueManager extends QueueManager { // Close additional connections for (const [db, connection] of this.connections) { - if (db !== 0) { // Don't close our main connection (DB 0 for queues) + if (db !== 0) { + // Don't close our main connection (DB 0 for queues) connection.disconnect(); this._logger.debug('Closed Redis connection', { db }); } @@ -408,4 +411,4 @@ export class SmartQueueManager extends QueueManager { // Call parent shutdown await super.shutdown(); } -} \ No newline at end of file +} diff --git a/libs/core/queue/src/types.ts b/libs/core/queue/src/types.ts index 780b8ff..2712d74 100644 --- a/libs/core/queue/src/types.ts +++ b/libs/core/queue/src/types.ts @@ -4,14 +4,14 @@ import type { JobOptions, 
QueueStats } from '@stock-bot/types'; // Re-export handler and queue types from shared types package export type { HandlerConfig, - HandlerConfigWithSchedule, - JobHandler, - ScheduledJob, + HandlerConfigWithSchedule, + JobHandler, + ScheduledJob, TypedJobHandler, JobData, JobOptions, QueueWorkerConfig, - QueueStats + QueueStats, } from '@stock-bot/types'; export interface ProcessOptions { @@ -92,7 +92,6 @@ export interface QueueConfig extends QueueManagerConfig { enableMetrics?: boolean; } - // Extended batch job data for queue implementation export interface BatchJobData { payloadKey: string; diff --git a/libs/core/queue/tsconfig.json b/libs/core/queue/tsconfig.json index 8a95639..64b10ca 100644 --- a/libs/core/queue/tsconfig.json +++ b/libs/core/queue/tsconfig.json @@ -8,6 +8,7 @@ "include": ["src/**/*"], "references": [ { "path": "../cache" }, + { "path": "../handler-registry" }, { "path": "../handlers" }, { "path": "../logger" }, { "path": "../types" } diff --git a/libs/core/shutdown/src/shutdown.ts b/libs/core/shutdown/src/shutdown.ts index d31a853..bff6ac6 100644 --- a/libs/core/shutdown/src/shutdown.ts +++ b/libs/core/shutdown/src/shutdown.ts @@ -8,13 +8,13 @@ * - Platform-specific signal support (Windows/Unix) */ +import { getLogger } from '@stock-bot/logger'; import type { PrioritizedShutdownCallback, ShutdownCallback, ShutdownOptions, ShutdownResult, } from './types'; -import { getLogger } from '@stock-bot/logger'; // Global flag that works across all processes/workers declare global { diff --git a/libs/core/types/src/decorators.ts b/libs/core/types/src/decorators.ts index 1b004f3..d73db35 100644 --- a/libs/core/types/src/decorators.ts +++ b/libs/core/types/src/decorators.ts @@ -1,41 +1,41 @@ -/** - * Decorator Type Definitions - * Type definitions for handler decorators - */ - -/** - * Schedule configuration for operations - */ -export interface ScheduleConfig { - cronPattern: string; - priority?: number; - immediately?: boolean; - description?: string; 
-} - -/** - * Decorator metadata stored on classes - */ -export interface DecoratorMetadata { - handlerName?: string; - operations?: Array<{ - name: string; - methodName: string; - schedules?: ScheduleConfig[]; - }>; - disabled?: boolean; -} - -/** - * Type for decorator factories - */ -export type DecoratorFactory = (target: T, context?: any) => T | void; - -/** - * Type for method decorators - */ -export type MethodDecoratorFactory = ( - target: any, - propertyKey: string, - descriptor?: PropertyDescriptor -) => any; \ No newline at end of file +/** + * Decorator Type Definitions + * Type definitions for handler decorators + */ + +/** + * Schedule configuration for operations + */ +export interface ScheduleConfig { + cronPattern: string; + priority?: number; + immediately?: boolean; + description?: string; +} + +/** + * Decorator metadata stored on classes + */ +export interface DecoratorMetadata { + handlerName?: string; + operations?: Array<{ + name: string; + methodName: string; + schedules?: ScheduleConfig[]; + }>; + disabled?: boolean; +} + +/** + * Type for decorator factories + */ +export type DecoratorFactory = (target: T, context?: any) => T | void; + +/** + * Type for method decorators + */ +export type MethodDecoratorFactory = ( + target: any, + propertyKey: string, + descriptor?: PropertyDescriptor +) => any; diff --git a/libs/core/types/src/handlers.ts b/libs/core/types/src/handlers.ts index 9985efc..ed6c16f 100644 --- a/libs/core/types/src/handlers.ts +++ b/libs/core/types/src/handlers.ts @@ -70,4 +70,3 @@ export interface OperationMetadata { description?: string; validation?: (input: unknown) => boolean; } - diff --git a/libs/core/types/src/index.ts b/libs/core/types/src/index.ts index 8c5c2ae..af50f21 100644 --- a/libs/core/types/src/index.ts +++ b/libs/core/types/src/index.ts @@ -64,6 +64,24 @@ export type { // Export service container interface export type { IServiceContainer } from './service-container'; +// Export service types +export type { 
+ ServiceTypes, + Logger, + CacheProvider, + QueueManager, + Queue, + MongoDBClient, + PostgresClient, + QuestDBClient, + Browser, + BrowserContext, + Page, + ProxyManager, + ProxyInfo, + ProxyStats, +} from './services'; + // Export decorator types export type { ScheduleConfig, @@ -73,10 +91,4 @@ export type { } from './decorators'; // Export queue types -export type { - JobData, - JobOptions, - QueueStats, - BatchJobData, - QueueWorkerConfig, -} from './queue'; +export type { JobData, JobOptions, QueueStats, BatchJobData, QueueWorkerConfig } from './queue'; diff --git a/libs/core/types/src/queue.ts b/libs/core/types/src/queue.ts index bf8bfa0..83bad76 100644 --- a/libs/core/types/src/queue.ts +++ b/libs/core/types/src/queue.ts @@ -1,64 +1,64 @@ -/** - * Queue Type Definitions - * Types specific to queue operations - */ - -/** - * Job data structure for queue operations - */ -export interface JobData { - handler: string; - operation: string; - payload: T; - priority?: number; -} - -/** - * Queue job options - */ -export interface JobOptions { - priority?: number; - delay?: number; - attempts?: number; - backoff?: { - type: 'exponential' | 'fixed'; - delay: number; - }; - removeOnComplete?: boolean | number; - removeOnFail?: boolean | number; - timeout?: number; -} - -/** - * Queue statistics - */ -export interface QueueStats { - waiting: number; - active: number; - completed: number; - failed: number; - delayed: number; - paused: boolean; - workers?: number; -} - -/** - * Batch job configuration - */ -export interface BatchJobData { - payloadKey: string; - batchIndex: number; - totalBatches: number; - items: unknown[]; -} - -/** - * Queue worker configuration - */ -export interface QueueWorkerConfig { - concurrency?: number; - maxStalledCount?: number; - stalledInterval?: number; - lockDuration?: number; - lockRenewTime?: number; -} \ No newline at end of file +/** + * Queue Type Definitions + * Types specific to queue operations + */ + +/** + * Job data structure 
for queue operations + */ +export interface JobData { + handler: string; + operation: string; + payload: T; + priority?: number; +} + +/** + * Queue job options + */ +export interface JobOptions { + priority?: number; + delay?: number; + attempts?: number; + backoff?: { + type: 'exponential' | 'fixed'; + delay: number; + }; + removeOnComplete?: boolean | number; + removeOnFail?: boolean | number; + timeout?: number; +} + +/** + * Queue statistics + */ +export interface QueueStats { + waiting: number; + active: number; + completed: number; + failed: number; + delayed: number; + paused: boolean; + workers?: number; +} + +/** + * Batch job configuration + */ +export interface BatchJobData { + payloadKey: string; + batchIndex: number; + totalBatches: number; + items: unknown[]; +} + +/** + * Queue worker configuration + */ +export interface QueueWorkerConfig { + concurrency?: number; + maxStalledCount?: number; + stalledInterval?: number; + lockDuration?: number; + lockRenewTime?: number; +} diff --git a/libs/core/types/src/service-container.ts b/libs/core/types/src/service-container.ts index 6a69bee..de23db3 100644 --- a/libs/core/types/src/service-container.ts +++ b/libs/core/types/src/service-container.ts @@ -1,28 +1,17 @@ -/** - * Service Container Interface - * Pure interface definition with no dependencies - * Used by both DI and Handlers packages - */ - -/** - * Universal service container interface - * Provides access to all common services in a type-safe manner - * Designed to work across different service contexts - */ -export interface IServiceContainer { - // Core infrastructure - readonly logger: any; // Logger instance - readonly cache?: any; // Cache provider (Redis/Dragonfly) - optional - readonly globalCache?: any; // Global cache provider (shared across services) - optional - readonly queue?: any; // Queue manager (BullMQ) - optional - readonly proxy?: any; // Proxy manager service - optional (depends on cache) - readonly browser?: any; // Browser 
automation (Playwright) - - // Database clients - all optional to support selective enabling - readonly mongodb?: any; // MongoDB client - readonly postgres?: any; // PostgreSQL client - readonly questdb?: any; // QuestDB client (time-series) - - // Optional extensions for future use - readonly custom?: Record; -} \ No newline at end of file +/** + * Service Container Interface + * Pure interface definition with full type safety + * Used by both DI and Handlers packages + */ + +import type { ServiceTypes } from './services'; + +/** + * Universal service container interface + * Provides access to all common services in a type-safe manner + * Designed to work across different service contexts + */ +export interface IServiceContainer extends ServiceTypes { + // Optional extensions for future use + readonly custom?: Record; +} diff --git a/libs/core/types/src/services.ts b/libs/core/types/src/services.ts new file mode 100644 index 0000000..280970f --- /dev/null +++ b/libs/core/types/src/services.ts @@ -0,0 +1,311 @@ +/** + * Service Type Definitions + * Provides full type safety for all injected services + */ + +// Import actual service types from their packages +// These are type-only imports, so no runtime dependencies + +// Logger types +export interface Logger { + info(message: string, meta?: Record): void; + error(message: string, meta?: Record): void; + warn(message: string, meta?: Record): void; + debug(message: string, meta?: Record): void; + trace?(message: string, meta?: Record): void; + child?(name: string, context?: Record): Logger; +} + +// Cache types - matches the actual CacheProvider interface from @stock-bot/cache +export interface CacheProvider { + get(key: string): Promise; + set( + key: string, + value: T, + options?: + | number + | { + ttl?: number; + preserveTTL?: boolean; + onlyIfExists?: boolean; + onlyIfNotExists?: boolean; + getOldValue?: boolean; + } + ): Promise; + del(key: string): Promise; + exists(key: string): Promise; + clear(): 
Promise; + keys(pattern: string): Promise; + getStats(): CacheStats; + health(): Promise; + waitForReady(timeout?: number): Promise; + isReady(): boolean; +} + +export interface CacheStats { + hits: number; + misses: number; + errors: number; + hitRate: number; + total: number; + uptime: number; +} + +// Queue Manager types +export interface QueueManager { + getQueue(queueName: string, options?: any): Queue; + createQueue(queueName: string, options?: any): Queue; + getOrCreateQueue(queueName: string, options?: any): Queue; + queueExists(queueName: string): boolean; + deleteQueue(queueName: string): Promise; + getQueueStats(queueName?: string): Promise; + getBatchCache(queueName: string): CacheProvider | null; + getRateLimiter(): any; + checkRateLimit(key: string, rule?: string): Promise<{ allowed: boolean; retryAfter?: number }>; + shutdown(): Promise; +} + +export interface Queue { + add(name: string, data: any, opts?: any): Promise; + addBulk(jobs: Array<{ name: string; data: any; opts?: any }>): Promise; + process(concurrency: number, processor: any): Promise; + process(name: string, concurrency: number, processor: any): Promise; + process(processor: any): Promise; + on(event: string, listener: (...args: any[]) => void): void; + pause(): Promise; + resume(): Promise; + close(): Promise; + clean(grace: number, limit?: number, type?: string): Promise; + getJobs(types: string[], start?: number, end?: number, asc?: boolean): Promise; + getJobCounts(...types: string[]): Promise>; + name: string; +} + +// MongoDB types +export interface MongoDBClient { + connect(): Promise; + disconnect(): Promise; + isConnected(): boolean; + getDb(dbName?: string): any; // MongoDB Db type + collection(name: string, dbName?: string): any; // MongoDB Collection + createCollection(name: string, options?: any, dbName?: string): Promise; + dropCollection(name: string, dbName?: string): Promise; + listCollections(dbName?: string): Promise>; + find(collection: string, filter?: any, 
options?: any, dbName?: string): Promise; + findOne( + collection: string, + filter: any, + options?: any, + dbName?: string + ): Promise; + insertOne(collection: string, document: T, options?: any, dbName?: string): Promise; + insertMany( + collection: string, + documents: T[], + options?: any, + dbName?: string + ): Promise; + updateOne( + collection: string, + filter: any, + update: any, + options?: any, + dbName?: string + ): Promise; + updateMany( + collection: string, + filter: any, + update: any, + options?: any, + dbName?: string + ): Promise; + deleteOne(collection: string, filter: any, options?: any, dbName?: string): Promise; + deleteMany(collection: string, filter: any, options?: any, dbName?: string): Promise; + countDocuments(collection: string, filter?: any, options?: any, dbName?: string): Promise; + aggregate( + collection: string, + pipeline: any[], + options?: any, + dbName?: string + ): Promise; + createIndex(collection: string, indexSpec: any, options?: any, dbName?: string): Promise; + dropIndex(collection: string, indexName: string, options?: any, dbName?: string): Promise; + listIndexes(collection: string, dbName?: string): Promise; + batchUpsert( + collection: string, + documents: T[], + uniqueKeys: string | string[], + options?: any, + dbName?: string + ): Promise; + batchUpsertStock( + collection: string, + documents: T[], + uniqueKeys: string | string[], + options?: any + ): Promise; + batchUpsertTrading( + collection: string, + documents: T[], + uniqueKeys: string | string[], + options?: any + ): Promise; + batchUpsertAnalytics( + collection: string, + documents: T[], + uniqueKeys: string | string[], + options?: any + ): Promise; +} + +// PostgreSQL types +export interface PostgresClient { + query(text: string, params?: any[]): Promise<{ rows: T[]; rowCount: number }>; + queryOne(text: string, params?: any[]): Promise; + execute(text: string, params?: any[]): Promise<{ rowCount: number }>; + transaction(callback: (client: any) => 
Promise): Promise; + insert(table: string, data: Record, returning?: string | string[]): Promise; + update( + table: string, + data: Record, + where: Record, + returning?: string | string[] + ): Promise; + delete(table: string, where: Record, returning?: string | string[]): Promise; + upsert( + table: string, + data: Record, + conflictColumns: string | string[], + returning?: string | string[] + ): Promise; + batchInsert( + table: string, + data: Record[], + returning?: string | string[] + ): Promise; + batchUpsert( + table: string, + data: Record[], + conflictColumns: string | string[], + returning?: string | string[] + ): Promise; + exists(table: string, where: Record): Promise; + count(table: string, where?: Record): Promise; + getClient(): Promise; + releaseClient(client: any): void; + end(): Promise; + isConnected(): boolean; +} + +// QuestDB types +export interface QuestDBClient { + connect(): Promise; + disconnect(): Promise; + isConnected(): boolean; + query(sql: string): Promise; + insert(table: string, data: Record | Record[]): Promise; + insertBatch(table: string, data: Record[]): Promise; + createTable(tableDef: any): Promise; + dropTable(tableName: string): Promise; + tableExists(tableName: string): Promise; + getTableSchema(tableName: string): Promise; + flush(): Promise; + insertWithTimestamp( + table: string, + data: Record, + timestamp?: Date | number + ): Promise; + batchInsertWithTimestamp( + table: string, + data: Array & { timestamp?: Date | number }> + ): Promise; +} + +// Browser types (Playwright) +export interface Browser { + newPage(): Promise; + newContext(options?: any): Promise; + close(): Promise; + isConnected(): boolean; + version(): string; +} + +export interface BrowserContext { + newPage(): Promise; + close(): Promise; + pages(): Page[]; + setDefaultTimeout(timeout: number): void; + setDefaultNavigationTimeout(timeout: number): void; + addCookies(cookies: any[]): Promise; + clearCookies(): Promise; +} + +export interface Page { + 
goto(url: string, options?: any): Promise; + waitForSelector(selector: string, options?: any): Promise; + click(selector: string, options?: any): Promise; + fill(selector: string, value: string, options?: any): Promise; + evaluate(pageFunction: any, arg?: any): Promise; + evaluateHandle(pageFunction: any, arg?: any): Promise; + screenshot(options?: any): Promise; + pdf(options?: any): Promise; + content(): Promise; + title(): Promise; + url(): string; + close(): Promise; + waitForTimeout(timeout: number): Promise; + waitForLoadState(state?: string, options?: any): Promise; + locator(selector: string): any; + $$(selector: string): Promise; + textContent(selector: string): Promise; +} + +// Proxy Manager types +export interface ProxyManager { + getProxy(key?: string): Promise; + getProxies(count: number, key?: string): Promise; + releaseProxy(proxy: ProxyInfo | string): Promise; + markProxyFailed(proxy: ProxyInfo | string, reason?: string): Promise; + getStats(): Promise; + resetProxy(proxy: ProxyInfo | string): Promise; + blacklistProxy(proxy: ProxyInfo | string, duration?: number): Promise; + isBlacklisted(proxy: ProxyInfo | string): Promise; + refreshProxies(): Promise; +} + +export interface ProxyInfo { + id: string; + host: string; + port: number; + username?: string; + password?: string; + protocol?: string; + country?: string; + lastUsed?: Date; + failureCount?: number; + successCount?: number; + averageResponseTime?: number; +} + +export interface ProxyStats { + total: number; + available: number; + inUse: number; + failed: number; + blacklisted: number; +} + +/** + * Complete service types for dependency injection + */ +export interface ServiceTypes { + logger: Logger; + cache?: CacheProvider; + globalCache?: CacheProvider; + queue?: QueueManager; + proxy?: ProxyManager; + browser?: Browser; + mongodb?: MongoDBClient; + postgres?: PostgresClient; + questdb?: QuestDBClient; +} diff --git a/libs/data/mongodb/src/client.ts b/libs/data/mongodb/src/client.ts 
index 251b6b7..8189945 100644 --- a/libs/data/mongodb/src/client.ts +++ b/libs/data/mongodb/src/client.ts @@ -95,7 +95,7 @@ export class MongoDBClient { if (this.dynamicPoolConfig?.enabled) { this.startPoolMonitoring(); } - + return; } catch (error) { lastError = error as Error; @@ -108,7 +108,7 @@ export class MongoDBClient { } this.logger.error(`MongoDB connection attempt ${attempt} failed:`, error); - + if (this.client) { await this.client.close(); this.client = null; diff --git a/libs/data/postgres/src/client.ts b/libs/data/postgres/src/client.ts index c54df1c..f202b17 100644 --- a/libs/data/postgres/src/client.ts +++ b/libs/data/postgres/src/client.ts @@ -442,15 +442,20 @@ export class PostgreSQLClient { user: this.config.username, passwordLength: this.config.password?.length, passwordType: typeof this.config.password, - passwordValue: this.config.password ? `${this.config.password.substring(0, 3)}***` : 'NO_PASSWORD', + passwordValue: this.config.password + ? `${this.config.password.substring(0, 3)}***` + : 'NO_PASSWORD', }); - + const poolConfig = { host: this.config.host, port: this.config.port, database: this.config.database, user: this.config.username, - password: typeof this.config.password === 'string' ? this.config.password : String(this.config.password || ''), + password: + typeof this.config.password === 'string' + ? 
this.config.password + : String(this.config.password || ''), min: this.config.poolSettings?.min, max: this.config.poolSettings?.max, idleTimeoutMillis: this.config.poolSettings?.idleTimeoutMillis, @@ -465,7 +470,7 @@ export class PostgreSQLClient { } : false, }; - + return poolConfig; } diff --git a/libs/data/questdb/src/client.ts b/libs/data/questdb/src/client.ts index e7ead4b..efd9f49 100644 --- a/libs/data/questdb/src/client.ts +++ b/libs/data/questdb/src/client.ts @@ -430,24 +430,19 @@ export class QuestDBClient { // Only add user/password if they are provided if (this.config.user) { - this.logger.debug('Adding user to QuestDB pool config:', this.config.user); config.user = this.config.user; } else { - this.logger.debug('No user provided for QuestDB connection'); } if (this.config.password) { - this.logger.debug('Adding password to QuestDB pool config'); config.password = this.config.password; } else { - this.logger.debug('No password provided for QuestDB connection'); } - this.logger.debug('Final QuestDB pool config:', { ...config, password: config.password ? 
'[REDACTED]' : undefined, diff --git a/libs/data/questdb/src/types.ts b/libs/data/questdb/src/types.ts index f1514d6..d219abc 100644 --- a/libs/data/questdb/src/types.ts +++ b/libs/data/questdb/src/types.ts @@ -39,7 +39,6 @@ export interface QuestDBConnectionOptions { */ export type QuestDBHealthStatus = 'healthy' | 'degraded' | 'unhealthy'; - export interface QuestDBMetrics { queriesPerSecond: number; insertsPerSecond: number; diff --git a/libs/services/browser/src/types.ts b/libs/services/browser/src/types.ts index 37df90b..3ff83f5 100644 --- a/libs/services/browser/src/types.ts +++ b/libs/services/browser/src/types.ts @@ -6,7 +6,6 @@ export interface BrowserOptions { enableNetworkLogging?: boolean; } - export interface ScrapingResult { data: T; url: string; diff --git a/libs/services/proxy/src/proxy-manager.ts b/libs/services/proxy/src/proxy-manager.ts index b53fdb3..71355dc 100644 --- a/libs/services/proxy/src/proxy-manager.ts +++ b/libs/services/proxy/src/proxy-manager.ts @@ -282,21 +282,18 @@ export class ProxyManager { } const { apiKey, apiUrl } = this.config.webshare; - + this.logger.info('Fetching proxies from WebShare API', { apiUrl }); try { - const response = await fetch( - `${apiUrl}proxy/list/?mode=direct&page=1&page_size=100`, - { - method: 'GET', - headers: { - Authorization: `Token ${apiKey}`, - 'Content-Type': 'application/json', - }, - signal: AbortSignal.timeout(10000), // 10 second timeout - } - ); + const response = await fetch(`${apiUrl}proxy/list/?mode=direct&page=1&page_size=100`, { + method: 'GET', + headers: { + Authorization: `Token ${apiKey}`, + 'Content-Type': 'application/json', + }, + signal: AbortSignal.timeout(10000), // 10 second timeout + }); if (!response.ok) { throw new Error(`WebShare API request failed: ${response.status} ${response.statusText}`); @@ -370,13 +367,13 @@ export class ProxyManager { // Fetch proxies on startup if enabled if (this.config.enabled && this.config.webshare) { this.logger.info('Proxy fetching is 
enabled, fetching proxies from WebShare...'); - + try { const proxies = await this.fetchWebShareProxies(); if (proxies.length === 0) { throw new Error('No proxies fetched from WebShare API'); } - + await this.updateProxies(proxies); this.logger.info('ProxyManager initialized with fresh proxies', { count: proxies.length, @@ -385,10 +382,14 @@ export class ProxyManager { } catch (error) { // If proxy fetching is enabled but fails, the service should not start this.logger.error('Failed to fetch proxies during initialization', { error }); - throw new Error(`ProxyManager initialization failed: ${error instanceof Error ? error.message : 'Unknown error'}`); + throw new Error( + `ProxyManager initialization failed: ${error instanceof Error ? error.message : 'Unknown error'}` + ); } } else { - this.logger.info('ProxyManager initialized without fetching proxies (disabled or not configured)'); + this.logger.info( + 'ProxyManager initialized without fetching proxies (disabled or not configured)' + ); } } } diff --git a/tsconfig.json b/tsconfig.json index 2f2388d..23d15aa 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -60,7 +60,7 @@ // Paths and output "baseUrl": ".", "paths": { - "@stock-bot/*": ["libs/*/src"], + "@stock-bot/*": ["libs/*/src"] } }, "exclude": ["node_modules", "dist"] diff --git a/tsconfig.unused.json b/tsconfig.unused.json index 4781c1a..db98298 100644 --- a/tsconfig.unused.json +++ b/tsconfig.unused.json @@ -1,19 +1,14 @@ -{ - "extends": "./tsconfig.json", - "include": [ - "apps/**/*.ts", - "apps/**/*.tsx", - "libs/**/*.ts", - "libs/**/*.tsx" - ], - "exclude": [ - "node_modules", - "dist", - "**/dist/**", - "**/node_modules/**", - "**/*.test.ts", - "**/*.spec.ts", - "**/test/**", - "**/tests/**" - ] -} \ No newline at end of file +{ + "extends": "./tsconfig.json", + "include": ["apps/**/*.ts", "apps/**/*.tsx", "libs/**/*.ts", "libs/**/*.tsx"], + "exclude": [ + "node_modules", + "dist", + "**/dist/**", + "**/node_modules/**", + "**/*.test.ts", + 
"**/*.spec.ts", + "**/test/**", + "**/tests/**" + ] +}